Add failure case messages to BatchNorm test notebook
parent b42365c014, commit e7fc62cd09
@@ -41,18 +41,24 @@
 " 0.79999177, -0.1999984 , -0.19999221, 0.79999528, -0.19999926],\n",
 " [ 0.7999955 , 0.79998686, 0.79999924, 0.7996655 , -0.19999899,\n",
 " -0.19999177, 0.7999984 , 0.79999221, -0.19999528, 0.79999926]])\n",
-"assert BN_fprop.shape == true_fprop_outputs.shape, (\n",
+"shape_test=BN_fprop.shape == true_fprop_outputs.shape, (\n",
 " 'Layer bprop returns incorrect shaped array. '\n",
 " 'Correct shape is \\n\\n{0}\\n\\n but returned shape is \\n\\n{1}.'\n",
 " .format(true_fprop_outputs.shape, BN_fprop.shape)\n",
 ")\n",
-"assert np.allclose(np.round(BN_fprop, decimals=2), np.round(true_fprop_outputs, decimals=2)), (\n",
+"numerical_test=np.allclose(np.round(BN_fprop, decimals=2), np.round(true_fprop_outputs, decimals=2)), (\n",
 "'Layer bprop does not return correct values. '\n",
 "'Correct output is \\n\\n{0}\\n\\n but returned output is \\n\\n{1}\\n\\n difference is \\n\\n{2}'\n",
 ".format(true_fprop_outputs, BN_fprop, BN_fprop-true_fprop_outputs)\n",
 ")\n",
 "\n",
-"print(\"Batch Normalization F-prop test passed\")"
+"if shape_test and numerical_test:\n",
+" print(\"Batch Normalization F-prop test passed\")\n",
+"if numerical_test==False:\n",
+" print(\"Batch Normalization F-prop numerical test failed\")\n",
+"if shape_test==False:\n",
+" print(\"Batch Normalization F-prop shape test failed\")\n",
+" "
 ]
 },
 {
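Review note on the new F-prop test cell above: shape_test=BN_fprop.shape == true_fprop_outputs.shape, (...) binds a (bool, message) tuple rather than a bool, and a non-empty tuple is always truthy, so the pass branch prints even when the comparison fails and the ==False branches can never run; numerical_test has the same problem. A minimal sketch of the intended checks, keeping the comparisons separate from the messages (the wrapper name report_fprop_test is hypothetical, the variable names are the notebook's):

    import numpy as np

    def report_fprop_test(BN_fprop, true_fprop_outputs):
        # shape_test is now a plain bool, not a (bool, message) tuple.
        shape_test = BN_fprop.shape == true_fprop_outputs.shape
        # Only compare values once the shapes already agree.
        numerical_test = shape_test and np.allclose(
            np.round(BN_fprop, decimals=2), np.round(true_fprop_outputs, decimals=2))
        if shape_test and numerical_test:
            print("Batch Normalization F-prop test passed")
        if not shape_test:
            print("Batch Normalization F-prop shape test failed: "
                  "correct shape is {0} but returned shape is {1}"
                  .format(true_fprop_outputs.shape, BN_fprop.shape))
        if shape_test and not numerical_test:
            print("Batch Normalization F-prop numerical test failed")

In the notebook this would be called as report_fprop_test(BN_fprop, true_fprop_outputs) once the layer's fprop output has been computed.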
@@ -69,18 +75,23 @@
 " -6.85384297e-03, -9.40668131e-07, -7.99795574e-06,\n",
 " -5.03719464e-07, -1.69038704e-05, 1.82061629e-05,\n",
 " -5.62083224e-07]])\n",
-"assert BN_bprop.shape == true_bprop_outputs.shape, (\n",
+"shape_test=BN_bprop.shape == true_bprop_outputs.shape, (\n",
 " 'Layer bprop returns incorrect shaped array. '\n",
 " 'Correct shape is \\n\\n{0}\\n\\n but returned shape is \\n\\n{1}.'\n",
 " .format(true_bprop_outputs.shape, BN_bprop.shape)\n",
 ")\n",
-"assert np.allclose(np.round(BN_bprop, decimals=2), np.round(true_bprop_outputs, decimals=2)), (\n",
+"numerical_test=np.allclose(np.round(BN_bprop, decimals=2), np.round(true_bprop_outputs, decimals=2)), (\n",
 "'Layer bprop does not return correct values. '\n",
 "'Correct output is \\n\\n{0}\\n\\n but returned output is \\n\\n{1}\\n\\n difference is \\n\\n{2}'\n",
 ".format(true_bprop_outputs, BN_bprop, BN_bprop-true_bprop_outputs)\n",
 ")\n",
 "\n",
-"print(\"Batch Normalization B-prop test passed\")"
+"if shape_test and numerical_test:\n",
+" print(\"Batch Normalization B-prop test passed\")\n",
+"if numerical_test==False:\n",
+" print(\"Batch Normalization B-prop numerical test failed\")\n",
+"if shape_test==False:\n",
+" print(\"Batch Normalization B-prop shape test failed\")"
 ]
 },
 {
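Review note on the B-prop cell: the shape and value checks are repeated verbatim in every test cell, so one option, sketched here only and not part of this commit (the helper name check_outputs is made up), is to factor them into a small function and pass it the arrays the cell already defines:

    import numpy as np

    def check_outputs(name, returned, expected):
        # True only when both the shape and the values rounded to 2 decimal places match.
        shape_ok = returned.shape == expected.shape
        values_ok = shape_ok and np.allclose(
            np.round(returned, decimals=2), np.round(expected, decimals=2))
        if not shape_ok:
            print("Batch Normalization {0} shape test failed: "
                  "correct shape is {1} but returned shape is {2}"
                  .format(name, expected.shape, returned.shape))
        elif not values_ok:
            print("Batch Normalization {0} numerical test failed, difference is\n{1}"
                  .format(name, returned - expected))
        else:
            print("Batch Normalization {0} test passed".format(name))
        return values_ok

For this cell that would be check_outputs('B-prop', BN_bprop, true_bprop_outputs), and the F-prop cell could reuse the same helper.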
@@ -95,29 +106,38 @@
 "true_grads_wrt_beta = np.array([ 0.63944963, 1.70281254, -0.36821806, -1.76256935, -1.02948485,\n",
 " -0.77909018, -0.62342786, 0.24832055, 0.46500505, -0.01934809])\n",
 "\n",
-"assert grads_wrt_gamma.shape == true_grads_wrt_gamma.shape, (\n",
+"grads_gamma_shape_test=grads_wrt_gamma.shape == true_grads_wrt_gamma.shape, (\n",
 " 'Layer bprop returns incorrect shaped array. '\n",
 " 'Correct shape is \\n\\n{0}\\n\\n but returned shape is \\n\\n{1}.'\n",
 " .format(true_grads_wrt_gamma.shape, grads_wrt_gamma.shape)\n",
 ")\n",
-"assert np.allclose(np.round(grads_wrt_gamma, decimals=2), np.round(true_grads_wrt_gamma, decimals=2)), (\n",
+"grads_gamma_numerical_test=np.allclose(np.round(grads_wrt_gamma, decimals=2), np.round(true_grads_wrt_gamma, decimals=2)), (\n",
 "'Layer bprop does not return correct values. '\n",
 "'Correct output is \\n\\n{0}\\n\\n but returned output is \\n\\n{1}\\n\\n difference is \\n\\n{2}'\n",
 ".format(true_grads_wrt_gamma, grads_wrt_gamma, grads_wrt_gamma-true_grads_wrt_gamma)\n",
 ")\n",
 "\n",
-"assert grads_wrt_beta.shape == true_grads_wrt_beta.shape, (\n",
+"grads_beta_shape_test=grads_wrt_beta.shape == true_grads_wrt_beta.shape, (\n",
 " 'Layer bprop returns incorrect shaped array. '\n",
 " 'Correct shape is \\n\\n{0}\\n\\n but returned shape is \\n\\n{1}.'\n",
 " .format(true_grads_wrt_beta.shape, grads_wrt_beta.shape)\n",
 ")\n",
-"assert np.allclose(np.round(grads_wrt_beta, decimals=2), np.round(true_grads_wrt_beta, decimals=2)), (\n",
+"grads_beta_numerical_test=np.allclose(np.round(grads_wrt_beta, decimals=2), np.round(true_grads_wrt_beta, decimals=2)), (\n",
 "'Layer bprop does not return correct values. '\n",
 "'Correct output is \\n\\n{0}\\n\\n but returned output is \\n\\n{1}\\n\\n difference is \\n\\n{2}'\n",
 ".format(true_grads_wrt_beta, grads_wrt_beta, grads_wrt_beta-true_grads_wrt_beta)\n",
 ")\n",
 "\n",
-"print(\"Batch Normalization grads wrt to params test passed\")"
+"if grads_gamma_shape_test and grads_gamma_numerical_test and grads_beta_shape_test and grads_beta_numerical_test:\n",
+" print(\"Batch Normalization grads wrt to params test passed\")\n",
+"if grads_gamma_numerical_test==False:\n",
+" print(\"Batch Normalization grads_wrt_gamma numerical test failed\")\n",
+"if grads_gamma_shape_test==False:\n",
+" print(\"Batch Normalization grads_wrt_gamma shape test failed\")\n",
+"if grads_beta_numerical_test==False:\n",
+" print(\"Batch Normalization grads_wrt_beta numerical test failed\")\n",
+"if grads_beta_shape_test==False:\n",
+" print(\"Batch Normalization grads_wrt_beta shape test failed\")"
 ]
 },
 {
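Review note on the grads-wrt-params cell: the formatted failure strings that used to be assert messages now sit in the unused second element of each tuple, so they are never shown. A sketch of one way to keep a detailed report without stopping the notebook (not what this commit does; the tolerance below roughly mirrors the rounding to 2 decimal places used in the cell):

    import numpy as np

    def soft_check(name, returned, expected):
        # assert_allclose reports shape mismatches and the differing entries;
        # catching the AssertionError turns a hard failure into a printed message.
        try:
            np.testing.assert_allclose(returned, expected, rtol=0, atol=5e-3)
            print("{0} test passed".format(name))
        except AssertionError as err:
            print("{0} test failed".format(name))
            print(err)

In the notebook this would be called as soft_check('Batch Normalization grads_wrt_gamma', grads_wrt_gamma, true_grads_wrt_gamma) and again for grads_wrt_beta.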