CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutSign UpSign In
amanchadha

Real-time collaboration for Jupyter Notebooks, Linux Terminals, LaTeX, VS Code, R IDE, and more,
all in one place. Commercial Alternative to JupyterHub.

GitHub Repository: amanchadha/coursera-deep-learning-specialization
Path: blob/master/C4 - Convolutional Neural Networks/Week 2/ResNets/public_tests.py
Views: 4818
1
from termcolor import colored
2
import tensorflow as tf
3
from tensorflow.keras.initializers import random_uniform, glorot_uniform, constant, identity
4
import numpy as np
5
6
def identity_block_test(target):
    """Public unit test for the ResNet identity block.

    Runs ``target`` (the learner's ``identity_block`` implementation) on a
    small deterministic batch with constant-initialized weights and compares
    the result against precomputed reference values, in both inference mode
    (``training=False``) and training mode (``training=True``).

    Arguments:
        target -- callable with signature
                  ``target(X, f, filters, initializer=..., training=...)``
                  returning a TensorFlow tensor (must support ``.numpy()``)

    Raises:
        AssertionError -- if any check on output shape, final ReLU activation,
                          padding/stride behavior, batch normalization, or
                          exact output values fails
    """
    np.random.seed(1)
    # Three constant 4x4x3 "images" (values -1, 1, 3) so the expected
    # outputs are easy to precompute and check exactly.
    X1 = np.ones((1, 4, 4, 3)) * -1
    X2 = np.ones((1, 4, 4, 3)) * 1
    X3 = np.ones((1, 4, 4, 3)) * 3

    X = np.concatenate((X1, X2, X3), axis=0).astype(np.float32)

    # Inference mode: BatchNorm layers use their moving statistics.
    A3 = target(X,
                f=2,
                filters=[4, 4, 3],
                initializer=lambda seed=0: constant(value=1),
                training=False)

    A3np = A3.numpy()
    assert tuple(A3np.shape) == (3, 4, 4, 3), "Shapes does not match. This is really weird"
    assert np.all(A3np >= 0), "The ReLu activation at the last layer is missing"
    # Average over channels; keep only the first and last spatial rows,
    # giving a (3, 2, 4) summary that is cheap to compare.
    resume = A3np[:, (0, -1), :, :].mean(axis=3)

    # NOTE(review): these integer-ratio checks between border and interior
    # positions presumably detect wrong padding/stride choices, per the
    # assertion messages — the exact 2x relationship depends on the
    # reference identity-block architecture.
    assert np.floor(resume[1, 0, 0]) == 2 * np.floor(resume[1, 0, 3]), "Check the padding and strides"
    assert np.floor(resume[1, 0, 3]) == np.floor(resume[1, 1, 0]), "Check the padding and strides"
    # (A verbatim duplicate of the next assertion was removed here.)
    assert np.floor(resume[1, 1, 0]) == 2 * np.floor(resume[1, 1, 3]), "Check the padding and strides"

    # A large fractional part can only come from BatchNorm's scaling;
    # pure convolutions of integer-valued inputs would stay integral.
    assert resume[1, 1, 0] - np.floor(resume[1, 1, 0]) > 0.7, "Looks like the BatchNormalization units are not working"

    assert np.allclose(resume,
                       np.array([[[0.0, 0.0, 0.0, 0.0],
                                  [0.0, 0.0, 0.0, 0.0]],
                                 [[192.71236, 192.71236, 192.71236, 96.85619],
                                  [96.85619, 96.85619, 96.85619, 48.9281]],
                                 [[578.1371, 578.1371, 578.1371, 290.56854],
                                  [290.56854, 290.56854, 290.56854, 146.78427]]]),
                       atol=1e-5), "Wrong values with training=False"

    # Training mode: BatchNorm normalizes with the current batch statistics,
    # producing much smaller, centered outputs.
    np.random.seed(1)
    A4 = target(X,
                f=3,
                filters=[3, 3, 3],
                initializer=lambda seed=7: constant(value=1),
                training=True)
    A4np = A4.numpy()
    resume = A4np[:, (0, -1), :, :].mean(axis=3)
    assert np.allclose(resume,
                       np.array([[[0., 0., 0., 0., ],
                                  [0., 0., 0., 0., ]],
                                 [[0.37394285, 0.37394285, 0.37394285, 0.37394285],
                                  [0.37394285, 0.37394285, 0.37394285, 0.37394285]],
                                 [[3.2379014, 4.1394243, 4.1394243, 3.2379014],
                                  [3.2379014, 4.1394243, 4.1394243, 3.2379014]]]),
                       atol=1e-5), "Wrong values with training=True"

    print(colored("All tests passed!", "green"))