GitHub Repository: y33-j3T/Coursera-Deep-Learning
Path: blob/master/Improving Deep Neural Networks Hyperparameter tuning, Regularization and Optimization/week5/Regularization/reg_utils.py

import numpy as np
import matplotlib.pyplot as plt
import h5py
import sklearn
import sklearn.datasets
import sklearn.linear_model
import scipy.io

def sigmoid(x):
    """
    Compute the sigmoid of x

    Arguments:
    x -- A scalar or numpy array of any size.

    Return:
    s -- sigmoid(x)
    """
    s = 1/(1+np.exp(-x))
    return s

def relu(x):
    """
    Compute the relu of x

    Arguments:
    x -- A scalar or numpy array of any size.

    Return:
    s -- relu(x)
    """
    s = np.maximum(0,x)

    return s

def load_planar_dataset(seed):

    np.random.seed(seed)

    m = 400 # number of examples
    N = int(m/2) # number of points per class
    D = 2 # dimensionality
    X = np.zeros((m,D)) # data matrix where each row is a single example
    Y = np.zeros((m,1), dtype='uint8') # labels vector (0 for red, 1 for blue)
    a = 4 # maximum radius of the flower

    for j in range(2):
        ix = range(N*j,N*(j+1))
        t = np.linspace(j*3.12,(j+1)*3.12,N) + np.random.randn(N)*0.2 # theta
        r = a*np.sin(4*t) + np.random.randn(N)*0.2 # radius
        X[ix] = np.c_[r*np.sin(t), r*np.cos(t)]
        Y[ix] = j

    X = X.T
    Y = Y.T

    return X, Y

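# Illustrative check (not in the original file): after the transposes above,
# examples are stored column-wise, matching the (features, examples)
# convention used by the rest of this module.
#
#     X, Y = load_planar_dataset(seed=1)
#     assert X.shape == (2, 400) and Y.shape == (1, 400)
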
def initialize_parameters(layer_dims):
    """
    Arguments:
    layer_dims -- python array (list) containing the dimensions of each layer in our network

    Returns:
    parameters -- python dictionary containing your parameters "W1", "b1", ..., "WL", "bL":
                    W1 -- weight matrix of shape (layer_dims[1], layer_dims[0])
                    b1 -- bias vector of shape (layer_dims[1], 1)
                    Wl -- weight matrix of shape (layer_dims[l], layer_dims[l-1])
                    bl -- bias vector of shape (layer_dims[l], 1)

    Tips:
    - For example: the layer_dims for the "Planar Data classification model" would have been [2,2,1].
    This means W1's shape was (2,2), b1 was (2,1), W2 was (1,2) and b2 was (1,1). Now you have to generalize it!
    - In the for loop, use parameters['W' + str(l)] to access Wl, where l is the iterative integer.
    """

    np.random.seed(3)
    parameters = {}
    L = len(layer_dims) # number of layers in the network, including the input layer

    for l in range(1, L):
        parameters['W' + str(l)] = np.random.randn(layer_dims[l], layer_dims[l-1]) / np.sqrt(layer_dims[l-1])
        parameters['b' + str(l)] = np.zeros((layer_dims[l], 1))

        assert(parameters['W' + str(l)].shape == (layer_dims[l], layer_dims[l-1]))
        assert(parameters['b' + str(l)].shape == (layer_dims[l], 1))

    return parameters

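# Illustrative example (not in the original file):
#
#     params = initialize_parameters([2, 2, 1])
#     # params["W1"].shape == (2, 2), params["b1"].shape == (2, 1)
#     # params["W2"].shape == (1, 2), params["b2"].shape == (1, 1)
#
# Weights are scaled by 1/sqrt(layer_dims[l-1]) (Xavier-style initialization);
# biases start at zero.
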
def forward_propagation(X, parameters):
    """
    Implements the forward propagation presented in Figure 2.

    Arguments:
    X -- input dataset, of shape (input size, number of examples)
    parameters -- python dictionary containing your parameters "W1", "b1", "W2", "b2", "W3", "b3":
                    W1 -- weight matrix of shape ()
                    b1 -- bias vector of shape ()
                    W2 -- weight matrix of shape ()
                    b2 -- bias vector of shape ()
                    W3 -- weight matrix of shape ()
                    b3 -- bias vector of shape ()

    Returns:
    A3 -- the sigmoid output of the last activation, of shape (1, number of examples)
    cache -- tuple of intermediate values, needed by backward_propagation()
    """

    # retrieve parameters
    W1 = parameters["W1"]
    b1 = parameters["b1"]
    W2 = parameters["W2"]
    b2 = parameters["b2"]
    W3 = parameters["W3"]
    b3 = parameters["b3"]

    # LINEAR -> RELU -> LINEAR -> RELU -> LINEAR -> SIGMOID
    Z1 = np.dot(W1, X) + b1
    A1 = relu(Z1)
    Z2 = np.dot(W2, A1) + b2
    A2 = relu(Z2)
    Z3 = np.dot(W3, A2) + b3
    A3 = sigmoid(Z3)

    cache = (Z1, A1, W1, b1, Z2, A2, W2, b2, Z3, A3, W3, b3)

    return A3, cache

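# Illustrative shape flow (not in the original file): with parameters from
# initialize_parameters([2, 20, 3, 1]) and X of shape (2, m), the layers
# produce Z1/A1 of shape (20, m), Z2/A2 of shape (3, m), and Z3/A3 of shape
# (1, m); A3 holds the per-example probabilities and cache keeps all twelve
# intermediates for backprop.
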
def backward_propagation(X, Y, cache):
    """
    Implement the backward propagation presented in Figure 2.

    Arguments:
    X -- input dataset, of shape (input size, number of examples)
    Y -- true "labels" vector (0 for red, 1 for blue)
    cache -- cache output from forward_propagation()

    Returns:
    gradients -- A dictionary with the gradients with respect to each parameter, activation and pre-activation variables
    """
    m = X.shape[1]
    (Z1, A1, W1, b1, Z2, A2, W2, b2, Z3, A3, W3, b3) = cache

    dZ3 = A3 - Y # gradient of the cross-entropy cost w.r.t. Z3
    dW3 = 1./m * np.dot(dZ3, A2.T)
    db3 = 1./m * np.sum(dZ3, axis=1, keepdims=True)

    dA2 = np.dot(W3.T, dZ3)
    dZ2 = np.multiply(dA2, np.int64(A2 > 0)) # relu'(Z2): 1 where A2 > 0, else 0
    dW2 = 1./m * np.dot(dZ2, A1.T)
    db2 = 1./m * np.sum(dZ2, axis=1, keepdims=True)

    dA1 = np.dot(W2.T, dZ2)
    dZ1 = np.multiply(dA1, np.int64(A1 > 0)) # relu'(Z1): 1 where A1 > 0, else 0
    dW1 = 1./m * np.dot(dZ1, X.T)
    db1 = 1./m * np.sum(dZ1, axis=1, keepdims=True)

    gradients = {"dZ3": dZ3, "dW3": dW3, "db3": db3,
                 "dA2": dA2, "dZ2": dZ2, "dW2": dW2, "db2": db2,
                 "dA1": dA1, "dZ1": dZ1, "dW1": dW1, "db1": db1}

    return gradients

def update_parameters(parameters, grads, learning_rate):
    """
    Update parameters using gradient descent

    Arguments:
    parameters -- python dictionary containing your parameters:
                    parameters['W' + str(i)] = Wi
                    parameters['b' + str(i)] = bi
    grads -- python dictionary containing your gradients for each parameter:
                    grads['dW' + str(i)] = dWi
                    grads['db' + str(i)] = dbi
    learning_rate -- the learning rate, scalar.

    Returns:
    parameters -- python dictionary containing your updated parameters
    """

    n = len(parameters) // 2 # number of layers in the neural network

    # Update rule for each parameter
    for k in range(n):
        parameters["W" + str(k+1)] = parameters["W" + str(k+1)] - learning_rate * grads["dW" + str(k+1)]
        parameters["b" + str(k+1)] = parameters["b" + str(k+1)] - learning_rate * grads["db" + str(k+1)]

    return parameters

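# How the pieces fit together (illustrative sketch, not part of the original
# module; the layer sizes, learning rate, and iteration count below are
# assumed example values):
#
#     parameters = initialize_parameters([train_X.shape[0], 20, 3, 1])
#     for i in range(30000):
#         A3, cache = forward_propagation(train_X, parameters)
#         cost = compute_cost(A3, train_Y)  # defined below
#         grads = backward_propagation(train_X, train_Y, cache)
#         parameters = update_parameters(parameters, grads, learning_rate=0.3)
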
def predict(X, y, parameters):
    """
    This function is used to predict the results of an n-layer neural network.

    Arguments:
    X -- data set of examples you would like to label
    y -- true "labels" vector for X, used to report accuracy
    parameters -- parameters of the trained model

    Returns:
    p -- predictions for the given dataset X
    """

    m = X.shape[1]
    p = np.zeros((1,m), dtype=int) # builtin int: the np.int alias was removed in NumPy 1.24

    # Forward propagation
    a3, caches = forward_propagation(X, parameters)

    # convert probas to 0/1 predictions
    for i in range(0, a3.shape[1]):
        if a3[0,i] > 0.5:
            p[0,i] = 1
        else:
            p[0,i] = 0

    # print results

    #print ("predictions: " + str(p[0,:]))
    #print ("true labels: " + str(y[0,:]))
    print("Accuracy: " + str(np.mean((p[0,:] == y[0,:]))))

    return p

def compute_cost(a3, Y):
    """
    Implement the cost function

    Arguments:
    a3 -- post-activation, output of forward propagation
    Y -- "true" labels vector, same shape as a3

    Returns:
    cost -- value of the cost function
    """
    m = Y.shape[1]

    # cross-entropy: J = -(1/m) * sum over examples of [Y*log(a3) + (1-Y)*log(1-a3)]
    logprobs = np.multiply(-np.log(a3),Y) + np.multiply(-np.log(1 - a3), 1 - Y)
    cost = 1./m * np.nansum(logprobs) # nansum ignores NaN terms arising from 0*log(0)

    return cost

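# Worked example (illustrative): for a single example with Y = 1 and a3 = 0.9,
# logprobs = -log(0.9) ~= 0.105, so a confident correct prediction costs
# little; at a3 = 0.5 the cost rises to -log(0.5) ~= 0.693.
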
def load_dataset():
    train_dataset = h5py.File('datasets/train_catvnoncat.h5', "r")
    train_set_x_orig = np.array(train_dataset["train_set_x"][:]) # your train set features
    train_set_y_orig = np.array(train_dataset["train_set_y"][:]) # your train set labels

    test_dataset = h5py.File('datasets/test_catvnoncat.h5', "r")
    test_set_x_orig = np.array(test_dataset["test_set_x"][:]) # your test set features
    test_set_y_orig = np.array(test_dataset["test_set_y"][:]) # your test set labels

    classes = np.array(test_dataset["list_classes"][:]) # the list of classes

    train_set_y = train_set_y_orig.reshape((1, train_set_y_orig.shape[0]))
    test_set_y = test_set_y_orig.reshape((1, test_set_y_orig.shape[0]))

    train_set_x_orig = train_set_x_orig.reshape(train_set_x_orig.shape[0], -1).T # flatten each image into a column
    test_set_x_orig = test_set_x_orig.reshape(test_set_x_orig.shape[0], -1).T

    train_set_x = train_set_x_orig/255. # scale pixel values to [0, 1]
    test_set_x = test_set_x_orig/255.

    return train_set_x, train_set_y, test_set_x, test_set_y, classes

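# Illustrative shapes, assuming the course's 64x64 RGB cat/non-cat images:
# each image flattens to a 64*64*3 = 12288-entry column, so train_set_x is
# (12288, number of training examples) and train_set_y is (1, m).
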
def predict_dec(parameters, X):
    """
    Used for plotting decision boundary.

    Arguments:
    parameters -- python dictionary containing your parameters
    X -- input data of shape (2, number of examples)

    Returns:
    predictions -- vector of predictions of our model (red: 0 / blue: 1)
    """

    # Predict using forward propagation and a classification threshold of 0.5
    a3, cache = forward_propagation(X, parameters)
    predictions = (a3 > 0.5)
    return predictions

# NOTE: this second definition shadows load_planar_dataset(seed) above; both
# are kept here as they appear in the original course utilities.
def load_planar_dataset(randomness, seed):

    np.random.seed(seed)

    m = 50
    N = int(m/2) # number of points per class
    D = 2 # dimensionality
    X = np.zeros((m,D)) # data matrix where each row is a single example
    Y = np.zeros((m,1), dtype='uint8') # labels vector (0 for red, 1 for blue)
    a = 2 # maximum radius of the flower

    for j in range(2):

        ix = range(N*j,N*(j+1))
        if j == 0:
            t = np.linspace(j, 4*3.1415*(j+1), N) #+ np.random.randn(N)*randomness # theta
            r = 0.3*np.square(t) + np.random.randn(N)*randomness # radius
        if j == 1:
            t = np.linspace(j, 2*3.1415*(j+1), N) #+ np.random.randn(N)*randomness # theta
            r = 0.2*np.square(t) + np.random.randn(N)*randomness # radius

        X[ix] = np.c_[r*np.cos(t), r*np.sin(t)]
        Y[ix] = j

    X = X.T
    Y = Y.T

    return X, Y

def plot_decision_boundary(model, X, y):
    # Set min and max values and give it some padding
    x_min, x_max = X[0, :].min() - 1, X[0, :].max() + 1
    y_min, y_max = X[1, :].min() - 1, X[1, :].max() + 1
    h = 0.01
    # Generate a grid of points with distance h between them
    xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))
    # Predict the function value for the whole grid
    Z = model(np.c_[xx.ravel(), yy.ravel()])
    Z = Z.reshape(xx.shape)
    # Plot the contour and training examples
    plt.contourf(xx, yy, Z, cmap=plt.cm.Spectral)
    plt.ylabel('x2')
    plt.xlabel('x1')
    plt.scatter(X[0, :], X[1, :], c=y.ravel(), cmap=plt.cm.Spectral) # ravel: recent matplotlib rejects a (1, m) color array
    plt.show()

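# Typical call pattern (illustrative), mirroring how the course notebooks
# use this helper:
#
#     plot_decision_boundary(lambda x: predict_dec(parameters, x.T), train_X, train_Y)
#
# The lambda transposes the (npoints, 2) grid built inside this function into
# the (2, npoints) layout that forward_propagation() expects.
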
def load_2D_dataset():
    data = scipy.io.loadmat('datasets/data.mat')
    train_X = data['X'].T
    train_Y = data['y'].T
    test_X = data['Xval'].T
    test_Y = data['yval'].T

    plt.scatter(train_X[0, :], train_X[1, :], c=train_Y.ravel(), s=40, cmap=plt.cm.Spectral) # ravel for matplotlib's c argument

    return train_X, train_Y, test_X, test_Y