GitHub Repository: amanchadha/coursera-deep-learning-specialization
Path: blob/master/C2 - Improving Deep Neural Networks Hyperparameter tuning, Regularization and Optimization/Week 3/tf_utils.py

import h5py
import numpy as np
import tensorflow as tf
import math


def load_dataset():
    train_dataset = h5py.File('datasets/train_signs.h5', "r")
    train_set_x_orig = np.array(train_dataset["train_set_x"][:])  # your train set features
    train_set_y_orig = np.array(train_dataset["train_set_y"][:])  # your train set labels

    test_dataset = h5py.File('datasets/test_signs.h5', "r")
    test_set_x_orig = np.array(test_dataset["test_set_x"][:])  # your test set features
    test_set_y_orig = np.array(test_dataset["test_set_y"][:])  # your test set labels

    classes = np.array(test_dataset["list_classes"][:])  # the list of classes

    train_set_y_orig = train_set_y_orig.reshape((1, train_set_y_orig.shape[0]))
    test_set_y_orig = test_set_y_orig.reshape((1, test_set_y_orig.shape[0]))

    return train_set_x_orig, train_set_y_orig, test_set_x_orig, test_set_y_orig, classes
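
# Illustrative usage sketch (not part of the original utilities), assuming the 64x64x3
# SIGNS images that the rest of this file expects; uncomment and adapt as needed:
#
#     X_train_orig, Y_train_orig, X_test_orig, Y_test_orig, classes = load_dataset()
#     X_train = X_train_orig.reshape(X_train_orig.shape[0], -1).T / 255.  # flatten to (12288, m), scale to [0, 1]
#     Y_train = convert_to_one_hot(Y_train_orig, len(classes))            # one-hot labels, one column per example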

def random_mini_batches(X, Y, mini_batch_size = 64, seed = 0):
    """
    Creates a list of random minibatches from (X, Y)

    Arguments:
    X -- input data, of shape (input size, number of examples)
    Y -- true "label" vector, of shape (1, number of examples)
    mini_batch_size -- size of the mini-batches, integer
    seed -- this is only for the purpose of grading, so that your "random" minibatches are the same as ours.

    Returns:
    mini_batches -- list of synchronous (mini_batch_X, mini_batch_Y)
    """

    m = X.shape[1]  # number of training examples
    mini_batches = []
    np.random.seed(seed)

    # Step 1: Shuffle (X, Y)
    permutation = list(np.random.permutation(m))
    shuffled_X = X[:, permutation]
    shuffled_Y = Y[:, permutation].reshape((Y.shape[0], m))

    # Step 2: Partition (shuffled_X, shuffled_Y). Minus the end case.
    num_complete_minibatches = math.floor(m / mini_batch_size)  # number of mini-batches of size mini_batch_size in your partitioning
    for k in range(0, num_complete_minibatches):
        mini_batch_X = shuffled_X[:, k * mini_batch_size : k * mini_batch_size + mini_batch_size]
        mini_batch_Y = shuffled_Y[:, k * mini_batch_size : k * mini_batch_size + mini_batch_size]
        mini_batch = (mini_batch_X, mini_batch_Y)
        mini_batches.append(mini_batch)

    # Handling the end case (last mini-batch < mini_batch_size)
    if m % mini_batch_size != 0:
        mini_batch_X = shuffled_X[:, num_complete_minibatches * mini_batch_size : m]
        mini_batch_Y = shuffled_Y[:, num_complete_minibatches * mini_batch_size : m]
        mini_batch = (mini_batch_X, mini_batch_Y)
        mini_batches.append(mini_batch)

    return mini_batches
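
# Illustrative sketch (not part of the original utilities): with 5 examples and
# mini_batch_size=2 this yields two full batches plus one smaller end batch.
# Uncomment to try it:
#
#     X_demo = np.random.randn(3, 5)        # 3 features, 5 examples
#     Y_demo = np.arange(5).reshape(1, 5)   # dummy labels
#     batches = random_mini_batches(X_demo, Y_demo, mini_batch_size=2, seed=1)
#     print([b[0].shape for b in batches])  # [(3, 2), (3, 2), (3, 1)]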

def convert_to_one_hot(Y, C):
    # Build a one-hot matrix of shape (C, number of examples): each column has a single 1
    # in the row given by the corresponding label in Y.
    Y = np.eye(C)[Y.reshape(-1)].T
    return Y
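
# Illustrative sketch (not part of the original utilities): one-hot encoding three labels
# drawn from C = 4 classes. Uncomment to try it:
#
#     labels = np.array([[1, 0, 3]])        # shape (1, 3), as returned by load_dataset()
#     print(convert_to_one_hot(labels, 4))  # shape (4, 3), one column per label
#     # [[0. 1. 0.]
#     #  [1. 0. 0.]
#     #  [0. 0. 0.]
#     #  [0. 0. 1.]]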

def predict(X, parameters):
    """
    Runs forward propagation for a single flattened image X of shape (12288, 1) using the
    trained parameters W1, b1, W2, b2, W3, b3, and returns the predicted class label.
    Note: this uses the TensorFlow 1.x placeholder/Session API.
    """
    W1 = tf.convert_to_tensor(parameters["W1"])
    b1 = tf.convert_to_tensor(parameters["b1"])
    W2 = tf.convert_to_tensor(parameters["W2"])
    b2 = tf.convert_to_tensor(parameters["b2"])
    W3 = tf.convert_to_tensor(parameters["W3"])
    b3 = tf.convert_to_tensor(parameters["b3"])

    params = {"W1": W1,
              "b1": b1,
              "W2": W2,
              "b2": b2,
              "W3": W3,
              "b3": b3}

    x = tf.placeholder("float", [12288, 1])

    z3 = forward_propagation_for_predict(x, params)
    p = tf.argmax(z3)

    sess = tf.Session()
    prediction = sess.run(p, feed_dict={x: X})
    sess.close()

    return prediction
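
# Illustrative sketch (not part of the original utilities): classifying one image, assuming
# `parameters` holds weights trained elsewhere (the training code is not in this file) and
# `img` is a 64x64x3 image stored as a numpy array:
#
#     img_flat = img.reshape(64 * 64 * 3, 1) / 255.
#     print("Predicted class:", predict(img_flat, parameters))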

def forward_propagation_for_predict(X, parameters):
    """
    Implements the forward propagation for the model: LINEAR -> RELU -> LINEAR -> RELU -> LINEAR -> SOFTMAX

    Arguments:
    X -- input dataset placeholder, of shape (input size, number of examples)
    parameters -- python dictionary containing your parameters "W1", "b1", "W2", "b2", "W3", "b3"
                  the shapes are given in initialize_parameters

    Returns:
    Z3 -- the output of the last LINEAR unit
    """

    # Retrieve the parameters from the dictionary "parameters"
    W1 = parameters['W1']
    b1 = parameters['b1']
    W2 = parameters['W2']
    b2 = parameters['b2']
    W3 = parameters['W3']
    b3 = parameters['b3']

    # Numpy equivalents are shown in the comments:
    Z1 = tf.add(tf.matmul(W1, X), b1)   # Z1 = np.dot(W1, X) + b1
    A1 = tf.nn.relu(Z1)                 # A1 = relu(Z1)
    Z2 = tf.add(tf.matmul(W2, A1), b2)  # Z2 = np.dot(W2, A1) + b2
    A2 = tf.nn.relu(Z2)                 # A2 = relu(Z2)
    Z3 = tf.add(tf.matmul(W3, A2), b3)  # Z3 = np.dot(W3, A2) + b3

    return Z3
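
# Optional smoke test (not part of the original utilities). A minimal sketch assuming the
# SIGNS dataset files datasets/train_signs.h5 and datasets/test_signs.h5 exist alongside
# this script: load the data, one-hot encode the labels, and inspect the first mini-batch.
if __name__ == "__main__":
    X_train_orig, Y_train_orig, X_test_orig, Y_test_orig, classes = load_dataset()

    # Flatten each image into a column vector and scale pixel values to [0, 1]
    X_train = X_train_orig.reshape(X_train_orig.shape[0], -1).T / 255.
    Y_train = convert_to_one_hot(Y_train_orig, len(classes))

    print("X_train shape:", X_train.shape)
    print("Y_train shape:", Y_train.shape)

    batches = random_mini_batches(X_train, Y_train, mini_batch_size=64, seed=0)
    mini_X, mini_Y = batches[0]
    print("First mini-batch shapes:", mini_X.shape, mini_Y.shape)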