Path: blob/master/Improving Deep Neural Networks Hyperparameter tuning, Regularization and Optimization/week5/Regularization/reg_utils.py
import numpy as np
import matplotlib.pyplot as plt
import h5py
import sklearn
import sklearn.datasets
import sklearn.linear_model
import scipy.io

def sigmoid(x):
    """
    Compute the sigmoid of x

    Arguments:
    x -- A scalar or numpy array of any size.

    Return:
    s -- sigmoid(x)
    """
    s = 1/(1+np.exp(-x))
    return s

def relu(x):
    """
    Compute the relu of x

    Arguments:
    x -- A scalar or numpy array of any size.

    Return:
    s -- relu(x)
    """
    s = np.maximum(0,x)

    return s

def load_planar_dataset(seed):

    np.random.seed(seed)

    m = 400 # number of examples
    N = int(m/2) # number of points per class
    D = 2 # dimensionality
    X = np.zeros((m,D)) # data matrix where each row is a single example
    Y = np.zeros((m,1), dtype='uint8') # labels vector (0 for red, 1 for blue)
    a = 4 # maximum radius of the flower

    for j in range(2):
        ix = range(N*j,N*(j+1))
        t = np.linspace(j*3.12,(j+1)*3.12,N) + np.random.randn(N)*0.2 # theta
        r = a*np.sin(4*t) + np.random.randn(N)*0.2 # radius
        X[ix] = np.c_[r*np.sin(t), r*np.cos(t)]
        Y[ix] = j

    X = X.T
    Y = Y.T

    return X, Y

def initialize_parameters(layer_dims):
    """
    Arguments:
    layer_dims -- python array (list) containing the dimensions of each layer in our network

    Returns:
    parameters -- python dictionary containing your parameters "W1", "b1", ..., "WL", "bL":
                    Wl -- weight matrix of shape (layer_dims[l], layer_dims[l-1])
                    bl -- bias vector of shape (layer_dims[l], 1)

    Tips:
    - For example: the layer_dims for the "Planar Data classification model" would have been [2,2,1].
      This means W1's shape was (2,2), b1 was (2,1), W2 was (1,2) and b2 was (1,1). Now you have to generalize it!
    - In the for loop, use parameters['W' + str(l)] to access Wl, where l is the iterative integer.
    """

    np.random.seed(3)
    parameters = {}
    L = len(layer_dims) # number of layers in the network

    for l in range(1, L):
        parameters['W' + str(l)] = np.random.randn(layer_dims[l], layer_dims[l-1]) / np.sqrt(layer_dims[l-1])
        parameters['b' + str(l)] = np.zeros((layer_dims[l], 1))

        assert(parameters['W' + str(l)].shape == (layer_dims[l], layer_dims[l-1]))
        assert(parameters['b' + str(l)].shape == (layer_dims[l], 1))

    return parameters

def forward_propagation(X, parameters):
    """
    Implements the forward propagation presented in Figure 2.

    Arguments:
    X -- input dataset, of shape (input size, number of examples)
    parameters -- python dictionary containing your parameters "W1", "b1", "W2", "b2", "W3", "b3":
                    W1 -- weight matrix of shape (layer 1 size, input size)
                    b1 -- bias vector of shape (layer 1 size, 1)
                    W2 -- weight matrix of shape (layer 2 size, layer 1 size)
                    b2 -- bias vector of shape (layer 2 size, 1)
                    W3 -- weight matrix of shape (output size, layer 2 size)
                    b3 -- bias vector of shape (output size, 1)

    Returns:
    A3 -- output of the last (sigmoid) activation, of shape (1, number of examples)
    cache -- tuple of intermediate values, needed by backward_propagation()
    """

    # retrieve parameters
    W1 = parameters["W1"]
    b1 = parameters["b1"]
    W2 = parameters["W2"]
    b2 = parameters["b2"]
    W3 = parameters["W3"]
    b3 = parameters["b3"]

    # LINEAR -> RELU -> LINEAR -> RELU -> LINEAR -> SIGMOID
    Z1 = np.dot(W1, X) + b1
    A1 = relu(Z1)
    Z2 = np.dot(W2, A1) + b2
    A2 = relu(Z2)
    Z3 = np.dot(W3, A2) + b3
    A3 = sigmoid(Z3)

    cache = (Z1, A1, W1, b1, Z2, A2, W2, b2, Z3, A3, W3, b3)

    return A3, cache

def backward_propagation(X, Y, cache):
    """
    Implement the backward propagation presented in Figure 2.

    Arguments:
    X -- input dataset, of shape (input size, number of examples)
    Y -- true "label" vector (containing 0s and 1s)
    cache -- cache output from forward_propagation()

    Returns:
    gradients -- A dictionary with the gradients with respect to each parameter, activation and pre-activation variable
    """
    m = X.shape[1]
    (Z1, A1, W1, b1, Z2, A2, W2, b2, Z3, A3, W3, b3) = cache

    dZ3 = A3 - Y
    dW3 = 1./m * np.dot(dZ3, A2.T)
    db3 = 1./m * np.sum(dZ3, axis=1, keepdims=True)

    dA2 = np.dot(W3.T, dZ3)
    dZ2 = np.multiply(dA2, np.int64(A2 > 0))
    dW2 = 1./m * np.dot(dZ2, A1.T)
    db2 = 1./m * np.sum(dZ2, axis=1, keepdims=True)

    dA1 = np.dot(W2.T, dZ2)
    dZ1 = np.multiply(dA1, np.int64(A1 > 0))
    dW1 = 1./m * np.dot(dZ1, X.T)
    db1 = 1./m * np.sum(dZ1, axis=1, keepdims=True)

    gradients = {"dZ3": dZ3, "dW3": dW3, "db3": db3,
                 "dA2": dA2, "dZ2": dZ2, "dW2": dW2, "db2": db2,
                 "dA1": dA1, "dZ1": dZ1, "dW1": dW1, "db1": db1}

    return gradients

def update_parameters(parameters, grads, learning_rate):
    """
    Update parameters using gradient descent

    Arguments:
    parameters -- python dictionary containing your parameters:
                    parameters['W' + str(i)] = Wi
                    parameters['b' + str(i)] = bi
    grads -- python dictionary containing your gradients for each parameter:
                    grads['dW' + str(i)] = dWi
                    grads['db' + str(i)] = dbi
    learning_rate -- the learning rate, scalar.

    Returns:
    parameters -- python dictionary containing your updated parameters
    """

    n = len(parameters) // 2 # number of layers in the neural network

    # Update rule for each parameter
    for k in range(n):
        parameters["W" + str(k+1)] = parameters["W" + str(k+1)] - learning_rate * grads["dW" + str(k+1)]
        parameters["b" + str(k+1)] = parameters["b" + str(k+1)] - learning_rate * grads["db" + str(k+1)]

    return parameters

def predict(X, y, parameters):
    """
    This function is used to predict the results of an n-layer neural network.

    Arguments:
    X -- data set of examples you would like to label
    y -- true labels, used to print the accuracy
    parameters -- parameters of the trained model

    Returns:
    p -- predictions for the given dataset X
    """

    m = X.shape[1]
    p = np.zeros((1,m), dtype=int)

    # Forward propagation
    a3, caches = forward_propagation(X, parameters)

    # convert probabilities to 0/1 predictions
    for i in range(0, a3.shape[1]):
        if a3[0,i] > 0.5:
            p[0,i] = 1
        else:
            p[0,i] = 0

    # print results

    #print ("predictions: " + str(p[0,:]))
    #print ("true labels: " + str(y[0,:]))
    print("Accuracy: " + str(np.mean((p[0,:] == y[0,:]))))

    return p

def compute_cost(a3, Y):
    """
    Implement the cost function

    Arguments:
    a3 -- post-activation, output of forward propagation
    Y -- "true" labels vector, same shape as a3

    Returns:
    cost -- value of the cost function
    """
    m = Y.shape[1]

    logprobs = np.multiply(-np.log(a3),Y) + np.multiply(-np.log(1 - a3), 1 - Y)
    cost = 1./m * np.nansum(logprobs)

    return cost

def load_dataset():
    train_dataset = h5py.File('datasets/train_catvnoncat.h5', "r")
    train_set_x_orig = np.array(train_dataset["train_set_x"][:]) # your train set features
    train_set_y_orig = np.array(train_dataset["train_set_y"][:]) # your train set labels

    test_dataset = h5py.File('datasets/test_catvnoncat.h5', "r")
    test_set_x_orig = np.array(test_dataset["test_set_x"][:]) # your test set features
    test_set_y_orig = np.array(test_dataset["test_set_y"][:]) # your test set labels

    classes = np.array(test_dataset["list_classes"][:]) # the list of classes

    train_set_y = train_set_y_orig.reshape((1, train_set_y_orig.shape[0]))
    test_set_y = test_set_y_orig.reshape((1, test_set_y_orig.shape[0]))

    train_set_x_orig = train_set_x_orig.reshape(train_set_x_orig.shape[0], -1).T
    test_set_x_orig = test_set_x_orig.reshape(test_set_x_orig.shape[0], -1).T

    train_set_x = train_set_x_orig/255
    test_set_x = test_set_x_orig/255

    return train_set_x, train_set_y, test_set_x, test_set_y, classes


def predict_dec(parameters, X):
    """
    Used for plotting decision boundary.

    Arguments:
    parameters -- python dictionary containing your parameters
    X -- input data of shape (input size, number of examples)

    Returns:
    predictions -- vector of predictions of our model (red: 0 / blue: 1)
    """

    # Predict using forward propagation and a classification threshold of 0.5
    a3, cache = forward_propagation(X, parameters)
    predictions = (a3 > 0.5)
    return predictions

# Note: this definition rebinds the name load_planar_dataset defined above, with a different signature.
def load_planar_dataset(randomness, seed):

    np.random.seed(seed)

    m = 50
    N = int(m/2) # number of points per class
    D = 2 # dimensionality
    X = np.zeros((m,D)) # data matrix where each row is a single example
    Y = np.zeros((m,1), dtype='uint8') # labels vector (0 for red, 1 for blue)
    a = 2 # maximum radius of the flower

    for j in range(2):

        ix = range(N*j,N*(j+1))
        if j == 0:
            t = np.linspace(j, 4*3.1415*(j+1), N) #+ np.random.randn(N)*randomness # theta
            r = 0.3*np.square(t) + np.random.randn(N)*randomness # radius
        if j == 1:
            t = np.linspace(j, 2*3.1415*(j+1), N) #+ np.random.randn(N)*randomness # theta
            r = 0.2*np.square(t) + np.random.randn(N)*randomness # radius

        X[ix] = np.c_[r*np.cos(t), r*np.sin(t)]
        Y[ix] = j

    X = X.T
    Y = Y.T

    return X, Y

def plot_decision_boundary(model, X, y):
    # Set min and max values and give it some padding
    x_min, x_max = X[0, :].min() - 1, X[0, :].max() + 1
    y_min, y_max = X[1, :].min() - 1, X[1, :].max() + 1
    h = 0.01
    # Generate a grid of points with distance h between them
    xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))
    # Predict the function value for the whole grid
    Z = model(np.c_[xx.ravel(), yy.ravel()])
    Z = Z.reshape(xx.shape)
    # Plot the contour and training examples (flatten labels for matplotlib's c= argument)
    plt.contourf(xx, yy, Z, cmap=plt.cm.Spectral)
    plt.ylabel('x2')
    plt.xlabel('x1')
    plt.scatter(X[0, :], X[1, :], c=y.ravel(), cmap=plt.cm.Spectral)
    plt.show()

def load_2D_dataset():
    data = scipy.io.loadmat('datasets/data.mat')
    train_X = data['X'].T
    train_Y = data['y'].T
    test_X = data['Xval'].T
    test_Y = data['yval'].T

    plt.scatter(train_X[0, :], train_X[1, :], c=train_Y.ravel(), s=40, cmap=plt.cm.Spectral)

    return train_X, train_Y, test_X, test_Y
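A minimal usage sketch, assuming this file is importable as reg_utils and that datasets/data.mat from the Regularization assignment is available: the helpers above compose into a plain gradient-descent training loop. The layer sizes, learning rate, and iteration count below are illustrative choices, not values prescribed by this file.

from reg_utils import (load_2D_dataset, initialize_parameters, forward_propagation,
                       compute_cost, backward_propagation, update_parameters,
                       predict, predict_dec, plot_decision_boundary)

# Load the 2D dataset (expects datasets/data.mat next to the script)
train_X, train_Y, test_X, test_Y = load_2D_dataset()

# Three-layer network: input -> 20 -> 3 -> 1 (illustrative layer sizes)
parameters = initialize_parameters([train_X.shape[0], 20, 3, 1])

for i in range(10000):  # illustrative iteration count
    a3, cache = forward_propagation(train_X, parameters)      # LINEAR->RELU->LINEAR->RELU->LINEAR->SIGMOID
    cost = compute_cost(a3, train_Y)                          # cross-entropy cost
    grads = backward_propagation(train_X, train_Y, cache)     # gradients for all layers
    parameters = update_parameters(parameters, grads, learning_rate=0.3)  # illustrative learning rate
    if i % 1000 == 0:
        print("Cost after iteration {}: {:.6f}".format(i, cost))

# Report test accuracy and visualise the learned decision boundary
predictions = predict(test_X, test_Y, parameters)
plot_decision_boundary(lambda x: predict_dec(parameters, x.T), train_X, train_Y)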