Path: blob/master/C2 - Improving Deep Neural Networks Hyperparameter tuning, Regularization and Optimization/Week 2/opt_utils.py
import numpy as np
import matplotlib.pyplot as plt
import h5py
import scipy.io
import sklearn
import sklearn.datasets

def sigmoid(x):
    """
    Compute the sigmoid of x

    Arguments:
    x -- A scalar or numpy array of any size.

    Return:
    s -- sigmoid(x)
    """
    s = 1/(1+np.exp(-x))
    return s

def relu(x):
    """
    Compute the relu of x

    Arguments:
    x -- A scalar or numpy array of any size.

    Return:
    s -- relu(x)
    """
    s = np.maximum(0,x)

    return s

def load_params_and_grads(seed=1):
    np.random.seed(seed)
    W1 = np.random.randn(2,3)
    b1 = np.random.randn(2,1)
    W2 = np.random.randn(3,3)
    b2 = np.random.randn(3,1)

    dW1 = np.random.randn(2,3)
    db1 = np.random.randn(2,1)
    dW2 = np.random.randn(3,3)
    db2 = np.random.randn(3,1)

    return W1, b1, W2, b2, dW1, db1, dW2, db2


def initialize_parameters(layer_dims):
    """
    Arguments:
    layer_dims -- python array (list) containing the dimensions of each layer in our network

    Returns:
    parameters -- python dictionary containing your parameters "W1", "b1", ..., "WL", "bL":
                    W1 -- weight matrix of shape (layer_dims[1], layer_dims[0])
                    b1 -- bias vector of shape (layer_dims[1], 1)
                    Wl -- weight matrix of shape (layer_dims[l], layer_dims[l-1])
                    bl -- bias vector of shape (layer_dims[l], 1)

    Tips:
    - For example: the layer_dims for the "Planar Data classification model" would have been [2,2,1].
    This means W1's shape was (2,2), b1 was (2,1), W2 was (1,2) and b2 was (1,1). Now you have to generalize it!
    - In the for loop, use parameters['W' + str(l)] to access Wl, where l is the iterative integer.
    """

    np.random.seed(3)
    parameters = {}
    L = len(layer_dims) # number of layers in the network

    for l in range(1, L):
        # He initialization: scale by sqrt(2 / fan_in), well suited to ReLU layers
        parameters['W' + str(l)] = np.random.randn(layer_dims[l], layer_dims[l-1]) * np.sqrt(2 / layer_dims[l-1])
        parameters['b' + str(l)] = np.zeros((layer_dims[l], 1))

        # compare against shape tuples; asserting a bare (condition, value) pair is always truthy
        assert parameters['W' + str(l)].shape == (layer_dims[l], layer_dims[l-1])
        assert parameters['b' + str(l)].shape == (layer_dims[l], 1)

    return parameters


def compute_cost(a3, Y):
    """
    Implement the cost function

    Arguments:
    a3 -- post-activation, output of forward propagation
    Y -- "true" labels vector, same shape as a3

    Returns:
    cost -- value of the cost function
    """
    m = Y.shape[1]

    logprobs = np.multiply(-np.log(a3), Y) + np.multiply(-np.log(1 - a3), 1 - Y)
    cost = 1./m * np.sum(logprobs)

    return cost

def forward_propagation(X, parameters):
    """
    Implements forward propagation for the LINEAR->RELU->LINEAR->RELU->LINEAR->SIGMOID model.

    Arguments:
    X -- input dataset, of shape (input size, number of examples)
    parameters -- python dictionary containing your parameters "W1", "b1", "W2", "b2", "W3", "b3"

    Returns:
    a3 -- output of the last (sigmoid) activation
    cache -- tuple of intermediate values, needed by backward_propagation()
    """

    # retrieve parameters
    W1 = parameters["W1"]
    b1 = parameters["b1"]
    W2 = parameters["W2"]
    b2 = parameters["b2"]
    W3 = parameters["W3"]
    b3 = parameters["b3"]

    # LINEAR -> RELU -> LINEAR -> RELU -> LINEAR -> SIGMOID
    z1 = np.dot(W1, X) + b1
    a1 = relu(z1)
    z2 = np.dot(W2, a1) + b2
    a2 = relu(z2)
    z3 = np.dot(W3, a2) + b3
    a3 = sigmoid(z3)

    cache = (z1, a1, W1, b1, z2, a2, W2, b2, z3, a3, W3, b3)

    return a3, cache

def backward_propagation(X, Y, cache):
    """
    Implement the backward propagation for the model above.

    Arguments:
    X -- input dataset, of shape (input size, number of examples)
    Y -- true "label" vector (containing 0 if cat, 1 if non-cat)
    cache -- cache output from forward_propagation()

    Returns:
    gradients -- A dictionary with the gradients with respect to each parameter, activation and pre-activation variables
    """
    m = X.shape[1]
    (z1, a1, W1, b1, z2, a2, W2, b2, z3, a3, W3, b3) = cache

    dz3 = 1./m * (a3 - Y)
    dW3 = np.dot(dz3, a2.T)
    db3 = np.sum(dz3, axis=1, keepdims=True)

    da2 = np.dot(W3.T, dz3)
    dz2 = np.multiply(da2, np.int64(a2 > 0))    # ReLU derivative: 1 where a2 > 0, else 0
    dW2 = np.dot(dz2, a1.T)
    db2 = np.sum(dz2, axis=1, keepdims=True)

    da1 = np.dot(W2.T, dz2)
    dz1 = np.multiply(da1, np.int64(a1 > 0))
    dW1 = np.dot(dz1, X.T)
    db1 = np.sum(dz1, axis=1, keepdims=True)

    gradients = {"dz3": dz3, "dW3": dW3, "db3": db3,
                 "da2": da2, "dz2": dz2, "dW2": dW2, "db2": db2,
                 "da1": da1, "dz1": dz1, "dW1": dW1, "db1": db1}

    return gradients

def predict(X, y, parameters):
    """
    This function is used to predict the results of an n-layer neural network.

    Arguments:
    X -- data set of examples you would like to label
    y -- true labels, of shape (1, number of examples)
    parameters -- parameters of the trained model

    Returns:
    p -- predictions for the given dataset X
    """

    m = X.shape[1]
    p = np.zeros((1, m), dtype=int)   # np.int was removed in NumPy 1.24; use the builtin

    # Forward propagation
    a3, caches = forward_propagation(X, parameters)

    # convert probas to 0/1 predictions
    for i in range(0, a3.shape[1]):
        if a3[0,i] > 0.5:
            p[0,i] = 1
        else:
            p[0,i] = 0

    # print results
    #print ("predictions: " + str(p[0,:]))
    #print ("true labels: " + str(y[0,:]))
    print("Accuracy: " + str(np.mean((p[0,:] == y[0,:]))))

    return p

def load_2D_dataset():
    data = scipy.io.loadmat('datasets/data.mat')
    train_X = data['X'].T
    train_Y = data['y'].T
    test_X = data['Xval'].T
    test_Y = data['yval'].T

    # ravel() flattens the (1, m) label array; recent matplotlib rejects 2D color arrays
    plt.scatter(train_X[0, :], train_X[1, :], c=train_Y.ravel(), s=40, cmap=plt.cm.Spectral)

    return train_X, train_Y, test_X, test_Y

def plot_decision_boundary(model, X, y):
    # Set min and max values and give it some padding
    x_min, x_max = X[0, :].min() - 1, X[0, :].max() + 1
    y_min, y_max = X[1, :].min() - 1, X[1, :].max() + 1
    h = 0.01
    # Generate a grid of points with distance h between them
    xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))
    # Predict the function value for the whole grid
    Z = model(np.c_[xx.ravel(), yy.ravel()])
    Z = Z.reshape(xx.shape)
    # Plot the contour and training examples
    plt.contourf(xx, yy, Z, cmap=plt.cm.Spectral)
    plt.ylabel('x2')
    plt.xlabel('x1')
    plt.scatter(X[0, :], X[1, :], c=y.ravel(), cmap=plt.cm.Spectral)
    plt.show()

def predict_dec(parameters, X):
    """
    Used for plotting decision boundary.

    Arguments:
    parameters -- python dictionary containing your parameters
    X -- input data of size (m, K)

    Returns:
    predictions -- vector of predictions of our model (red: 0 / blue: 1)
    """

    # Predict using forward propagation and a classification threshold of 0.5
    a3, cache = forward_propagation(X, parameters)
    predictions = (a3 > 0.5)
    return predictions

def load_dataset():
    np.random.seed(3)
    train_X, train_Y = sklearn.datasets.make_moons(n_samples=300, noise=.2)
    # Visualize the data
    plt.scatter(train_X[:, 0], train_X[:, 1], c=train_Y, s=40, cmap=plt.cm.Spectral)
    train_X = train_X.T
    train_Y = train_Y.reshape((1, train_Y.shape[0]))

    return train_X, train_Y
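For reference, a minimal usage sketch follows, showing how these helpers compose into one plain gradient-descent training loop on the moons dataset. It is not part of the original course file; the layer sizes, learning rate, and iteration count are illustrative assumptions.

# --- Usage sketch (assumed, not from the original course file) --------------
if __name__ == "__main__":
    train_X, train_Y = load_dataset()                 # shapes (2, 300) and (1, 300)
    # three weight/bias pairs to match forward_propagation's W1..W3; sizes assumed
    parameters = initialize_parameters([train_X.shape[0], 10, 5, 1])

    learning_rate = 0.01                              # assumed hyperparameter
    for i in range(1000):
        a3, cache = forward_propagation(train_X, parameters)
        grads = backward_propagation(train_X, train_Y, cache)

        # vanilla gradient descent update on each layer's weights and biases
        for l in range(1, 4):
            parameters["W" + str(l)] -= learning_rate * grads["dW" + str(l)]
            parameters["b" + str(l)] -= learning_rate * grads["db" + str(l)]

        if i % 200 == 0:
            print("Cost after iteration %i: %f" % (i, compute_cost(a3, train_Y)))

    predict(train_X, train_Y, parameters)             # prints training accuracy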