Path: blob/main/C1 - Supervised Machine Learning - Regression and Classification/week3/C1W3A1/utils.py
import numpy as np
import matplotlib.pyplot as plt


def load_data(filename):
    """Load a comma-separated dataset: first two columns are features, third is the label."""
    data = np.loadtxt(filename, delimiter=',')
    X = data[:, :2]
    y = data[:, 2]
    return X, y


def sig(z):
    """Sigmoid (logistic) function."""
    return 1 / (1 + np.exp(-z))


def map_feature(X1, X2):
    """
    Feature mapping function to polynomial features up to degree 6.
    Maps the two input features to all monomials X1^(i-j) * X2^j for
    1 <= i <= 6, 0 <= j <= i (27 features in total).
    """
    X1 = np.atleast_1d(X1)
    X2 = np.atleast_1d(X2)
    degree = 6
    out = []
    for i in range(1, degree + 1):
        for j in range(i + 1):
            out.append((X1 ** (i - j)) * (X2 ** j))
    return np.stack(out, axis=1)


def plot_data(X, y, pos_label="y=1", neg_label="y=0"):
    positive = y == 1
    negative = y == 0

    # Plot examples
    plt.plot(X[positive, 0], X[positive, 1], 'k+', label=pos_label)
    plt.plot(X[negative, 0], X[negative, 1], 'yo', label=neg_label)


def plot_decision_boundary(w, b, X, y):
    # Credit to dibgerge on GitHub for this plotting code

    plot_data(X[:, 0:2], y)

    if X.shape[1] <= 2:
        # Linear boundary: solve w[0]*x0 + w[1]*x1 + b = 0 for x1
        plot_x = np.array([min(X[:, 0]), max(X[:, 0])])
        plot_y = (-1. / w[1]) * (w[0] * plot_x + b)

        plt.plot(plot_x, plot_y, c="b")

    else:
        # Non-linear boundary: evaluate the model over a grid of mapped features
        u = np.linspace(-1, 1.5, 50)
        v = np.linspace(-1, 1.5, 50)

        z = np.zeros((len(u), len(v)))

        # Evaluate z = theta*x over the grid
        for i in range(len(u)):
            for j in range(len(v)):
                z[i, j] = sig(np.dot(map_feature(u[i], v[j]), w) + b)

        # important to transpose z before calling contour
        z = z.T

        # Plot the z = 0.5 level set (the decision boundary)
        plt.contour(u, v, z, levels=[0.5], colors="g")
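

# ---------------------------------------------------------------------------
# Usage sketch: fabricates a small synthetic dataset and placeholder
# (untrained) parameters just to exercise map_feature and plot_decision_boundary.
# In the lab, X and y come from load_data on the assignment's data file and
# (w, b) from gradient descent; everything below is illustrative only.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    rng = np.random.default_rng(0)
    X = rng.uniform(-1, 1.5, size=(100, 2))                 # synthetic 2-feature inputs
    y = (X[:, 0] ** 2 + X[:, 1] ** 2 < 1).astype(float)     # label: inside the unit circle

    X_mapped = map_feature(X[:, 0], X[:, 1])                 # shape (100, 27)

    # Placeholder parameters of the right shape (not trained weights)
    w = rng.normal(scale=0.5, size=X_mapped.shape[1])
    b = 0.0

    plot_decision_boundary(w, b, X_mapped, y)
    plt.legend()
    plt.show()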