GitHub Repository: greyhatguy007/Machine-Learning-Specialization-Coursera
Path: blob/main/C1 - Supervised Machine Learning - Regression and Classification/week3/C1W3A1/utils.py
import numpy as np
import matplotlib.pyplot as plt


def load_data(filename):
    """Load a comma-separated data file and split it into features X and labels y."""
    data = np.loadtxt(filename, delimiter=',')
    X = data[:, :2]
    y = data[:, 2]
    return X, y


def sig(z):
    """Compute the sigmoid of z, 1 / (1 + e^(-z)), elementwise."""
    return 1 / (1 + np.exp(-z))
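
# A few spot checks on sig (illustrative values, not part of the original file):
#   sig(0)   -> 0.5
#   sig(10)  -> ~0.99995 (approaches 1 for large positive z)
#   sig(-10) -> ~0.00005 (approaches 0 for large negative z)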


def map_feature(X1, X2):
    """
    Map the two input features to all polynomial terms of x1 and x2
    up to the sixth power (27 features in total).
    """
    X1 = np.atleast_1d(X1)
    X2 = np.atleast_1d(X2)
    degree = 6
    out = []
    for i in range(1, degree + 1):
        for j in range(i + 1):
            out.append((X1 ** (i - j)) * (X2 ** j))
    return np.stack(out, axis=1)
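
# A minimal usage sketch (illustrative values, not part of the original file):
# mapping a point yields one row of 27 polynomial features, ordered as
# x1, x2, x1^2, x1*x2, x2^2, x1^3, ..., x2^6.
#
#   map_feature(0.5, -0.5).shape                 # (1, 27)
#   map_feature(np.zeros(3), np.ones(3)).shape   # (3, 27)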


def plot_data(X, y, pos_label="y=1", neg_label="y=0"):
    """Scatter-plot the positive examples as black '+' and the negative examples as yellow 'o'."""
    positive = y == 1
    negative = y == 0

    # Plot examples
    plt.plot(X[positive, 0], X[positive, 1], 'k+', label=pos_label)
    plt.plot(X[negative, 0], X[negative, 1], 'yo', label=neg_label)


def plot_decision_boundary(w, b, X, y):
    """Plot the training data together with the decision boundary defined by w and b."""
    # Credit to dibgerge on GitHub for this plotting code
    plot_data(X[:, 0:2], y)

    if X.shape[1] <= 2:
        # Two features: the boundary w[0]*x1 + w[1]*x2 + b = 0 is a straight line
        plot_x = np.array([min(X[:, 0]), max(X[:, 0])])
        plot_y = (-1. / w[1]) * (w[0] * plot_x + b)

        plt.plot(plot_x, plot_y, c="b")
    else:
        # Polynomial-mapped features: evaluate the model on a grid and draw a contour
        u = np.linspace(-1, 1.5, 50)
        v = np.linspace(-1, 1.5, 50)

        z = np.zeros((len(u), len(v)))

        # Evaluate the sigmoid of w . map_feature(u, v) + b over the grid
        for i in range(len(u)):
            for j in range(len(v)):
                z[i, j] = sig(np.dot(map_feature(u[i], v[j]), w) + b)

        # Important to transpose z before calling contour
        z = z.T

        # Plot the decision boundary, i.e. the contour where the sigmoid equals 0.5
        plt.contour(u, v, z, levels=[0.5], colors="g")
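
# A minimal end-to-end sketch of how these helpers fit together, using synthetic
# data and hand-picked parameters purely for illustration (the course assignment
# supplies its own data file and learns w and b with gradient descent).
if __name__ == "__main__":
    rng = np.random.default_rng(0)
    X_demo = rng.uniform(-1, 1.5, size=(100, 2))
    # Toy labeling rule: points inside the circle x1^2 + x2^2 = 0.8 are positive
    y_demo = (X_demo[:, 0] ** 2 + X_demo[:, 1] ** 2 < 0.8).astype(float)

    # Hand-picked weights over the 27 mapped features that reproduce that circle:
    # indices 2 and 4 are the x1^2 and x2^2 terms
    w_demo = np.zeros(27)
    w_demo[2] = -1.0
    w_demo[4] = -1.0
    b_demo = 0.8

    plot_decision_boundary(w_demo, b_demo, map_feature(X_demo[:, 0], X_demo[:, 1]), y_demo)
    plt.legend()
    plt.show()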