Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
jxareas
GitHub Repository: jxareas/Machine-Learning-Notebooks
Path: blob/master/1_Supervised_Machine_Learning/Week 3. Classification/plt_logistic_loss.py
2826 views
1
"""----------------------------------------------------------------
2
logistic_loss plotting routines and support
3
"""
4
5
from matplotlib import cm
6
from lab_utils_common import sigmoid, dlblue, dlorange, np, plt, compute_cost_matrix
7
8
def compute_cost_logistic_sq_err(X, y, w, b):
    """
    Compute squared-error cost on logistic data (for demonstration only — a
    negative example of why squared error is not used with logistic models
    in practice).

    Args:
      X (ndarray): Shape (m,n) matrix of examples with multiple features
      y (ndarray): Shape (m,)  target values
      w (ndarray): Shape (n)   parameters for prediction
      b (scalar):  parameter for prediction
    Returns:
      cost (scalar): cost
    """
    m = X.shape[0]
    cost = 0.0
    for i in range(m):
        z_i = np.dot(X[i], w) + b
        # add sigmoid on top of the normal squared-error cost for linear regression
        f_wb_i = sigmoid(z_i)
        cost = cost + (f_wb_i - y[i]) ** 2
    cost = cost / (2 * m)
    return np.squeeze(cost)
26
27
def plt_logistic_squared_error(X, y):
    """ plots logistic squared error for demonstration """
    # evaluate the squared-error cost over a grid of (w, b) candidates
    w_grid, b_grid = np.meshgrid(np.linspace(-6, 12, 50),
                                 np.linspace(10, -20, 40))
    grid_pts = np.c_[w_grid.ravel(), b_grid.ravel()]
    cost = np.zeros(grid_pts.shape[0])

    for k, (w_k, b_k) in enumerate(grid_pts):
        cost[k] = compute_cost_logistic_sq_err(X.reshape(-1, 1), y, w_k, b_k)
    cost = cost.reshape(w_grid.shape)

    fig = plt.figure()
    # hide the interactive-canvas chrome
    fig.canvas.toolbar_visible = False
    fig.canvas.header_visible = False
    fig.canvas.footer_visible = False

    ax = fig.add_subplot(1, 1, 1, projection='3d')
    ax.plot_surface(w_grid, b_grid, cost, alpha=0.6, cmap=cm.jet)

    ax.set_xlabel('w', fontsize=16)
    ax.set_ylabel('b', fontsize=16)
    ax.set_zlabel("Cost", rotation=90, fontsize=16)
    ax.set_title('"Logistic" Squared Error Cost vs (w, b)')
    # transparent panes so the surface stands out
    for axis in (ax.xaxis, ax.yaxis, ax.zaxis):
        axis.set_pane_color((1.0, 1.0, 1.0, 0.0))
53
54
55
def plt_logistic_cost(X, y):
    """ plots logistic cost """
    # sample the logistic (cross-entropy) cost on a grid of (w, b) values
    w_grid, b_grid = np.meshgrid(np.linspace(-6, 12, 50),
                                 np.linspace(0, -20, 40))
    grid_pts = np.c_[w_grid.ravel(), b_grid.ravel()]
    # extended precision — NOTE(review): presumably guards against overflow
    # at extreme (w, b); confirm against compute_cost_matrix's 'safe' path
    cost = np.zeros(grid_pts.shape[0], dtype=np.longdouble)

    for k in range(grid_pts.shape[0]):
        w_k, b_k = grid_pts[k]
        cost[k] = compute_cost_matrix(X.reshape(-1, 1), y, w_k, b_k,
                                      logistic=True, safe=True)
    cost = cost.reshape(w_grid.shape)

    fig = plt.figure(figsize=(9, 5))
    # hide the interactive-canvas chrome
    fig.canvas.toolbar_visible = False
    fig.canvas.header_visible = False
    fig.canvas.footer_visible = False

    # left panel: raw cost surface
    ax = fig.add_subplot(1, 2, 1, projection='3d')
    ax.plot_surface(w_grid, b_grid, cost, alpha=0.6, cmap=cm.jet)

    ax.set_xlabel('w', fontsize=16)
    ax.set_ylabel('b', fontsize=16)
    ax.set_zlabel("Cost", rotation=90, fontsize=16)
    ax.set_title('Logistic Cost vs (w, b)')
    for axis in (ax.xaxis, ax.yaxis, ax.zaxis):
        axis.set_pane_color((1.0, 1.0, 1.0, 0.0))

    # right panel: same surface on a log scale to reveal its convex shape
    ax = fig.add_subplot(1, 2, 2, projection='3d')
    ax.plot_surface(w_grid, b_grid, np.log(cost), alpha=0.6, cmap=cm.jet)

    ax.set_xlabel('w', fontsize=16)
    ax.set_ylabel('b', fontsize=16)
    ax.set_zlabel('\nlog(Cost)', fontsize=16)
    ax.set_title('log(Logistic Cost) vs (w, b)')
    for axis in (ax.xaxis, ax.yaxis, ax.zaxis):
        axis.set_pane_color((1.0, 1.0, 1.0, 0.0))

    plt.show()
    return cost
96
97
98
def soup_bowl():
    """ creates 3D quadratic error surface """
    # Create figure and plot with a 3D projection
    fig = plt.figure(figsize=(4, 4))
    fig.canvas.toolbar_visible = False
    fig.canvas.header_visible = False
    fig.canvas.footer_visible = False

    # Plot configuration
    ax = fig.add_subplot(111, projection='3d')
    ax.xaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    ax.yaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    ax.zaxis.set_pane_color((1.0, 1.0, 1.0, 0.0))
    ax.zaxis.set_rotate_label(False)
    ax.view_init(15, -120)

    # Useful linspaces to give values to the parameters w and b
    w = np.linspace(-20, 20, 100)
    b = np.linspace(-20, 20, 100)

    # Meshgrid used for plotting 3D functions
    W, B = np.meshgrid(w, b)

    # Bowl-shaped cost surface: z[i, j] = w[j]**2 + b[i]**2.
    # Vectorized form of the original nested Python loop — produces the
    # identical array in a single NumPy operation.
    z = W**2 + B**2

    # Create the 3D surface plot of the bowl-shaped cost function
    ax.plot_surface(W, B, z, cmap="Spectral_r", alpha=0.7, antialiased=False)
    ax.plot_wireframe(W, B, z, color='k', alpha=0.1)
    ax.set_xlabel("$w$")
    ax.set_ylabel("$b$")
    ax.set_zlabel("Cost", rotation=90)
    ax.set_title("Squared Error Cost used in Linear Regression")

    plt.show()
140
141
142
def plt_simple_example(x, y):
    """ plots tumor data """
    # boolean masks for the two target classes
    malignant = (y == 1)
    benign = (y == 0)

    fig, ax = plt.subplots(1, 1, figsize=(5, 3))
    # hide the interactive-canvas chrome
    fig.canvas.toolbar_visible = False
    fig.canvas.header_visible = False
    fig.canvas.footer_visible = False

    ax.scatter(x[malignant], y[malignant], marker='x', s=80, c='red',
               label="malignant")
    ax.scatter(x[benign], y[benign], marker='o', s=100, label="benign",
               facecolors='none', edgecolors=dlblue, lw=3)
    ax.set_ylim(-0.075, 1.1)
    ax.set_ylabel('y')
    ax.set_xlabel('Tumor Size')
    ax.legend(loc='lower right')
    ax.set_title("Example of Logistic Regression on Categorical Data")
159
160
161
def plt_two_logistic_loss_curves():
    """ plots the logistic loss """
    fig, ax = plt.subplots(1, 2, figsize=(6, 3), sharey=True)
    # hide the interactive-canvas chrome
    fig.canvas.toolbar_visible = False
    fig.canvas.header_visible = False
    fig.canvas.footer_visible = False

    # predictions sampled just inside (0, 1) so the logs stay finite
    fhat = np.linspace(0.01, 1 - 0.01, 20)
    arrow = {'arrowstyle': '->', 'color': dlorange, 'lw': 3}

    # left: loss when the target is 1, i.e. -log(f)
    ax[0].plot(fhat, -np.log(fhat))
    ax[0].text(0.5, 4.0, "y = 1", fontsize=12)
    ax[0].set_ylabel("loss")
    ax[0].set_xlabel(r"$f_{w,b}(x)$")

    # right: loss when the target is 0, i.e. -log(1 - f)
    ax[1].plot(fhat, -np.log(1 - fhat))
    ax[1].text(0.5, 4.0, "y = 0", fontsize=12)
    ax[1].set_xlabel(r"$f_{w,b}(x)$")

    ax[0].annotate("prediction \nmatches \ntarget ", xy=[1, 0], xycoords='data',
                   xytext=[-10, 30], textcoords='offset points',
                   ha="right", va="center", arrowprops=dict(arrow))
    ax[0].annotate("loss increases as prediction\n differs from target",
                   xy=[0.1, -np.log(0.1)], xycoords='data',
                   xytext=[10, 30], textcoords='offset points',
                   ha="left", va="center", arrowprops=dict(arrow))
    ax[1].annotate("prediction \nmatches \ntarget ", xy=[0, 0], xycoords='data',
                   xytext=[10, 30], textcoords='offset points',
                   ha="left", va="center", arrowprops=dict(arrow))
    ax[1].annotate("loss increases as prediction\n differs from target",
                   xy=[0.9, -np.log(1 - 0.9)], xycoords='data',
                   xytext=[-10, 30], textcoords='offset points',
                   ha="right", va="center", arrowprops=dict(arrow))

    plt.suptitle("Loss Curves for Two Categorical Target Values", fontsize=12)
    plt.tight_layout()
    plt.show()
192
193