"""
Small helpers for code that is not shown in the notebooks
"""

from sklearn import neighbors, datasets, linear_model
import matplotlib.pyplot as pl
import numpy as np
from matplotlib.colors import ListedColormap

# Color maps for a 3-class classification problem, such as iris
cmap_light = ListedColormap(['#FFAAAA', '#AAFFAA', '#AAAAFF'])
cmap_bold = ListedColormap(['#FF0000', '#00FF00', '#0000FF'])


def plot_iris_knn():
    """Plot the decision regions of a 5-nearest-neighbours classifier
    trained on the first two iris features (sepal length and width)."""
    iris = datasets.load_iris()
    # We only take the first two features; a two-dimensional dataset would
    # avoid this slicing, but it keeps the plot easy to draw.
    X = iris.data[:, :2]
    y = iris.target

    knn = neighbors.KNeighborsClassifier(n_neighbors=5)
    knn.fit(X, y)

    # Predict the class of every point on a dense grid covering the data
    x_min, x_max = X[:, 0].min() - .1, X[:, 0].max() + .1
    y_min, y_max = X[:, 1].min() - .1, X[:, 1].max() + .1
    xx, yy = np.meshgrid(np.linspace(x_min, x_max, 100),
                         np.linspace(y_min, y_max, 100))
    Z = knn.predict(np.c_[xx.ravel(), yy.ravel()])

    # Put the result into a color plot
    Z = Z.reshape(xx.shape)
    pl.figure()
    pl.pcolormesh(xx, yy, Z, cmap=cmap_light)

    # Plot also the training points
    pl.scatter(X[:, 0], X[:, 1], c=y, cmap=cmap_bold)
    pl.xlabel('sepal length (cm)')
    pl.ylabel('sepal width (cm)')
    pl.axis('tight')
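

# Added sketch, not part of the original helpers: the classifier above,
# stripped to its essentials -- fit 5-NN on the first two iris features and
# label a single new measurement.  The function name and the default sample
# values are illustrative only.
def _iris_knn_predict_sketch(sepal_length=5.0, sepal_width=3.5):
    """Return the predicted species name for one (sepal length, width) pair."""
    iris = datasets.load_iris()
    knn = neighbors.KNeighborsClassifier(n_neighbors=5)
    knn.fit(iris.data[:, :2], iris.target)
    label = knn.predict([[sepal_length, sepal_width]])[0]
    return iris.target_names[label]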


def plot_polynomial_regression():
    """Fit 4th- and 9th-order polynomials to noisy samples of a 9th-order
    polynomial and plot both fits next to the ground truth."""
    rng = np.random.RandomState(0)
    x = 2 * rng.rand(100) - 1

    f = lambda t: 1.2 * t ** 2 + .1 * t ** 3 - .4 * t ** 5 - .5 * t ** 9
    y = f(x) + .4 * rng.normal(size=100)

    x_test = np.linspace(-1, 1, 100)

    pl.figure()
    pl.scatter(x, y, s=4)

    # Design matrix with columns [1, x, x**2, x**3, x**4]: a 4th-order fit
    X = np.array([x ** i for i in range(5)]).T
    X_test = np.array([x_test ** i for i in range(5)]).T
    regr = linear_model.LinearRegression()
    regr.fit(X, y)
    pl.plot(x_test, regr.predict(X_test), label='4th order')

    # Same idea with powers up to x**9: a 9th-order fit
    X = np.array([x ** i for i in range(10)]).T
    X_test = np.array([x_test ** i for i in range(10)]).T
    regr = linear_model.LinearRegression()
    regr.fit(X, y)
    pl.plot(x_test, regr.predict(X_test), label='9th order')

    pl.legend(loc='best')
    pl.axis('tight')
    pl.title('Fitting a 4th and a 9th order polynomial')

    pl.figure()
    pl.scatter(x, y, s=4)
    pl.plot(x_test, f(x_test), label="truth")
    pl.axis('tight')
    pl.title('Ground truth (9th order polynomial)')
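

# Added illustration, not part of the original file: the design matrices in
# plot_polynomial_regression are Vandermonde-style matrices built by hand as
# [x**0, ..., x**d].T.  A minimal sketch of the same fit expressed with
# scikit-learn's PolynomialFeatures in a pipeline; the function name and the
# default degree are ours.
def _polynomial_fit_sketch(x, y, degree=4):
    """Fit a polynomial of the given degree and return (x_test, predictions).

    `x` and `y` are 1-D NumPy arrays of the same length.
    """
    from sklearn.pipeline import make_pipeline
    from sklearn.preprocessing import PolynomialFeatures

    model = make_pipeline(PolynomialFeatures(degree=degree),
                          linear_model.LinearRegression())
    model.fit(x[:, np.newaxis], y)      # samples must form a 2-D column
    x_test = np.linspace(-1, 1, 100)
    return x_test, model.predict(x_test[:, np.newaxis])


# Added convenience, not in the original helpers: render both demo figures
# when the module is run directly (the notebooks call the plot_* helpers).
if __name__ == '__main__':
    plot_iris_knn()
    plot_polynomial_regression()
    pl.show()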