
📚 The CoCalc Library - books, templates and other resources

License: OTHER
""" Simple linear regression example in TensorFlow
This program tries to predict the number of thefts from
the number of fires in the city of Chicago.
Author: Chip Huyen
Prepared for the class CS 20SI: "TensorFlow for Deep Learning Research"
cs20si.stanford.edu
"""
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'

import numpy as np
import matplotlib.pyplot as plt
import tensorflow as tf
import xlrd

import utils

DATA_FILE = 'data/fire_theft.xls'

# Step 1: read in data from the .xls file
book = xlrd.open_workbook(DATA_FILE, encoding_override="utf-8")
sheet = book.sheet_by_index(0)
data = np.asarray([sheet.row_values(i) for i in range(1, sheet.nrows)])
n_samples = sheet.nrows - 1

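# data is an (n_samples, 2) array: column 0 is the number of fires and
# column 1 is the number of thefts (see the plotting code at the bottom).
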
# Step 2: create placeholders for input X (number of fires) and label Y (number of thefts)
X = tf.placeholder(tf.float32, name='X')
Y = tf.placeholder(tf.float32, name='Y')

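# (Placeholders receive their concrete values at run time through feed_dict,
# as in the training loop below.)
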
# Step 3: create weight and bias, initialized to 0
w = tf.Variable(0.0, name='weights')
b = tf.Variable(0.0, name='bias')

# Step 4: build model to predict Y
Y_predicted = X * w + b

# Step 5: use the squared error as the loss function
loss = tf.square(Y - Y_predicted, name='loss')
# loss = utils.huber_loss(Y, Y_predicted)

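# A minimal sketch of what utils.huber_loss might look like (an assumption: the
# actual helper in utils.py may differ). The Huber loss is quadratic for small
# residuals and linear for large ones, so it is less sensitive to outliers than
# the squared error used above.
#
# def huber_loss(labels, predictions, delta=1.0):
#     residual = tf.abs(predictions - labels)
#     condition = tf.less(residual, delta)
#     small_res = 0.5 * tf.square(residual)
#     large_res = delta * residual - 0.5 * tf.square(delta)
#     return tf.where(condition, small_res, large_res)
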
# Step 6: use gradient descent with a learning rate of 0.001 to minimize loss
optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.001).minimize(loss)

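# Roughly equivalent two-step form (a sketch; the one-liner above is what this
# script actually uses): compute the gradients of the loss w.r.t. the trainable
# variables w and b, then apply a gradient-descent update.
#
# opt = tf.train.GradientDescentOptimizer(learning_rate=0.001)
# grads_and_vars = opt.compute_gradients(loss)
# optimizer = opt.apply_gradients(grads_and_vars)
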
with tf.Session() as sess:
    # Step 7: initialize the necessary variables, in this case, w and b
    sess.run(tf.global_variables_initializer())

    writer = tf.summary.FileWriter('./graphs/linear_reg', sess.graph)

    # Step 8: train the model for 50 epochs
    for i in range(50):
        total_loss = 0
        for x, y in data:
            # Session runs the optimizer op and fetches the value of loss
            _, l = sess.run([optimizer, loss], feed_dict={X: x, Y: y})
            total_loss += l
        print('Epoch {0}: {1}'.format(i, total_loss / n_samples))

    # close the writer when you're done using it
    writer.close()

    # Step 9: output the values of w and b
    w, b = sess.run([w, b])

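# After sess.run([w, b]) above, w and b are bound to plain NumPy floats,
# so they can be used directly in the plotting code below.
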
# plot the results
X, Y = data.T[0], data.T[1]
plt.plot(X, Y, 'bo', label='Real data')
plt.plot(X, X * w + b, 'r', label='Predicted data')
plt.legend()
plt.show()
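
# To inspect the computation graph written by the FileWriter above, point
# TensorBoard at the log directory:
#   tensorboard --logdir=./graphs/linear_reg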