#!/usr/bin/env python
import tensorflow as tf
import numpy as np
trX = np.linspace(-1, 1, 101)
trY = 2 * trX + np.random.randn(*trX.shape) * 0.33 # create a y value which is approximately linear but with some random noise
X = tf.placeholder("float") # create symbolic variables
Y = tf.placeholder("float")
def model(X, w):
    return tf.multiply(X, w) # linear regression is just X*w, so this model is about as simple as it gets
w = tf.Variable(0.0, name="weights") # create a shared variable (like theano.shared) for the weight
y_model = model(X, w)
cost = tf.square(Y - y_model) # use squared error for the cost function
train_op = tf.train.GradientDescentOptimizer(0.01).minimize(cost) # construct an optimizer to minimize cost and fit line to my data
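# Note (a sketch of what minimize() computes under the hood; nothing here needs
# to be written by hand): for this squared-error cost the gradient is
#   dcost/dw = -2 * (Y - w * X) * X
# and each train_op step applies w <- w - 0.01 * dcost/dw.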
# Launch the graph in a session
with tf.Session() as sess:
    # you need to initialize variables (in this case just the variable w)
    tf.global_variables_initializer().run()

    for i in range(100):
        for (x, y) in zip(trX, trY):
            sess.run(train_op, feed_dict={X: x, Y: y})

    print(sess.run(w)) # It should be something around 2
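
# Optional sanity check (a sketch, not part of the original tutorial): for a
# zero-intercept linear fit, the closed-form least-squares slope is
# sum(x*y) / sum(x*x), which should roughly match the learned weight (~2).
print(np.sum(trX * trY) / np.sum(trX * trX))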