Skip to content

Commit

Permalink
less confusing
Browse files Browse the repository at this point in the history
  • Loading branch information
lazyprogrammer committed Sep 28, 2017
1 parent 3c3d4fd commit 6b3a6d7
Showing 1 changed file with 14 additions and 7 deletions.
21 changes: 14 additions & 7 deletions linear_regression_class/lr_poly.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,10 @@
# notes for this course can be found at:
# https://deeplearningcourses.com/c/data-science-linear-regression-in-python
# https://www.udemy.com/data-science-linear-regression-in-python

from __future__ import print_function, division
from builtins import range
# Note: you may need to update your version of future
# sudo pip install -U future

import numpy as np
import matplotlib.pyplot as plt
Expand All @@ -16,15 +19,16 @@
# NOTE(review): the first lines below appear to be the body of a file-reading
# loop (one "x,y" pair per CSV line); the loop header and its indentation were
# lost in extraction — confirm against the original file.
x, y = line.split(',')  # each CSV line holds "x,y" as text
x = float(x)
X.append([1, x, x*x]) # add the bias term x0 = 1
# our model is therefore y_hat = w0 + w1 * x + w2 * x**2
Y.append(float(y))

# let's turn X and Y into numpy arrays since that will be useful later
X = np.array(X)  # design matrix; columns are [1, x, x^2]
Y = np.array(Y)  # target vector


# let's plot the data to see what it looks like
# column 1 of X is the raw x value (column 0 is the bias, column 2 is x^2)
plt.scatter(X[:,1], Y)
plt.title("The data we're trying to fit")
plt.show()


Expand All @@ -34,19 +38,22 @@
# note: the * operator does element-by-element multiplication in numpy
# np.dot() does what we expect for matrix multiplication
# Solve the normal equations (X^T X) w = X^T Y for the weight vector w,
# rather than inverting X^T X directly.
w = np.linalg.solve(np.dot(X.T, X), np.dot(X.T, Y))
Yhat = np.dot(X, w)  # model predictions at the training inputs


# let's plot everything together to make sure it worked
plt.scatter(X[:,1], Y)
# NOTE(review): this sorted() plot and the x_line/y_line plot below look like
# the old and new sides of a diff flattened together — keeping both would draw
# the model curve twice; confirm against the actual file before running.
plt.plot(sorted(X[:,1]), sorted(Yhat))
# note: shortcut since monotonically increasing
# x-axis values have to be in order since the points
# are joined from one element to the next

# to plot our quadratic model predictions, let's
# create a line of x's and calculate the predicted y's
x_line = np.linspace(X[:,1].min(), X[:,1].max())  # evenly spaced x's over the data range
y_line = w[0] + w[1] * x_line + w[2] * x_line * x_line  # y_hat = w0 + w1*x + w2*x^2
plt.plot(x_line, y_line)
plt.title("Our fitted quadratic")
plt.show()


# determine how good the model is by computing the r-squared
Yhat = X.dot(w)  # recompute predictions (same as np.dot(X, w) above)
d1 = Y - Yhat  # residuals
d2 = Y - Y.mean()  # deviations from the mean
r2 = 1 - d1.dot(d1) / d2.dot(d2)  # R^2 = 1 - SS_res / SS_tot
Expand Down

0 comments on commit 6b3a6d7

Please sign in to comment.