-
Notifications
You must be signed in to change notification settings - Fork 0
/
gradient.py
59 lines (40 loc) · 1.26 KB
/
gradient.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
import matplotlib.pyplot as plt
import numpy as np
def compute_error(points, b, m):
    """Return the mean squared error of the line y = m*x + b over *points*.

    Args:
        points: array of shape (N, 2); column 0 is x, column 1 is y.
        b: intercept of the candidate line.
        m: slope of the candidate line.

    Returns:
        Mean of the squared residuals (always >= 0).

    Bug fixed: the original returned ``sum(y - h)`` — a *signed* residual
    sum in which positive and negative errors cancel, so a badly fitting
    line could report an "error" near zero (or negative). The gradient
    formulas in ``gradient_step`` are the derivatives of the MSE, so the
    matching metric is the mean squared error.
    """
    X = points[:, 0]
    y = points[:, 1]
    h = (X * m) + b          # predictions of the current line
    return np.mean((y - h) ** 2)
def gradient_step(points, b, m, learning_rate):
    """Perform one gradient-descent update of (b, m) on the MSE loss.

    Args:
        points: array of shape (N, 2); column 0 is x, column 1 is y.
        b: current intercept.
        m: current slope.
        learning_rate: step size multiplier for the gradients.

    Returns:
        Tuple ``(b, m)`` after one descent step.
    """
    xs = points[:, 0]
    ys = points[:, 1]
    count = float(len(points))
    # Residuals of the current line; the MSE gradients follow directly.
    residual = ys - (m * xs + b)
    grad_b = (-2.0 / count) * residual.sum()
    grad_m = (-2.0 / count) * (xs * residual).sum()
    # Step against the gradient direction.
    return b - learning_rate * grad_b, m - learning_rate * grad_m
def gradient_runner(points, initial_b, initial_m, learning_rate, num_iterations):
    """Run gradient descent for a fixed number of iterations.

    Prints the error after every step and returns the fitted
    parameters as the list ``[b, m]``.
    """
    b, m = initial_b, initial_m
    for step in range(num_iterations):
        b, m = gradient_step(points, b, m, learning_rate)
        current_error = compute_error(points, b, m)
        print("Error after %d iterations: %.6f" % (step + 1, current_error))
    return [b, m]
def run():
    """Fit a line to data.csv with gradient descent and plot the result."""
    points = np.genfromtxt("data.csv", delimiter=',')
    learning_rate = 0.0001
    start_b = 0
    start_m = 0
    iterations = 1000
    b, m = gradient_runner(points, start_b, start_m, learning_rate, iterations)
    xs = points[:, 0]
    ys = points[:, 1]
    fitted = xs * m + b
    # Scatter the raw data and overlay the fitted line.
    plt.title('Basic linear regression with gradient descent')
    plt.scatter(xs, ys)
    plt.plot(xs, fitted)
    plt.show()
# Script entry point: only run the demo when executed directly,
# not when imported as a module.
if __name__ == '__main__':
    run()