GradientDescent.py

import numpy as np


def compute_error_for_given_points(b, m, points):
    # Mean squared error of the line y = m*x + b over all points.
    total_error = 0.0
    for i in range(len(points)):
        x = points[i, 0]
        y = points[i, 1]
        total_error += (y - (m * x + b)) ** 2
    return total_error / float(len(points))
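# Equivalent numpy one-liner (a sketch, not part of the original script,
# assuming `points` is an (N, 2) ndarray):
#     np.mean((points[:, 1] - (m * points[:, 0] + b)) ** 2)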
def step_gradient(b_current, m_current, points, learning_rate):
    # One gradient descent step: nudge b and m against the gradient of the
    # mean squared error.
    b_gradient = 0.0
    m_gradient = 0.0
    n = float(len(points))
    for i in range(len(points)):
        x = points[i, 0]
        y = points[i, 1]
        b_gradient += -(2 / n) * (y - (m_current * x + b_current))
        m_gradient += -(2 / n) * x * (y - (m_current * x + b_current))
    new_b = b_current - learning_rate * b_gradient
    new_m = m_current - learning_rate * m_gradient
    return [new_b, new_m]
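# The gradients above are the partial derivatives of the mean squared error
# E(b, m) = (1/N) * sum_i (y_i - (m * x_i + b))^2:
#     dE/db = -(2/N) * sum_i (y_i - (m * x_i + b))
#     dE/dm = -(2/N) * sum_i x_i * (y_i - (m * x_i + b))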
def gradient_descent_runner(points, starting_b, starting_m, learning_rate, num_iterations):
    # Iterate gradient descent steps from the starting guess for b and m.
    b = starting_b
    m = starting_m
    points = np.array(points)
    for i in range(num_iterations):
        b, m = step_gradient(b, m, points, learning_rate)
    return [b, m]
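# A vectorized alternative to step_gradient (a sketch, not part of the
# original script; the function name and variable names are illustrative).
# Assuming `points` is an (N, 2) ndarray, the per-point loop can be replaced
# with whole-array numpy operations:
def step_gradient_vectorized(b_current, m_current, points, learning_rate):
    x, y = points[:, 0], points[:, 1]
    error = y - (m_current * x + b_current)  # residual for every point
    b_gradient = -2.0 * error.mean()         # dE/db, as derived above
    m_gradient = -2.0 * (x * error).mean()   # dE/dm, as derived above
    return [b_current - learning_rate * b_gradient,
            m_current - learning_rate * m_gradient]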
def run():
    # Each row of data.csv is an "x,y" pair.
    points = np.genfromtxt(
        'https://raw.githubusercontent.com/llSourcell/linear_regression_live/master/data.csv',
        delimiter=',')
    # Hyperparameters for the line y = m*x + b (slope-intercept form).
    learning_rate = 0.0001
    initial_b = 0
    initial_m = 0
    num_iterations = 1000  # small dataset, so few iterations suffice
    [b, m] = gradient_descent_runner(points, initial_b, initial_m,
                                     learning_rate, num_iterations)
    print(b)
    print(m)
    print(compute_error_for_given_points(b, m, points))


if __name__ == '__main__':
    run()
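# Optional sanity check (a sketch, not in the original script): numpy's
# least-squares fit should land near the same slope and intercept.
#     m_ls, b_ls = np.polyfit(points[:, 0], points[:, 1], 1)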