# Exercise 6 - Gradient Descent Methods, Backtracking Line Search and Newton's method (unconstrained)
# TOML-MIRI
# Marcel Cases
# 01-apr-2021
#
# min(1) f(x) = 2*x^2 - 0.5, var x
# min(2) f(x) = 2*x^4 - 4*x^2 + x - 0.5, var x
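#
# For reference (so the numerical results below can be sanity-checked):
# f1'(x) = 4x, so f1 has its minimum at x = 0 with f1(0) = -0.5.
# f2'(x) = 8*x^3 - 8*x + 1; its minimizers are among the real roots of this cubic.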
#%%
# Backtracking Line Search

import numpy as np
from numdifftools import Derivative
import time

print('\nSOLVING USING BLS\n')

def backtrack(dfx, x0, step):
    """Fixed-step gradient descent, stopped when the last update is smaller than 1e-4."""
    incumbent = x0  # current iterate (result)
    iters = 0
    acc = 1
    while acc >= 1e-4:
        newincumbent = incumbent - step*dfx(incumbent)
        acc = np.absolute(newincumbent - incumbent)  # magnitude of the last update
        incumbent = newincumbent
        iters += 1
    return incumbent, iters, acc, step

def show_results(func, incumbent, iters, acc, step, time):
    print("min f: ", func)
    print("at x = ", incumbent)
    print("iters: ", iters)
    print("acc: ", acc)
    print("step: ", step)
    print("time (ms): ", time, "\n")

# First objective: f(x) = 2*x^2 - 0.5
f = lambda x: 2*x**2 - 0.5

start_time = time.time()*1000
incumbent, iters, acc, step = backtrack(dfx=Derivative(f), x0=3., step=1e-3)
end_time = time.time()*1000

show_results(f(incumbent), incumbent, iters, acc, step, time=end_time-start_time)

# Second objective: f(x) = 2*x^4 - 4*x^2 + x - 0.5, from several starting points
f = lambda x: 2*x**4 - 4*x**2 + x - 0.5

x0s = [-2., -0.5, 0.5, 2.]

for x0 in x0s:
    start_time = time.time()*1000
    incumbent, iters, acc, step = backtrack(dfx=Derivative(f), x0=x0, step=1e-3)
    end_time = time.time()*1000
    show_results(f(incumbent), incumbent, iters, acc, step, time=end_time-start_time)
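
# The backtrack() routine above descends with a fixed step size. Below is a
# minimal sketch of an actual backtracking line search (Armijo sufficient
# decrease) for comparison; the name armijo_descent and the parameters
# alpha, beta, tol and max_iters are illustrative choices, not part of the
# original exercise.
def armijo_descent(f, dfx, x0, alpha=0.3, beta=0.8, tol=1e-4, max_iters=10000):
    x = x0
    for iters in range(max_iters):
        g = dfx(x)      # gradient; the descent direction is -g
        t = 1.0         # start from a unit step
        # Shrink the step until f(x - t*g) <= f(x) - alpha*t*g^2 (Armijo condition)
        while f(x - t*g) > f(x) - alpha*t*g**2:
            t *= beta
        x_new = x - t*g
        if np.absolute(x_new - x) < tol:
            return x_new, iters + 1
        x = x_new
    return x, max_iters

# Example usage with the objectives above, e.g.:
#   x_star, n_iters = armijo_descent(f, Derivative(f), x0=2.)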
#%%
# Newton's Method for optimization (SLSQP solver from Scipy uses this method)

from scipy.optimize import minimize
import numpy as np
from numdifftools import Jacobian, Hessian
import time

print('\nSOLVING USING SCIPY\n')

# First function to optimize
fun = lambda x: 2*x**2 - 0.5                                # objective function
fun_Jac = lambda x: Jacobian(lambda x: fun(x))(x).ravel()   # Jacobian
fun_Hess = lambda x: Hessian(lambda x: fun(x))(x)           # Hessian
cons = ()                                                   # unconstrained
bnds = ((None, None), )*1                                   # unbounded

# initial guess
x0 = 3.0

start_time = time.time()*1000
# SLSQP is requested explicitly so the solver matches the comment above
# (minimize() otherwise picks its default method from the bounds/constraints given).
res = minimize(fun, x0, method='SLSQP', bounds=bnds, constraints=cons, jac=fun_Jac, hess=fun_Hess)
end_time = time.time()*1000

print('\n', res)
print("JAC+HESS: optimal value p*", res.fun)
print("JAC+HESS: optimal var: x = ", res.x)
print("exec time (ms): ", end_time - start_time)

# Second function to optimize, from several initial guesses
fun = lambda x: 2*x**4 - 4*x**2 + x - 0.5                   # objective function
fun_Jac = lambda x: Jacobian(lambda x: fun(x))(x).ravel()   # Jacobian
fun_Hess = lambda x: Hessian(lambda x: fun(x))(x)           # Hessian
cons = ()                                                   # unconstrained
bnds = ((None, None), )*1                                   # unbounded

# initial guesses
x0s = [-2., -0.5, 0.5, 2.]

for x0 in x0s:
    start_time = time.time()*1000
    res = minimize(fun, x0, method='SLSQP', bounds=bnds, constraints=cons, jac=fun_Jac, hess=fun_Hess)
    end_time = time.time()*1000
    print('\n', res)
    print("JAC+HESS: optimal value p*", res.fun)
    print("JAC+HESS: optimal var: x = ", res.x)
    print("exec time (ms): ", end_time - start_time)
#%%
# Newton's Method for finding roots

import numpy as np
from numdifftools import Derivative

def dx(f, x):
    return abs(0 - f(x))

def newton(f, df, x0, epsilon):
    delta = dx(f, x0)
    while delta > epsilon:
        x0 = x0 - f(x0)/df(x0)
        delta = dx(f, x0)
    print('Root at: ', x0)
    print('f(x) at root: ', f(x0))
    return delta

def f(x):
    return 2*x**4 - 4*x**2 + x - 0.5

def df(x):
    return 8*x**3 - 8*x + 1

x0s = [-2., -0.5, 0.5, 2.]

for x0 in x0s:
    newton(f, df, x0, 1e-5)
# %%
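# Cross-check of the hand-rolled newton() above using SciPy's built-in Newton
# root finder (scipy.optimize.newton). This is a sketch for comparison only;
# the alias scipy_newton avoids shadowing the newton() defined above, and the
# try/except guards starting points from which the iteration may not converge
# within SciPy's default iteration limit.
from scipy.optimize import newton as scipy_newton

for x0 in x0s:
    try:
        root = scipy_newton(f, x0, fprime=df, tol=1e-5)
        print('scipy root from x0 =', x0, ': x =', root, ', f(x) =', f(root))
    except RuntimeError as err:
        print('scipy newton did not converge from x0 =', x0, ':', err)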