# skewnormal_msc.py
# Markovian score climbing (MSC) demo: fit a Gaussian variational
# approximation q(x; mu, sigma) to a skew-normal target using
# conditional importance sampling and autograd-based SGD.
import autograd.numpy as np
import autograd.numpy.random as npr
import autograd.scipy.stats.norm as norm
from autograd import grad
from autograd.core import getval
from skewnormal_optimizer import sgd
from autograd.extend import notrace_primitive
def discretesampling(w):
    """Draw a single index from {0, ..., len(w)-1} with probabilities w.

    Inverse-CDF sampling: one uniform draw is located inside the
    cumulative distribution of w via np.digitize.
    """
    draw = npr.rand()
    cdf = np.cumsum(w)
    return np.digitize(draw, cdf)
# Parameters of the target skew-normal distribution p(x).
loc = 0.5    # location
scale = 2.   # scale
shape = 5.   # skewness; positive => right-skewed
# Closed-form mean and variance of the skew-normal
# (standard formulas via delta = shape / sqrt(1 + shape^2)),
# used below to initialize the variational parameters.
delta = shape/np.sqrt(1.+shape**2)
mu_true = loc+scale*delta*np.sqrt(2./np.pi)
sigma2_true = (1.-2*delta**2/np.pi)*scale**2
def logq(params, x):
    """Gaussian log-density log N(x; mean, exp(log_std)^2).

    params is a (mean, log_std) pair; x may be a scalar or array.
    """
    mean, log_std = params
    z = (x - mean) / np.exp(log_std)
    return -0.5 * z ** 2 - 0.5 * np.log(2. * np.pi) - log_std
def logp(x):
    """Log-density of the target skew-normal SN(loc, scale, shape) at x.

    Uses the standard form (2/scale) * phi(z) * Phi(shape*z) with
    z = (x - loc)/scale, evaluated in log space for stability.
    """
    z = (x - loc) / scale
    return np.log(2.) - np.log(scale) + norm.logpdf(z) + norm.logcdf(shape * z)
# Retained Markov-chain state: the sample carried over from the previous
# MSC iteration.
xprev = 0.
# Number of importance samples drawn per iteration.
S = 2
@notrace_primitive
def generate_samples(params, seed, xprev):
    """Conditional importance sampling (CIS) step for MSC.

    Draws S proposals from q(.; params), overwrites one uniformly chosen
    slot with the retained state xprev, computes self-normalized
    importance weights, and resamples the next retained state.
    Wrapped in @notrace_primitive so autograd does not differentiate
    through the sampling itself.

    Returns (w, x, x_new): normalized weights, the sample pool, and the
    resampled retained state.
    """
    mu, log_sigma = params
    x = mu + np.exp(log_sigma)*seed.normal(size=S)
    # Uniformly pick which slot keeps the previous chain state.
    idx = discretesampling(np.ones(S)/float(S))
    x[idx] = xprev
    logw = logp(x) - logq(params, x)  # log importance weights
    maxLogW = np.max(logw)            # max-subtraction for numerical stability
    uw = np.exp(logw-maxLogW)
    w = uw / np.sum(uw)               # self-normalized weights
    idx = discretesampling(w)         # resample next retained state
    return w, x, x[idx]
# Dedicated RNG for the proposal draws inside generate_samples.
seed = npr.RandomState(0)
def objective(params, iter):
    """MSC surrogate loss: negative weighted log-q over the CIS samples.

    Side effects: advances the global RNG stream and updates the global
    retained state xprev. Its gradient is the (negative) inclusive-KL
    score estimate followed by the optimizer.
    """
    global xprev
    w, x, xprev = generate_samples(params, seed, xprev)
    return -np.sum(w*logq(params, x))
objective_grad = grad(objective)
# Initialize q at the target's true mean and log standard deviation.
init_params = (mu_true, 0.5*np.log(sigma2_true))
num_iters = 1000000
step_size = 0.5
# Per-iteration trace of (mu, log_sigma), filled in by the SGD callback.
params_vec = np.zeros((num_iters,2))
print(" Epoch | Objective ")
def print_perf(params, iter, grad):
    """SGD callback: record params into the global trace and periodically
    print the current objective value."""
    m, ls = params
    params_vec[iter, 0] = m
    params_vec[iter, 1] = ls
    if iter % 5000 == 0:
        # NOTE(review): objective() consumes the shared RNG stream and
        # mutates the global xprev, so this diagnostic call perturbs the
        # chain state between optimizer steps — confirm this is intended.
        # objective() returns a scalar, so np.mean here is a no-op.
        bound = np.mean(objective(params, iter))
        message = "{:15}|{:20}|".format(iter, bound)
        print(message)
# The optimizers provided can optimize lists, tuples, or dicts of parameters.
optimized_params = sgd(objective_grad, init_params, step_size=step_size,
                       num_iters=num_iters, callback=print_perf)
# NOTE(review): assumes a 'results/' directory already exists; np.save
# raises FileNotFoundError otherwise.
np.save('results/skewnormal_S'+str(S)+'_msc.npy', params_vec)