# linear_MIVABOfunction.py (forked from lbliek/MVRSM)
# Code received from MiVaBO author Erik Daxberger, 23-03-2020
from itertools import combinations

import numpy as np


class Linear:
    """ Function that is linear in arbitrary features of discrete and continuous variables """

    def __init__(
        self,
        n_vars=16,       # total number of variables
        n_vars_d=8,      # number of discrete variables
        alpha=1.0,       # prior precision
        beta=1.0,        # observation noise precision
        sigma=1.0,       # kernel lengthscale / bandwidth
        n_feats_c=16,    # number of continuous features
        noisy=False,     # should we add observation noise?
        laplace=True,    # should we sample the weights from a Laplace distribution?
    ):
        # set variables
        self.n_vars_d = n_vars_d
        self.n_vars_c = n_vars - self.n_vars_d
        self.beta = beta
        self.n_feats_c = n_feats_c
        self.noisy = noisy

        # discrete features: all singletons and unordered pairs of discrete variables
        self.vars_d_sq = list(combinations(range(self.n_vars_d), r=2))
        self.n_feats_d = self.n_vars_d + len(self.vars_d_sq)

        # sample the continuous (random Fourier) feature parameters
        self.sample_feats_c(sigma)

        n_feats_m = self.n_feats_d * self.n_feats_c
        self.n_feats_total = 1 + self.n_feats_d + self.n_feats_c + n_feats_m

        # sample the coefficients
        self.sample_coeffs(alpha, laplace)
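
    # Feature-count bookkeeping (with the default settings): the discrete
    # features are the 8 singletons plus C(8, 2) = 28 pairs, so
    # n_feats_d = 8 + 28 = 36; with n_feats_c = 16 continuous features the
    # mixed block has 36 * 16 = 576 features, giving
    # n_feats_total = 1 + 36 + 16 + 576 = 629 (bias + discrete + continuous + mixed).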
    def sample_coeffs(self, alpha, laplace):
        """ Sample the coefficients from either a Laplace or a Gaussian distribution """
        if laplace:
            self.w = np.random.laplace(0.0, 1.0 / alpha, self.n_feats_total)
            # ensure sparsity by setting small-magnitude weights to zero
            self.w[np.abs(self.w) < 1.0 / alpha] = 0.0
        else:
            self.w = np.random.normal(0.0, 1.0 / alpha, self.n_feats_total)
        # extract the coefficients: [bias | discrete | continuous | mixed]
        self.w0 = self.w[0]
        self.w_d = self.w[1 : 1 + self.n_feats_d]
        self.w_c = self.w[1 + self.n_feats_d : 1 + self.n_feats_d + self.n_feats_c]
        self.w_m = self.w[1 + self.n_feats_d + self.n_feats_c : self.n_feats_total]
    def sample_feats_c(self, sigma):
        """ sample the continuous feature parameters, i.e.,
            random Fourier feature / random kitchen sink parameters U and b """
        self.rks_U = np.random.normal(size=(self.n_feats_c, self.n_vars_c)) * (1.0 / sigma)
        self.rks_b = 2.0 * np.pi * np.random.rand(self.n_feats_c)
        self.rks_c = np.sqrt(2.0 / self.n_feats_c)
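
    # Background: with the entries of U drawn i.i.d. from N(0, 1 / sigma^2) and
    # b uniform on [0, 2*pi), the features sqrt(2 / D) * cos(U x + b) satisfy
    # E[phi_c(x) . phi_c(y)] = exp(-||x - y||^2 / (2 sigma^2)), i.e. they
    # approximate an RBF kernel with lengthscale sigma (Rahimi & Recht,
    # "Random Features for Large-Scale Kernel Machines", NIPS 2007).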
    def phi_c(self, x_c):
        """ basis functions / features for the continuous variables:
            random Fourier features / random kitchen sinks """
        return self.rks_c * np.cos(np.matmul(self.rks_U, x_c) + self.rks_b)

    def phi_d(self, x_d):
        """ basis functions / features for the discrete variables:
            (discrete) Fourier basis functions (-> 2nd order multi-linear polynomial) """
        phi = [x_d[i] for i in range(self.n_vars_d)]
        phi += [x_d[i] * x_d[j] for (i, j) in self.vars_d_sq]
        return np.array(phi)

    def phi_m(self, x_d, x_c):
        """ mixed basis functions / features:
            pairwise combinations of discrete and continuous features """
        return np.outer(self.phi_d(x_d), self.phi_c(x_c)).flatten()
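
    # The flattened outer product enumerates every pairwise product
    # phi_d(x_d)[i] * phi_c(x_c)[j], so phi_m has length
    # n_feats_d * n_feats_c, matching the n_feats_m mixed coefficients.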
    def objective_function(self, x):
        """ objective function
            f(x) = w0 + f_d(x_d) + f_c(x_c) + f_m(x_d, x_c) """
        w0 = self.w0
        f_d = self.f_d(x[: self.n_vars_d])
        f_c = self.f_c(x[self.n_vars_d :])
        f_m = self.f_m(x[: self.n_vars_d], x[self.n_vars_d :])
        f = w0 + f_d + f_c + f_m
        # np.random.normal takes a standard deviation, so the noise scale used
        # here is 1 / beta (a true precision beta would correspond to 1 / sqrt(beta))
        return f if not self.noisy else np.random.normal(f, 1.0 / self.beta)
    def f_d(self, x_d):
        """ linear model of the discrete features """
        return np.dot(self.phi_d(x_d), self.w_d)

    def f_c(self, x_c):
        """ linear model of the continuous features """
        return np.dot(self.phi_c(x_c), self.w_c)

    def f_m(self, x_d, x_c):
        """ linear model of the mixed features """
        return np.dot(self.phi_m(x_d, x_c), self.w_m)
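

# Illustrative usage sketch (an editor-added assumption, not part of the
# received code): evaluate the objective on one random mixed input, assuming
# the discrete variables are binary and that x stacks the n_vars_d discrete
# variables first, followed by the n_vars_c continuous ones, as
# objective_function expects.
if __name__ == "__main__":
    np.random.seed(0)
    func = Linear()
    x_d = np.random.randint(0, 2, size=func.n_vars_d)  # binary discrete part
    x_c = np.random.rand(func.n_vars_c)                # continuous part in [0, 1]
    x = np.concatenate([x_d, x_c])
    print("f(x) =", func.objective_function(x))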