# bayesian_linear_regression_util.py
#
# Plotting and printing utilities for Bayesian linear regression
# (forked from krasserm/bayesian-machine-learning).
import numpy as np
import matplotlib.pyplot as plt

from scipy import stats


def plot_data(x, t):
    """Scatter-plot the observed data points."""
    plt.scatter(x, t, marker='o', c="k", s=20)


def plot_truth(x, y, label='Truth'):
    """Plot the noise-free ground-truth function as a dashed line."""
    plt.plot(x, y, 'k--', label=label)


def plot_predictive(x, y, std, y_label='Prediction', std_label='Uncertainty', plot_xy_labels=True):
    """Plot the predictive mean and a +/- one standard deviation band."""
    y = y.ravel()
    std = std.ravel()

    plt.plot(x, y, label=y_label)
    plt.fill_between(x.ravel(), y + std, y - std, alpha=0.5, label=std_label)

    if plot_xy_labels:
        plt.xlabel('x')
        plt.ylabel('y')


def plot_posterior_samples(x, ys, plot_xy_labels=True):
    """Plot functions sampled from the posterior (one curve per column of ys)."""
    plt.plot(x, ys[:, 0], 'r-', alpha=0.5, label='Post. samples')
    for i in range(1, ys.shape[1]):
        plt.plot(x, ys[:, i], 'r-', alpha=0.5)

    if plot_xy_labels:
        plt.xlabel('x')
        plt.ylabel('y')


def plot_posterior(mean, cov, w0, w1):
    """Plot the posterior density over the weights (w0, w1) and mark the true weights."""
    resolution = 100

    grid_x = grid_y = np.linspace(-1, 1, resolution)
    grid_flat = np.dstack(np.meshgrid(grid_x, grid_y)).reshape(-1, 2)

    densities = stats.multivariate_normal.pdf(grid_flat, mean=mean.ravel(), cov=cov).reshape(resolution, resolution)
    plt.imshow(densities, origin='lower', extent=(-1, 1, -1, 1))
    plt.scatter(w0, w1, marker='x', c="r", s=20, label='Truth')

    plt.xlabel('w0')
    plt.ylabel('w1')


def print_comparison(title, a, b, a_prefix='np', b_prefix='br'):
    """Print two results side by side under the given prefixes for easy comparison."""
    print(title)
    print('-' * len(title))
    print(f'{a_prefix}:', a)
    print(f'{b_prefix}:', b)
    print()
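

# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original module). It assumes a
# linear model t = w0 + w1 * x with Gaussian noise and the standard Bayesian
# linear regression posterior with prior precision `alpha` and noise
# precision `beta`; the true weights and precisions below are made up for
# the demo.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    np.random.seed(0)

    w0_true, w1_true = -0.3, 0.5   # assumed true weights
    alpha, beta = 2.0, 25.0        # assumed prior and noise precision

    # Generate noisy observations of the linear function.
    x = np.random.uniform(-1, 1, size=20)
    t = w0_true + w1_true * x + np.random.normal(scale=1 / np.sqrt(beta), size=x.shape)

    # Design matrix with a bias column: Phi = [1, x].
    Phi = np.column_stack([np.ones_like(x), x])

    # Posterior over weights: S_N = (alpha*I + beta*Phi^T Phi)^-1, m_N = beta*S_N*Phi^T*t.
    S_N = np.linalg.inv(alpha * np.eye(2) + beta * Phi.T @ Phi)
    m_N = beta * S_N @ Phi.T @ t

    # Predictive mean and standard deviation on a test grid.
    x_test = np.linspace(-1, 1, 100)
    Phi_test = np.column_stack([np.ones_like(x_test), x_test])
    y_mean = Phi_test @ m_N
    y_var = 1 / beta + np.sum(Phi_test @ S_N * Phi_test, axis=1)
    y_std = np.sqrt(y_var)

    plt.figure(figsize=(10, 4))

    plt.subplot(1, 2, 1)
    plot_posterior(m_N, S_N, w0_true, w1_true)
    plt.title('Posterior over (w0, w1)')

    plt.subplot(1, 2, 2)
    plot_data(x, t)
    plot_truth(x_test, w0_true + w1_true * x_test)
    plot_predictive(x_test, y_mean, y_std)
    plt.legend()
    plt.title('Predictive distribution')

    plt.tight_layout()
    plt.show()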