Commit be96722: Ran JuliaFormatter
rossviljoen committed Jul 30, 2021
1 parent 878b214 commit be96722
Showing 9 changed files with 125 additions and 148 deletions.
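For context: the commit is a purely mechanical reformat. Below is a sketch of the sort of invocation that produces such a commit; the actual command and options are not recorded in the diff, so this is an assumption, not the author's verified workflow. The `key = value` kwarg spacing seen throughout matches JuliaFormatter's default whitespace_in_kwargs = true.

using JuliaFormatter

# Reformat every .jl file under the repository root, in place, with default
# options (an assumption; a project may instead pin its style in a
# .JuliaFormatter.toml file at the repo root).
format(".")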
58 changes: 20 additions & 38 deletions examples/classification.jl
@@ -12,7 +12,7 @@ using DelimitedFiles
using IterTools

using Plots
-default(; legend=:outertopright, size=(700, 400))
+default(; legend = :outertopright, size = (700, 400))

using Random
Random.seed!(1234)
@@ -40,45 +40,33 @@ fx = f(x)
# %%
# Then, plot some samples from the prior underlying GP
x_plot = 0:0.02:6
-prior_f_samples = rand(f.f(x_plot, 1e-6),20)
+prior_f_samples = rand(f.f(x_plot, 1e-6), 20)

-plt = plot(
-    x_plot,
-    prior_f_samples;
-    seriescolor="red",
-    linealpha=0.2,
-    label=""
-)
-scatter!(plt, x, y; seriescolor="blue", label="Data points")
+plt = plot(x_plot, prior_f_samples; seriescolor = "red", linealpha = 0.2, label = "")
+scatter!(plt, x, y; seriescolor = "blue", label = "Data points")


# %%
# Plot the same samples, but pushed through a logistic sigmoid to constrain
# them in (0, 1).
prior_y_samples = mean.(f.lik.(prior_f_samples))

-plt = plot(
-    x_plot,
-    prior_y_samples;
-    seriescolor="red",
-    linealpha=0.2,
-    label=""
-)
-scatter!(plt, x, y; seriescolor="blue", label="Data points")
+plt = plot(x_plot, prior_y_samples; seriescolor = "red", linealpha = 0.2, label = "")
+scatter!(plt, x, y; seriescolor = "blue", label = "Data points")
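An editorial note on the block above: f.lik is a BernoulliLikelihood, which wraps each latent value f in Bernoulli(σ(f)) via a logistic link, and the mean of that distribution is simply σ(f). That is why broadcasting mean over f.lik.(prior_f_samples) produces curves confined to (0, 1):

p(y = 1 | f) = σ(f) = 1 / (1 + exp(-f)),    E[y | f] = σ(f)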


# %%
# A simple Flux model
using Flux

struct SVGPModel
-    k # kernel parameters
-    m # variational mean
-    A # variational covariance
-    z # inducing points
+    k::Any # kernel parameters
+    m::Any # variational mean
+    A::Any # variational covariance
+    z::Any # inducing points
end

-@Flux.functor SVGPModel (k, m, A,) # Don't train the inducing inputs
+Flux.@functor SVGPModel (k, m, A) # Don't train the inducing inputs

lik = BernoulliLikelihood()
jitter = 1e-4
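An aside on the Flux.@functor line above: passing an explicit field tuple (k, m, A) tells Flux to collect parameters only from those fields, so the inducing inputs z never appear in Flux.params and are left out of gradient updates. A minimal sketch of the mechanism, using a hypothetical Toy struct:

using Flux

struct Toy
    a::Any
    b::Any
end
Flux.@functor Toy (a,)  # only `a` is marked trainable

t = Toy(rand(2), rand(3))
Flux.params(t)  # contains t.a only; t.b (like z above) stays fixed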
@@ -92,9 +80,9 @@ function (m::SVGPModel)(x)
    return fx, fu, q
end

-function flux_loss(x, y; n_data=length(y))
+function flux_loss(x, y; n_data = length(y))
    fx, fu, q = model(x)
-    return -SparseGPs.elbo(fx, y, fu, q; n_data, method=MonteCarlo())
+    return -SparseGPs.elbo(fx, y, fu, q; n_data, method = MonteCarlo())
end

# %%
@@ -121,7 +109,7 @@ Flux.train!(
    (x, y) -> flux_loss(x, y),
    parameters,
    ncycle([(x, y)], 2000), # Train for 2000 epochs
-    opt
+    opt,
)

# %%
@@ -136,13 +124,7 @@ l_post = LatentGP(post, BernoulliLikelihood(), jitter)

post_f_samples = rand(l_post.f(x_plot, 1e-6), 20)

-plt = plot(
-    x_plot,
-    post_f_samples;
-    seriescolor="red",
-    linealpha=0.2,
-    legend=false
-)
+plt = plot(x_plot, post_f_samples; seriescolor = "red", linealpha = 0.2, legend = false)

# %%
# As above, push these samples through a logistic sigmoid to get posterior predictions.
@@ -151,10 +133,10 @@ post_y_samples = mean.(l_post.lik.(post_f_samples))
plt = plot(
    x_plot,
    post_y_samples;
-    seriescolor="red",
-    linealpha=0.2,
+    seriescolor = "red",
+    linealpha = 0.2,
    # legend=false,
-    label=""
+    label = "",
)
-scatter!(plt, x, y; seriescolor="blue", label="Data points")
-vline!(z; label="Pseudo-points")
+scatter!(plt, x, y; seriescolor = "blue", label = "Data points")
+vline!(z; label = "Pseudo-points")
65 changes: 32 additions & 33 deletions examples/regression.jl
@@ -8,7 +8,7 @@ using Optim
using IterTools

using Plots
-default(; legend=:outertopright, size=(700, 400))
+default(; legend = :outertopright, size = (700, 400))

using Random
Random.seed!(1234)
@@ -23,7 +23,7 @@ N = 10000 # Number of training points
x = rand(Uniform(-1, 1), N)
y = g.(x) + 0.3 * randn(N)

-scatter(x, y; xlabel="x", ylabel="y", legend=false)
+scatter(x, y; xlabel = "x", ylabel = "y", legend = false)


# %%
@@ -34,13 +34,13 @@ lik_noise = 0.3
jitter = 1e-5

struct SVGPModel
-    k # kernel parameters
-    m # variational mean
-    A # variational covariance
-    z # inducing points
+    k::Any # kernel parameters
+    m::Any # variational mean
+    A::Any # variational covariance
+    z::Any # inducing points
end

-@Flux.functor SVGPModel (k, m, A,) # Don't train the inducing inputs
+Flux.@functor SVGPModel (k, m, A) # Don't train the inducing inputs

function make_kernel(k)
    return softplus(k[1]) * (SqExponentialKernel() ∘ ScaleTransform(softplus(k[2])))
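Here make_kernel maps two unconstrained numbers to a positive variance and a positive inverse lengthscale via softplus; the ∘ composes the base kernel with an input transform (standard KernelFunctions syntax). A quick standalone check, with arbitrary illustrative parameter values:

using KernelFunctions, Flux  # softplus comes via Flux (NNlib)

k_raw = [0.1, 0.5]  # unconstrained parameters; values here are illustrative only
kern = softplus(k_raw[1]) * (SqExponentialKernel() ∘ ScaleTransform(softplus(k_raw[2])))
kern(0.0, 1.0)  # kernels are callable on pairs of inputs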
@@ -68,7 +68,7 @@ function posterior(m::SVGPModel)
end

# Return the loss given data - in this case the negative ELBO.
-function flux_loss(x, y; n_data=length(y))
+function flux_loss(x, y; n_data = length(y))
    fx, fu, q = model(x)
    return -SparseGPs.elbo(fx, y, fu, q; n_data)
end
@@ -90,7 +90,7 @@ model = SVGPModel(k, m, A, z)
b = 100 # minibatch size
opt = ADAM(0.001)
parameters = Flux.params(model)
-data_loader = Flux.Data.DataLoader((x, y), batchsize=b)
+data_loader = Flux.Data.DataLoader((x, y), batchsize = b)

# %%
# Negative ELBO before training
@@ -99,10 +99,10 @@ println(flux_loss(x, y))
# %%
# Train the model
Flux.train!(
-    (x, y) -> flux_loss(x, y; n_data=N),
+    (x, y) -> flux_loss(x, y; n_data = N),
    parameters,
    ncycle(data_loader, 300), # Train for 300 epochs
-    opt
+    opt,
)

# %%
@@ -116,16 +116,16 @@ post = posterior(model)
scatter(
    x,
    y;
-    markershape=:xcross,
-    markeralpha=0.1,
-    xlim=(-1, 1),
-    xlabel="x",
-    ylabel="y",
-    title="posterior (VI with sparse grid)",
-    label="Train Data",
+    markershape = :xcross,
+    markeralpha = 0.1,
+    xlim = (-1, 1),
+    xlabel = "x",
+    ylabel = "y",
+    title = "posterior (VI with sparse grid)",
+    label = "Train Data",
)
-plot!(-1:0.001:1, post; label="Posterior")
-vline!(z; label="Pseudo-points")
+plot!(-1:0.001:1, post; label = "Posterior")
+vline!(z; label = "Pseudo-points")


# %% There is a closed form optimal solution for the variational posterior q(u)
@@ -137,8 +137,8 @@ function exact_q(fu, fx, y)
    σ² = fx.Σy[1]
    Kuf = cov(fu, fx)
    Kuu = Symmetric(cov(fu))
-    Σ = (Symmetric(cov(fu) + (1/σ²) * Kuf * Kuf'))
-    m = ((1/σ²)*Kuu*(Σ\Kuf)) * y
+    Σ = (Symmetric(cov(fu) + (1 / σ²) * Kuf * Kuf'))
+    m = ((1 / σ²) * Kuu * (Σ \ Kuf)) * y
    S = Symmetric(Kuu * (Σ \ Kuu))
    return MvNormal(m, S)
end
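For reference, exact_q implements the closed-form optimal variational distribution available when the likelihood is Gaussian (Titsias, 2009). In the code's own notation, with noise variance σ²:

Σ = Kuu + (1/σ²) Kuf Kufᵀ
m = (1/σ²) Kuu Σ⁻¹ Kuf y
S = Kuu Σ⁻¹ Kuu

so the optimal q(u) is the MvNormal(m, S) that the function returns.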
@@ -164,15 +164,14 @@ AbstractGPs.elbo(fx, y, fu)
scatter(
    x,
    y;
-    markershape=:xcross,
-    markeralpha=0.1,
-    xlim=(-1, 1),
-    xlabel="x",
-    ylabel="y",
-    title="posterior (VI with sparse grid)",
-    label="Train Data",
+    markershape = :xcross,
+    markeralpha = 0.1,
+    xlim = (-1, 1),
+    xlabel = "x",
+    ylabel = "y",
+    title = "posterior (VI with sparse grid)",
+    label = "Train Data",
)
-plot!(-1:0.001:1, ap_ex; label="SVGP posterior")
-plot!(-1:0.001:1, ap_tits; label="Titsias posterior")
-vline!(z; label="Pseudo-points")
-
+plot!(-1:0.001:1, ap_ex; label = "SVGP posterior")
+plot!(-1:0.001:1, ap_tits; label = "Titsias posterior")
+vline!(z; label = "Pseudo-points")
14 changes: 2 additions & 12 deletions src/SparseGPs.jl
@@ -13,19 +13,9 @@ using FillArrays
using KLDivergences

using AbstractGPs:
-    AbstractGP,
-    FiniteGP,
-    LatentFiniteGP,
-    ApproxPosteriorGP,
-    At_A,
-    diag_At_A,
-    Xt_invA_X
+    AbstractGP, FiniteGP, LatentFiniteGP, ApproxPosteriorGP, At_A, diag_At_A, Xt_invA_X

-export SVGP,
-    Default,
-    Analytic,
-    Quadrature,
-    MonteCarlo
+export SVGP, Default, Analytic, Quadrature, MonteCarlo

include("elbo.jl")
include("svgp.jl")