fix: x0 shape in BaseSampler; update sampler & observer tests
BradyPlanden committed Oct 23, 2024 (1 parent 878b6a4, commit 7a50b7a)
Showing 3 changed files with 36 additions and 29 deletions.
pybop/samplers/base_sampler.py (4 changes: 3 additions & 1 deletion)

@@ -53,7 +53,9 @@ def __init__(
 
         # Set initial values, if x0 is None, initial values are unmodified.
         self.parameters.update(initial_values=x0 if x0 is not None else None)
-        self._x0 = self.parameters.reset_initial_value(apply_transform=True)
+        self._x0 = self.parameters.reset_initial_value(apply_transform=True).reshape(
+            1, -1
+        )
 
         if len(self._x0) != self._n_chains or len(self._x0) == 1:
             self._x0 = np.tile(self._x0, (self._n_chains, 1))
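Why the reshape matters: with a 1-D `x0`, `len(self._x0)` counts parameters rather than rows of start points, so the tiling branch above is skipped whenever the parameter count happens to equal the chain count. A minimal NumPy sketch of the failure mode (names and values are illustrative, not taken from pybop):

```python
import numpy as np

n_chains = 3
x0 = np.array([0.55, 0.55, 3e-3])  # one start point, three parameters

# Before the fix: len() of the 1-D array counts parameters (3), which
# equals n_chains here, so the tiling condition is False and x0 never
# becomes one row per chain.
print(len(x0) != n_chains or len(x0) == 1)  # False -> stays shape (3,)

# After the fix: one row per start point, so len() counts rows (1)
# and the tile produces one start vector per chain.
x0_2d = x0.reshape(1, -1)                   # shape (1, 3)
if len(x0_2d) != n_chains or len(x0_2d) == 1:
    x0_2d = np.tile(x0_2d, (n_chains, 1))
print(x0_2d.shape)                          # (3, 3)
```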
tests/integration/test_monte_carlo.py (58 changes: 32 additions & 26 deletions)

@@ -21,9 +21,10 @@ class Test_Sampling_SPM:
     @pytest.fixture(autouse=True)
     def setup(self):
         self.ground_truth = np.clip(
-            np.asarray([0.55, 0.55]) + np.random.normal(loc=0.0, scale=0.05, size=2),
-            a_min=0.4,
-            a_max=0.75,
+            np.asarray([0.55, 0.55, 3e-3])
+            + np.random.normal(0, [5e-2, 5e-2, 1e-4], size=3),
+            [0.4, 0.4, 1e-5],
+            [0.7, 0.7, 1e-2],
         )
 
     @pytest.fixture
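The ground truth gains a third entry, presumably the observation-noise sigma that `GaussianLogLikelihood` now infers alongside the two SPM parameters, and the clip bounds switch from scalar `a_min`/`a_max` to per-parameter arrays. A small NumPy illustration of the elementwise form, with values copied from the hunk above:

```python
import numpy as np

# Per-parameter bounds: each entry is clipped to its own range, which a
# single scalar a_min/a_max could not express once a ~3e-3 sigma joins
# two parameters that live near 0.55.
truth = np.asarray([0.55, 0.55, 3e-3]) + np.random.normal(
    0, [5e-2, 5e-2, 1e-4], size=3
)
truth = np.clip(truth, [0.4, 0.4, 1e-5], [0.7, 0.7, 1e-2])
print(truth)  # each entry lands within its own [low, high] pair
```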
@@ -58,19 +59,11 @@ def parameters(self):
     def init_soc(self, request):
         return request.param
 
-    @pytest.fixture(
-        params=[
-            pybop.GaussianLogLikelihoodKnownSigma,
-        ]
-    )
-    def cost_class(self, request):
-        return request.param
-
     def noise(self, sigma, values):
         return np.random.normal(0, sigma, values)
 
     @pytest.fixture
-    def spm_likelihood(self, model, parameters, cost_class, init_soc):
+    def log_posterior(self, model, parameters, init_soc):
         # Form dataset
         solution = self.get_data(model, init_soc)
         dataset = pybop.Dataset(
@@ -82,9 +75,22 @@ def spm_likelihood(self, model, parameters, cost_class, init_soc):
             }
         )
 
-        # Define the cost to optimise
+        # Define the posterior to optimise
         problem = pybop.FittingProblem(model, parameters, dataset)
-        return cost_class(problem, sigma0=0.002)
+        likelihood = pybop.GaussianLogLikelihood(problem, sigma0=0.002 * 1.2)
+        return pybop.LogPosterior(likelihood)
+
+    @pytest.fixture
+    def map_estimate(self, log_posterior):
+        common_args = {
+            "max_iterations": 100,
+            "max_unchanged_iterations": 35,
+            "absolute_tolerance": 1e-7,
+        }
+        optim = pybop.CMAES(log_posterior, **common_args)
+        results = optim.run()
+
+        return results.x
 
     @pytest.mark.parametrize(
         "quick_sampler",
@@ -98,29 +104,29 @@ def spm_likelihood(self, model, parameters, cost_class, init_soc):
         ],
     )
     @pytest.mark.integration
-    def test_sampling_spm(self, quick_sampler, spm_likelihood):
-        posterior = pybop.LogPosterior(spm_likelihood)
-
+    def test_sampling_spm(self, quick_sampler, log_posterior, map_estimate):
+        x0 = np.clip(
+            map_estimate + np.random.normal(0, [5e-3, 5e-3, 1e-4], size=3),
+            [0.4, 0.4, 1e-5],
+            [0.75, 0.75, 5e-2],
+        )
         # set common args
         common_args = {
-            "log_pdf": posterior,
+            "log_pdf": log_posterior,
             "chains": 3,
-            "warm_up": 250,
+            "x0": x0,
+            "warm_up": 150,
             "max_iterations": 550,
         }
 
         if issubclass(quick_sampler, DifferentialEvolutionMCMC):
             common_args["warm_up"] = 750
             common_args["max_iterations"] = 900
         # construct and run
         sampler = quick_sampler(**common_args)
-        results = sampler.run()
+        chains = sampler.run()
 
         # Assert both final sample and posterior mean
-        x = np.mean(results, axis=1)
+        x = np.mean(chains, axis=1)
         for i in range(len(x)):
-            np.testing.assert_allclose(x[i], self.ground_truth, atol=2.5e-2)
-            np.testing.assert_allclose(results[i][-1], self.ground_truth, atol=2.0e-2)
+            np.testing.assert_allclose(x[i], self.ground_truth, atol=1.5e-2)
 
     def get_data(self, model, init_soc):
         initial_state = {"Initial SoC": init_soc}
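The rewritten assertion keeps only the posterior-mean check, at a tighter tolerance. Taking the mean over `axis=1` implies the samplers return arrays shaped `(n_chains, n_samples, n_parameters)`; a shape-level sketch under that assumption (the layout is inferred from the axis choice, not stated in the diff):

```python
import numpy as np

rng = np.random.default_rng(1)
ground_truth = np.array([0.55, 0.55, 3e-3])

# Assumed layout: (n_chains, n_samples, n_parameters).
chains = ground_truth + rng.normal(0, 1e-3, size=(3, 550, 3))

x = np.mean(chains, axis=1)  # per-chain posterior means, shape (3, 3)
for i in range(len(x)):
    np.testing.assert_allclose(x[i], ground_truth, atol=1.5e-2)
```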
tests/integration/test_observer_parameterisation.py (3 changes: 1 addition & 2 deletions)

@@ -82,15 +82,14 @@ def test_observer_exponential_decay(self, parameters, model):
 
         # Generate cost function, and optimisation class
         cost = pybop.ObserverCost(observer)
-        optim = pybop.CMAES(cost, verbose=True)
+        optim = pybop.CMAES(cost, max_unchanged_iterations=25, verbose=True)
 
         # Initial Cost
         x0 = cost.parameters.initial_value()
         initial_cost = optim.cost(x0)
 
         # Run optimisation
         results = optim.run()
-        print("Estimated parameters:", results.x)
 
         # Assertions
         if not np.allclose(x0, self.ground_truth, atol=1e-5):
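Adding `max_unchanged_iterations=25` bounds how long CMAES may stagnate before stopping, which keeps the test fast now that the debug print is gone. A toy sketch of this style of stopping rule (pybop's actual bookkeeping is assumed, not shown in the diff):

```python
# Stop once 25 consecutive iterations fail to improve the best cost.
best, unchanged, max_unchanged = float("inf"), 0, 25
for it in range(1000):
    cost = max(1.0 - 0.01 * it, 0.5)  # improves, then plateaus at 0.5
    if cost < best - 1e-12:
        best, unchanged = cost, 0
    else:
        unchanged += 1
    if unchanged >= max_unchanged:
        break
print(it, best)  # halts ~25 iterations after the plateau begins
```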
