test_optimization.py
# logging is used directly below; import it explicitly rather than relying
# on the wildcard imports.
import logging
import multiprocessing

import numpy as np
from numpy.testing import assert_allclose, run_module_suite, assert_
import pytest

from repeater_mc import repeater_mc
from repeater_algorithm import repeater_sim
from protocol_units import *
from logging_utilities import *
from utility_functions import *
from optimize_cutoff import CutoffOptimizer


def test_opt_adaptive_trunc():
    """
    The truncation time t_trunc should be increased adaptively
    during the optimization.
    """
    np.random.seed(1)
    parameters = {
        "protocol": (0, ),
        "p_gen": 0.1,
        "p_swap": 0.5,
        "w0": 0.99,
        "t_coh": 30,
        "t_trunc": 300,
    }
    logging.info("Full tau optimization\n")
    # Run the adaptive cut-off optimization and check the resulting
    # memory cut-off for this seed.
    opt = CutoffOptimizer(adaptive=True)
    best_cutoff_dict = opt.run(parameters)
    assert best_cutoff_dict["memory_time"] == (6,)


def test_opt_adaptive_search_range():
    """
    The search range should be restricted by the adaptive optimizer.
    """
    np.random.seed(3)
    parameters = {
        "protocol": (0, 0),
        "p_gen": 0.2,
        "p_swap": 0.6,
        "w0": 0.95,
        "t_coh": 300,
        "t_trunc": 400,
    }
    # A small population size keeps the optimization fast.
    opt = CutoffOptimizer(adaptive=True, popsize=5)
    best_cutoff_dict = opt.run(parameters)
    assert_allclose(best_cutoff_dict["memory_time"], (18, 30))


def test_opt_uniform():
    """
    Optimize a uniform memory cut-off, shared by all levels.
    """
    np.random.seed(0)
    parameters = {
        "protocol": (0, 0),
        "p_gen": 0.1,
        "p_swap": 0.8,
        "t_trunc": 500,
        "w0": 0.99,
        "t_coh": 400,
    }
    opt = CutoffOptimizer(
        opt_kind="uniform_de", adaptive=True)
    best_cutoff_dict = opt.run(parameters)
    # The uniform optimizer returns the same cut-off for both levels.
    assert_allclose(best_cutoff_dict["memory_time"], (45, 45))
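

if __name__ == "__main__":
    # Minimal sketch for running these tests directly; assumes pytest
    # (imported above) is installed. Equivalent to invoking
    # `pytest test_optimization.py -v` from the command line.
    pytest.main([__file__, "-v"])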