|
1 | | -"""Tests for constrained optimization with constraint functions.""" |
| 1 | +"""Tests for constrained optimization.""" |
2 | 2 |
|
3 | 3 | import numpy as np |
4 | 4 | import pytest |
5 | 5 |
|
6 | | -from ._parametrize import optimizers_representative |
| 6 | +from ._parametrize import optimizers |
7 | 7 |
|
8 | 8 |
|
@pytest.mark.parametrize(*optimizers)
def test_constraint_single(Optimizer):
    """A single constraint must keep every evaluated position inside it."""
    search_space = {"x1": np.arange(-10, 10, 1)}

    def feasible(params):
        # Only positions strictly right of -5 are allowed.
        return params["x1"] > -5

    def objective(params):
        return -(params["x1"] ** 2)

    opt = Optimizer(search_space, constraints=[feasible], random_state=42)
    opt.search(objective, n_iter=30, verbosity=False)

    # No sampled position may violate the constraint.
    assert np.all(opt.search_data["x1"].values > -5)
@pytest.mark.parametrize(*optimizers)
def test_constraint_multiple(Optimizer):
    """Every evaluated position must satisfy all constraints simultaneously."""
    space = {"x1": np.arange(-10, 10, 0.5)}

    def above_lower(params):
        return params["x1"] > -3

    def below_upper(params):
        return params["x1"] < 3

    opt = Optimizer(
        space,
        constraints=[above_lower, below_upper],
        random_state=42,
    )
    opt.search(lambda p: -(p["x1"] ** 2), n_iter=30, verbosity=False)

    sampled = opt.search_data["x1"].values
    # Both bounds must hold for the entire search history.
    assert np.all(sampled > -3)
    assert np.all(sampled < 3)
|
34 | 37 |
|
@pytest.mark.parametrize(*optimizers)
def test_constraint_2d(Optimizer):
    """Independent constraints on two dimensions restrict both columns."""
    search_space = {
        "x1": np.arange(-10, 10, 1),
        "x2": np.arange(-10, 10, 1),
    }

    def x1_positive(params):
        return params["x1"] > 0

    def x2_positive(params):
        return params["x2"] > 0

    def sphere(params):
        # Negated sphere function: optimum at the origin.
        return -(params["x1"] ** 2 + params["x2"] ** 2)

    opt = Optimizer(
        search_space,
        constraints=[x1_positive, x2_positive],
        random_state=42,
    )
    opt.search(sphere, n_iter=50, verbosity=False)

    frame = opt.search_data
    assert np.all(frame["x1"].values > 0)
    assert np.all(frame["x2"].values > 0)
| 58 | + |
| 59 | + |
@pytest.mark.parametrize(*optimizers)
def test_constraint_adversarial(Optimizer):
    """Optimum at x1=-80 is in the excluded region (x1 <= 50)."""
    search_space = {"x1": np.arange(-100, 100, 1)}

    def feasible(params):
        return params["x1"] > 50

    def distance_score(params):
        # Best score sits at x1 == -80, which the constraint forbids —
        # the optimizer must never be lured into the infeasible region.
        return -abs(params["x1"] - (-80))

    opt = Optimizer(search_space, constraints=[feasible], random_state=42)
    opt.search(distance_score, n_iter=100, verbosity=False)

    assert np.all(opt.search_data["x1"].values > 50)
| 78 | + |
@pytest.mark.parametrize(*optimizers)
def test_constraint_tracking_consistency(Optimizer):
    """Internal position/score bookkeeping stays consistent under constraints."""
    n_iter = 30
    search_space = {"x1": np.arange(-10, 10, 0.1)}

    def lower_bound(params):
        return params["x1"] > -5

    def upper_bound(params):
        return params["x1"] < 5

    opt = Optimizer(
        search_space,
        constraints=[lower_bound, upper_bound],
        random_state=42,
    )
    opt.search(lambda p: -(p["x1"] ** 2), n_iter=n_iter, verbosity=False)

    def total(attr):
        # Sum the list length of one tracking attribute over all sub-optimizers.
        return sum(len(getattr(sub, attr)) for sub in opt.optimizers)

    n_new = total("_pos_new_list")
    n_new_scores = total("_score_new_list")
    n_current = total("_pos_current_list")
    n_current_scores = total("_score_current_list")
    n_best = total("_pos_best_list")
    n_best_scores = total("_score_best_list")

    # Every iteration yields exactly one new position paired with a score ...
    assert n_new == n_iter
    assert n_new == n_new_scores
    # ... while current/best entries stay paired and are subsets of the new ones.
    assert n_current == n_current_scores
    assert n_current <= n_new
    assert n_best == n_best_scores
    assert n_best <= n_new
0 commit comments