rs.py
import numpy as np  # engine for numerical computing

# abstract class of all Black-Box Optimizers (BBO)
from pypop7.optimizers.core.optimizer import Optimizer


class RS(Optimizer):
"""Random (stochastic) Search (optimization) (RS).
This is the **abstract** class for all `RS` classes. Please use any of
its instantiated subclasses to optimize the black-box problem at hand.
.. note:: `"Local search was reinvigorated in the early 1990s by
surprisingly good results for large (combinatorial) problems ...
and by the incorporation of randomness, multiple simultaneous
searches, and other improvements."---[Russell&Norvig, 2022, AIMA]
<http://aima.cs.berkeley.edu/global-index.html>`_
**Randomized Local Search (RLS)** is often seen as one of `heuristic
optimization algorithms, also called hill climbing, steepest ascent,
or greedy search <>`_.
    Parameters
    ----------
    problem : dict
              problem arguments with the following common settings (`keys`):
                * 'fitness_function' - objective function to be **minimized** (`func`),
                * 'ndim_problem'     - number of dimensions of the problem (`int`),
                * 'upper_boundary'   - upper boundary of search range (`array_like`),
                * 'lower_boundary'   - lower boundary of search range (`array_like`).
    options : dict
              optimizer options with the following common settings (`keys`):
                * 'max_function_evaluations' - maximum number of function evaluations (`int`, default: `np.inf`),
                * 'max_runtime'              - maximal runtime allowed (`float`, default: `np.inf`),
                * 'seed_rng'                 - seed for random number generation, which needs to be *explicitly* set (`int`);
              and with the following particular setting (`key`):
                * 'x' - initial (starting) point (`array_like`).

    Attributes
    ----------
    x : `array_like`
        initial (starting) point.
    Methods
    -------

    References
    ----------
    Nesterov, Y. and Spokoiny, V., 2017.
    Random gradient-free minimization of convex functions.
    Foundations of Computational Mathematics, 17(2), pp.527-566.
    https://link.springer.com/article/10.1007/s10208-015-9296-2

    Bergstra, J. and Bengio, Y., 2012.
    Random search for hyper-parameter optimization.
    Journal of Machine Learning Research, 13(2).
    https://www.jmlr.org/papers/v13/bergstra12a.html

    Appel, M.J., Labarre, R. and Radulovic, D., 2004.
    On accelerated random search.
    SIAM Journal on Optimization, 14(3), pp.708-731.
    https://epubs.siam.org/doi/abs/10.1137/S105262340240063X

    Schmidhuber, J., Hochreiter, S. and Bengio, Y., 2001.
    Evaluating benchmark problems by random guessing.
    A Field Guide to Dynamical Recurrent Networks, pp.231-235.
    https://ml.jku.at/publications/older/ch9.pdf

    Schmidhuber, J. and Hochreiter, S., 1996.
    Guessing can outperform many long time lag algorithms.
    Technical Report.
    https://www.bioinf.jku.at/publications/older/3204.pdf

    Rastrigin, L.A., 1986.
    Random search as a method for optimization and adaptation.
    In Stochastic Optimization.
    https://link.springer.com/chapter/10.1007/BFb0007129

    Solis, F.J. and Wets, R.J.B., 1981.
    Minimization by random search techniques.
    Mathematics of Operations Research, 6(1), pp.19-30.
    https://pubsonline.informs.org/doi/abs/10.1287/moor.6.1.19

    Schrack, G. and Choit, M., 1976.
    Optimized relative step size random searches.
    Mathematical Programming, 10(1), pp.230-244.
    https://link.springer.com/article/10.1007/BF01580669

    Schumer, M.A. and Steiglitz, K., 1968.
    Adaptive step size random search.
    IEEE Transactions on Automatic Control, 13(3), pp.270-276.
    https://ieeexplore.ieee.org/abstract/document/1098903

    Matyas, J., 1965.
    `Random optimization.
    <https://tinyurl.com/yj398bs5>`_
    Automation and Remote Control, 26(2), pp.246-253.

    Karnopp, D.C., 1963.
    Random search techniques for optimization problems.
    Automatica, 1(2-3), pp.111-121.
    https://www.sciencedirect.com/science/article/abs/pii/0005109863900189

    Rastrigin, L.A., 1963.
    The convergence of the random search method in the extremal control of a many parameter system.
    Automation and Remote Control, 24, pp.1337-1342.
    https://tinyurl.com/djfdnpx4

    Brooks, S.H., 1958.
    A discussion of random methods for seeking maxima.
    Operations Research, 6(2), pp.244-251.
    https://pubsonline.informs.org/doi/abs/10.1287/opre.6.2.244

    Ashby, W.R., 1952.
    `Design for a brain: The origin of adaptive behaviour.
    <https://link.springer.com/book/10.1007/978-94-015-1320-3>`_
    Springer.
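
    Examples
    --------
    A minimal usage sketch with one instantiated subclass (here the `PRS`
    subclass from `pypop7.optimizers.rs.prs` is assumed to be available; the
    sphere function and all settings below are illustrative only):

    .. code-block:: python

       >>> import numpy as np
       >>> from pypop7.optimizers.rs.prs import PRS
       >>> def sphere(x):  # toy objective function to be minimized
       ...     return float(np.sum(np.square(x)))
       >>> problem = {'fitness_function': sphere,  # objective function
       ...            'ndim_problem': 2,  # number of dimensions
       ...            'lower_boundary': -5.0*np.ones((2,)),  # search boundaries
       ...            'upper_boundary': 5.0*np.ones((2,))}
       >>> options = {'max_function_evaluations': 5000,  # budget of function evaluations
       ...            'seed_rng': 2022}  # seed for random number generation
       >>> results = PRS(problem, options).optimize()  # to run the optimization process
       >>> print(results['best_so_far_y'])  # best fitness found so far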
"""

    def __init__(self, problem, options):
        Optimizer.__init__(self, problem, options)
        self.x = options.get('x')  # initial (starting) point
        # reset the default value of `verbose` from 10 to 1000, since individual-based
        # iterative search typically runs (many) more generations
        self.verbose = options.get('verbose', 1000)
        self._n_generations = 0  # number of generations

    def initialize(self):  # to generate the starting point (defined by each subclass)
        raise NotImplementedError

    def iterate(self):  # to sample a new point in each iteration/generation (defined by each subclass)
        raise NotImplementedError

    def _print_verbose_info(self, fitness, y):
        if self.saving_fitness:  # to save all fitness values generated during optimization
            if not np.isscalar(y):
                fitness.extend(y)
            else:
                fitness.append(y)
        if self.verbose and ((not self._n_generations % self.verbose) or (self.termination_signal > 0)):
            info = ' * Generation {:d}: best_so_far_y {:7.5e}, min(y) {:7.5e} & Evaluations {:d}'
            print(info.format(self._n_generations, self.best_so_far_y, np.min(y), self.n_function_evaluations))

    def _collect(self, fitness, y=None):
        if y is not None:
            self._print_verbose_info(fitness, y)
        results = Optimizer._collect(self, fitness)
        results['_n_generations'] = self._n_generations
        return results

    def optimize(self, fitness_function=None, args=None):  # for all iterations (generations)
        fitness = Optimizer.optimize(self, fitness_function)
        x = self.initialize()  # starting point
        y = self._evaluate_fitness(x, args)  # fitness of starting point
        while not self._check_terminations():
            self._print_verbose_info(fitness, y)
            x = self.iterate()  # to sample a new point
            y = self._evaluate_fitness(x, args)  # to evaluate the new point
            self._n_generations += 1
        return self._collect(fitness, y)
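

# A minimal sketch of one possible concrete subclass (illustrative only, not part of
# the original module): it fills in `initialize` and `iterate` with uniform random
# sampling inside the search boundaries. The class name `UniformRS` and the local
# random generator `_sampler` are hypothetical; it is assumed here that the
# `Optimizer` base class exposes `self.ndim_problem`, `self.lower_boundary`, and
# `self.upper_boundary` taken from the `problem` dict.
class UniformRS(RS):
    def __init__(self, problem, options):
        RS.__init__(self, problem, options)
        # local random generator (hypothetical helper, seeded for reproducibility)
        self._sampler = np.random.default_rng(options.get('seed_rng'))

    def initialize(self):
        if self.x is None:  # when no starting point is given, sample it uniformly at random
            return self._sampler.uniform(self.lower_boundary, self.upper_boundary,
                                         size=(self.ndim_problem,))
        return np.copy(self.x)  # otherwise use the user-given starting point

    def iterate(self):  # to sample each new point uniformly inside the search boundaries
        return self._sampler.uniform(self.lower_boundary, self.upper_boundary,
                                     size=(self.ndim_problem,))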