-
Notifications
You must be signed in to change notification settings - Fork 2.4k
/
Copy pathsnobfit.py
114 lines (99 loc) · 3.88 KB
/
snobfit.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
# This code is part of Qiskit.
#
# (C) Copyright IBM 2019, 2020.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""Stable Noisy Optimization by Branch and FIT algorithm (SNOBFIT) optimizer."""
from typing import Any, Dict, Optional, Callable, Tuple, List
import numpy as np
from qiskit.utils import optionals as _optionals
from .optimizer import Optimizer, OptimizerSupportLevel, OptimizerResult, POINT
@_optionals.HAS_SKQUANT.require_in_instance
@_optionals.HAS_SQSNOBFIT.require_in_instance
class SNOBFIT(Optimizer):
    """Stable Noisy Optimization by Branch and FIT algorithm.

    SnobFit is used for the optimization of derivative-free, noisy objective functions providing
    robust and fast solutions of problems with continuous variables varying within bound.

    Uses skquant.opt installed with pip install scikit-quant.
    For further detail, please refer to
    https://github.com/scikit-quant/scikit-quant and https://qat4chem.lbl.gov/software.
    """

    def __init__(
        self,
        maxiter: int = 1000,
        maxfail: int = 10,
        maxmp: Optional[int] = None,
        verbose: bool = False,
    ) -> None:
        """
        Args:
            maxiter: Maximum number of function evaluations.
            maxmp: Maximum number of model points requested for the local fit.
                Default = 2 * number of parameters + 6 set to this value when None.
            maxfail: Maximum number of failures to improve the solution. Stops the algorithm
                after maxfail is reached.
            verbose: Provide verbose (debugging) output.

        Raises:
            MissingOptionalLibraryError: scikit-quant or SQSnobFit not installed
        """
        super().__init__()
        self._maxiter = maxiter
        self._maxfail = maxfail
        self._maxmp = maxmp
        self._verbose = verbose

    def get_support_level(self):
        """Returns support level dictionary."""
        return {
            "gradient": OptimizerSupportLevel.ignored,
            "bounds": OptimizerSupportLevel.required,
            "initial_point": OptimizerSupportLevel.required,
        }

    @property
    def settings(self) -> Dict[str, Any]:
        """Return the optimizer settings as a dictionary of constructor arguments."""
        return {
            "maxiter": self._maxiter,
            "maxfail": self._maxfail,
            "maxmp": self._maxmp,
            "verbose": self._verbose,
        }

    def minimize(
        self,
        fun: Callable[[POINT], float],
        x0: POINT,
        jac: Optional[Callable[[POINT], POINT]] = None,
        bounds: Optional[List[Tuple[float, float]]] = None,
    ) -> OptimizerResult:
        """Minimize ``fun`` using the SNOBFIT algorithm.

        Args:
            fun: The objective function to minimize.
            x0: The initial point.
            jac: Gradient callable; ignored, since SNOBFIT is derivative-free.
            bounds: One ``(lower, upper)`` pair per parameter. SNOBFIT requires
                finite bounds on every variable (see ``get_support_level``).

        Returns:
            The result of the optimization, with the point, value and number of
            function evaluations filled in.

        Raises:
            ValueError: If ``bounds`` is ``None`` or contains non-finite entries.
        """
        # Deferred imports: the class decorators already verified that these
        # optional packages are installed.
        import skquant.opt as skq
        from SQSnobFit import optset

        # Bounds are declared "required" in get_support_level; fail with a clear
        # message instead of an opaque TypeError on ``bounds[idx]`` below.
        bounds_arr = None if bounds is None else np.asarray(bounds, dtype=float)
        if bounds_arr is None or not np.all(np.isfinite(bounds_arr)):
            raise ValueError("SNOBFIT requires finite bounds on all parameters.")

        snobfit_settings = {
            "maxmp": self._maxmp,
            "maxfail": self._maxfail,
            "verbose": self._verbose,
        }
        options = optset(optin=snobfit_settings)

        # SNOBFIT errors out when the initial point lies outside the bounds, so
        # clip the initial point into the feasible box.  (The previous code took
        # ``x0 % bound`` keyed on the *lower* bound, which is wrong for negative
        # bounds, never reached the upper-bound branch, and could divide by zero
        # for a zero bound.)
        x0 = np.clip(np.asarray(x0, dtype=float), bounds_arr[:, 0], bounds_arr[:, 1])

        res, history = skq.minimize(
            fun,
            x0,
            bounds=bounds,
            budget=self._maxiter,
            method="snobfit",
            options=options,
        )

        optimizer_result = OptimizerResult()
        optimizer_result.x = res.optpar
        optimizer_result.fun = res.optval
        optimizer_result.nfev = len(history)
        return optimizer_result