src/daComposant/daAlgorithms/DerivativeFreeOptimization.py
# -*- coding: utf-8 -*-
#
# Copyright (C) 2008-2022 EDF R&D
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
#
# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
#
# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D

import numpy, logging, scipy.optimize
from daCore import BasicObjects, PlatformInfo
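# This algorithm performs a derivative-free minimization of the data
# assimilation cost function J(x) = Jb(x) + Jo(x), using one of the local
# optimizers listed in the "Minimizer" parameter (provided by nlopt when the
# library is available, or by scipy.optimize fallbacks otherwise).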

# ==============================================================================
class ElementaryAlgorithm(BasicObjects.Algorithm):
    def __init__(self):
        BasicObjects.Algorithm.__init__(self, "DERIVATIVEFREEOPTIMIZATION")
        self.defineRequiredParameter(
            name     = "Minimizer",
            default  = "BOBYQA",
            typecast = str,
            message  = "Minimizer used",
            listval  = [
                "BOBYQA",
                "COBYLA",
                "NEWUOA",
                "POWELL",
                "SIMPLEX",
                "SUBPLEX",
                ],
            )
        self.defineRequiredParameter(
            name     = "MaximumNumberOfIterations",
            default  = 15000,
            typecast = int,
            message  = "Maximum number of optimization steps",
            minval   = -1,
            )
        self.defineRequiredParameter(
            name     = "MaximumNumberOfFunctionEvaluations",
            default  = 15000,
            typecast = int,
            message  = "Maximum number of function evaluations",
            minval   = -1,
            )
        self.defineRequiredParameter(
            name     = "StateVariationTolerance",
            default  = 1.e-4,
            typecast = float,
            message  = "Maximum relative variation of the state at stopping",
            )
        self.defineRequiredParameter(
            name     = "CostDecrementTolerance",
            default  = 1.e-7,
            typecast = float,
            message  = "Minimum relative decrease of the cost at stopping",
            )
        self.defineRequiredParameter(
            name     = "QualityCriterion",
            default  = "AugmentedWeightedLeastSquares",
            typecast = str,
            message  = "Quality criterion used",
            listval  = [
                "AugmentedWeightedLeastSquares", "AWLS", "DA",
                "WeightedLeastSquares", "WLS",
                "LeastSquares", "LS", "L2",
                "AbsoluteValue", "L1",
                "MaximumError", "ME",
                ],
            )
        self.defineRequiredParameter(
            name     = "StoreInternalVariables",
            default  = False,
            typecast = bool,
            message  = "Storage of the internal or intermediate variables of the calculation",
            )
        self.defineRequiredParameter(
            name     = "StoreSupplementaryCalculations",
            default  = [],
            typecast = tuple,
            message  = "List of supplementary calculations to store and/or perform",
            listval  = [
                "Analysis",
                "BMA",
                "CostFunctionJ",
                "CostFunctionJb",
                "CostFunctionJo",
                "CostFunctionJAtCurrentOptimum",
                "CostFunctionJbAtCurrentOptimum",
                "CostFunctionJoAtCurrentOptimum",
                "CurrentIterationNumber",
                "CurrentOptimum",
                "CurrentState",
                "IndexOfOptimum",
                "Innovation",
                "InnovationAtCurrentState",
                "OMA",
                "OMB",
                "SimulatedObservationAtBackground",
                "SimulatedObservationAtCurrentOptimum",
                "SimulatedObservationAtCurrentState",
                "SimulatedObservationAtOptimum",
                ]
            )
        self.defineRequiredParameter( # No type
            name     = "Bounds",
            message  = "List of bound values",
            )
        self.requireInputArguments(
            mandatory= ("Xb", "Y", "HO", "R", "B"),
            )
        self.setAttributes(tags=(
            "Optimization",
            "NonLinear",
            "MetaHeuristic",
            ))

    def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
        self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
        #
        if not PlatformInfo.has_nlopt and self._parameters["Minimizer"] not in ["COBYLA", "POWELL", "SIMPLEX"]:
            logging.warning("%s Minimization by SIMPLEX is forced because %s is unavailable without the nlopt library (COBYLA and POWELL are also available)"%(self._name,self._parameters["Minimizer"]))
            self._parameters["Minimizer"] = "SIMPLEX"
        #
        Hm = HO["Direct"].appliedTo
        #
        # Inverses of the background and observation error covariance matrices
        BI = B.getI()
        RI = R.getI()
        #
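        # Cost function minimized by every optimizer below: J(x) = Jb + Jo, where
        # Jb is the background departure term and Jo the observation departure
        # term, weighted according to the selected QualityMeasure. Each call also
        # stores the current state and the requested diagnostics.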
        def CostFunction(x, QualityMeasure="AugmentedWeightedLeastSquares"):
            _X  = numpy.ravel( x ).reshape((-1,1))
            _HX = numpy.ravel( Hm( _X ) ).reshape((-1,1))
            _Innovation = Y - _HX
            self.StoredVariables["CurrentState"].store( _X )
            if self._toStore("SimulatedObservationAtCurrentState") or \
                self._toStore("SimulatedObservationAtCurrentOptimum"):
                self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX )
            if self._toStore("InnovationAtCurrentState"):
                self.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
            #
            if QualityMeasure in ["AugmentedWeightedLeastSquares","AWLS","DA"]:
                if BI is None or RI is None:
                    raise ValueError("Background and Observation error covariance matrices have to be properly defined!")
                Jb  = 0.5 * (_X - Xb).T @ (BI @ (_X - Xb))
                Jo  = 0.5 * _Innovation.T @ (RI @ _Innovation)
            elif QualityMeasure in ["WeightedLeastSquares","WLS"]:
                if RI is None:
                    raise ValueError("Observation error covariance matrix has to be properly defined!")
                Jb  = 0.
                Jo  = 0.5 * _Innovation.T @ (RI @ _Innovation)
            elif QualityMeasure in ["LeastSquares","LS","L2"]:
                Jb  = 0.
                Jo  = 0.5 * _Innovation.T @ _Innovation
            elif QualityMeasure in ["AbsoluteValue","L1"]:
                Jb  = 0.
                Jo  = numpy.sum( numpy.abs(_Innovation) )
            elif QualityMeasure in ["MaximumError","ME"]:
                Jb  = 0.
                Jo  = numpy.max( numpy.abs(_Innovation) )
            #
            J   = float( Jb ) + float( Jo )
            #
            self.StoredVariables["CurrentIterationNumber"].store( len(self.StoredVariables["CostFunctionJ"]) )
            self.StoredVariables["CostFunctionJb"].store( Jb )
            self.StoredVariables["CostFunctionJo"].store( Jo )
            self.StoredVariables["CostFunctionJ" ].store( J )
            if self._toStore("IndexOfOptimum") or \
                self._toStore("CurrentOptimum") or \
                self._toStore("CostFunctionJAtCurrentOptimum") or \
                self._toStore("CostFunctionJbAtCurrentOptimum") or \
                self._toStore("CostFunctionJoAtCurrentOptimum") or \
                self._toStore("SimulatedObservationAtCurrentOptimum"):
                IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
            if self._toStore("IndexOfOptimum"):
                self.StoredVariables["IndexOfOptimum"].store( IndexMin )
            if self._toStore("CurrentOptimum"):
                self.StoredVariables["CurrentOptimum"].store( self.StoredVariables["CurrentState"][IndexMin] )
            if self._toStore("SimulatedObservationAtCurrentOptimum"):
                self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
            if self._toStore("CostFunctionJAtCurrentOptimum"):
                self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] )
            if self._toStore("CostFunctionJbAtCurrentOptimum"):
                self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJb"][IndexMin] )
            if self._toStore("CostFunctionJoAtCurrentOptimum"):
                self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJo"][IndexMin] )
            return J
        #
        Xini = numpy.ravel(Xb)
        if len(Xini) < 2 and self._parameters["Minimizer"] == "NEWUOA":
            raise ValueError("The minimizer %s cannot be used when the optimization state dimension is 1. Please choose another minimizer."%self._parameters["Minimizer"])
        #
        # Minimization of the cost functional
        # -----------------------------------
        nbPreviousSteps = self.StoredVariables["CostFunctionJ"].stepnumber()
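        # nbPreviousSteps marks where the J values of the current run start in
        # the (possibly already filled) storage, so that the optimum searches
        # above and below only span this minimization.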
        #
        if self._parameters["Minimizer"] == "POWELL":
            Minimum, J_optimal, direc, niter, nfeval, rc = scipy.optimize.fmin_powell(
                func        = CostFunction,
                x0          = Xini,
                args        = (self._parameters["QualityCriterion"],),
                maxiter     = self._parameters["MaximumNumberOfIterations"]-1,
                maxfun      = self._parameters["MaximumNumberOfFunctionEvaluations"],
                xtol        = self._parameters["StateVariationTolerance"],
                ftol        = self._parameters["CostDecrementTolerance"],
                full_output = True,
                disp        = self._parameters["optdisp"],
                )
        elif self._parameters["Minimizer"] == "COBYLA" and not PlatformInfo.has_nlopt:
            def make_constraints(bounds):
                # Build the "g(x) >= 0" inequality constraints expected by
                # fmin_cobyla from the (lower, upper) bound pairs. The default
                # arguments bind i, a and b at definition time; otherwise all
                # the lambdas would capture only the last loop values.
                constraints = []
                for (i,(a,b)) in enumerate(bounds):
                    lower = lambda x, i=i, a=a: x[i] - a
                    upper = lambda x, i=i, b=b: b - x[i]
                    constraints = constraints + [lower] + [upper]
                return constraints
            if self._parameters["Bounds"] is None:
                raise ValueError("Bounds have to be given for all axes as a list of lower/upper pairs!")
            Minimum = scipy.optimize.fmin_cobyla(
                func        = CostFunction,
                x0          = Xini,
                cons        = make_constraints( self._parameters["Bounds"] ),
                args        = (self._parameters["QualityCriterion"],),
                consargs    = (), # To avoid extra-args
                maxfun      = self._parameters["MaximumNumberOfFunctionEvaluations"],
                rhobeg      = 1.0,
                rhoend      = self._parameters["StateVariationTolerance"],
                catol       = 2.*self._parameters["StateVariationTolerance"],
                disp        = self._parameters["optdisp"],
                )
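        # The nlopt-based branches below (COBYLA, SIMPLEX/Nelder-Mead, BOBYQA,
        # NEWUOA, SUBPLEX) share the same setup: NaN bounds are mapped to +/-inf,
        # the stopping tests use relative tolerances on the cost and on the
        # state, and the number of function evaluations is capped.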
        elif self._parameters["Minimizer"] == "COBYLA" and PlatformInfo.has_nlopt:
            import nlopt
            opt = nlopt.opt(nlopt.LN_COBYLA, Xini.size)
            def _f(_Xx, Grad):
                # DFO, so no gradient
                return CostFunction(_Xx, self._parameters["QualityCriterion"])
            opt.set_min_objective(_f)
            if self._parameters["Bounds"] is not None:
                lub = numpy.array(self._parameters["Bounds"],dtype=float).reshape((Xini.size,2))
                lb = lub[:,0] ; lb[numpy.isnan(lb)] = -float('inf')
                ub = lub[:,1] ; ub[numpy.isnan(ub)] = +float('inf')
                if self._parameters["optdisp"]:
                    print("%s: upper bounds %s"%(opt.get_algorithm_name(),ub))
                    print("%s: lower bounds %s"%(opt.get_algorithm_name(),lb))
                opt.set_upper_bounds(ub)
                opt.set_lower_bounds(lb)
            opt.set_ftol_rel(self._parameters["CostDecrementTolerance"])
            opt.set_xtol_rel(2.*self._parameters["StateVariationTolerance"])
            opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
            Minimum = opt.optimize( Xini )
            if self._parameters["optdisp"]:
                print("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum))
                print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value()))
                print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result()))
        elif self._parameters["Minimizer"] == "SIMPLEX" and not PlatformInfo.has_nlopt:
            Minimum, J_optimal, niter, nfeval, rc = scipy.optimize.fmin(
                func        = CostFunction,
                x0          = Xini,
                args        = (self._parameters["QualityCriterion"],),
                maxiter     = self._parameters["MaximumNumberOfIterations"]-1,
                maxfun      = self._parameters["MaximumNumberOfFunctionEvaluations"],
                xtol        = self._parameters["StateVariationTolerance"],
                ftol        = self._parameters["CostDecrementTolerance"],
                full_output = True,
                disp        = self._parameters["optdisp"],
                )
        elif self._parameters["Minimizer"] == "SIMPLEX" and PlatformInfo.has_nlopt:
            import nlopt
            opt = nlopt.opt(nlopt.LN_NELDERMEAD, Xini.size)
            def _f(_Xx, Grad):
                # DFO, so no gradient
                return CostFunction(_Xx, self._parameters["QualityCriterion"])
            opt.set_min_objective(_f)
            if self._parameters["Bounds"] is not None:
                lub = numpy.array(self._parameters["Bounds"],dtype=float).reshape((Xini.size,2))
                lb = lub[:,0] ; lb[numpy.isnan(lb)] = -float('inf')
                ub = lub[:,1] ; ub[numpy.isnan(ub)] = +float('inf')
                if self._parameters["optdisp"]:
                    print("%s: upper bounds %s"%(opt.get_algorithm_name(),ub))
                    print("%s: lower bounds %s"%(opt.get_algorithm_name(),lb))
                opt.set_upper_bounds(ub)
                opt.set_lower_bounds(lb)
            opt.set_ftol_rel(self._parameters["CostDecrementTolerance"])
            opt.set_xtol_rel(2.*self._parameters["StateVariationTolerance"])
            opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
            Minimum = opt.optimize( Xini )
            if self._parameters["optdisp"]:
                print("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum))
                print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value()))
                print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result()))
        elif self._parameters["Minimizer"] == "BOBYQA" and PlatformInfo.has_nlopt:
            import nlopt
            opt = nlopt.opt(nlopt.LN_BOBYQA, Xini.size)
            def _f(_Xx, Grad):
                # DFO, so no gradient
                return CostFunction(_Xx, self._parameters["QualityCriterion"])
            opt.set_min_objective(_f)
            if self._parameters["Bounds"] is not None:
                lub = numpy.array(self._parameters["Bounds"],dtype=float).reshape((Xini.size,2))
                lb = lub[:,0] ; lb[numpy.isnan(lb)] = -float('inf')
                ub = lub[:,1] ; ub[numpy.isnan(ub)] = +float('inf')
                if self._parameters["optdisp"]:
                    print("%s: upper bounds %s"%(opt.get_algorithm_name(),ub))
                    print("%s: lower bounds %s"%(opt.get_algorithm_name(),lb))
                opt.set_upper_bounds(ub)
                opt.set_lower_bounds(lb)
            opt.set_ftol_rel(self._parameters["CostDecrementTolerance"])
            opt.set_xtol_rel(2.*self._parameters["StateVariationTolerance"])
            opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
            Minimum = opt.optimize( Xini )
            if self._parameters["optdisp"]:
                print("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum))
                print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value()))
                print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result()))
        elif self._parameters["Minimizer"] == "NEWUOA" and PlatformInfo.has_nlopt:
            import nlopt
            opt = nlopt.opt(nlopt.LN_NEWUOA, Xini.size)
            def _f(_Xx, Grad):
                # DFO, so no gradient
                return CostFunction(_Xx, self._parameters["QualityCriterion"])
            opt.set_min_objective(_f)
            if self._parameters["Bounds"] is not None:
                lub = numpy.array(self._parameters["Bounds"],dtype=float).reshape((Xini.size,2))
                lb = lub[:,0] ; lb[numpy.isnan(lb)] = -float('inf')
                ub = lub[:,1] ; ub[numpy.isnan(ub)] = +float('inf')
                if self._parameters["optdisp"]:
                    print("%s: upper bounds %s"%(opt.get_algorithm_name(),ub))
                    print("%s: lower bounds %s"%(opt.get_algorithm_name(),lb))
                opt.set_upper_bounds(ub)
                opt.set_lower_bounds(lb)
            opt.set_ftol_rel(self._parameters["CostDecrementTolerance"])
            opt.set_xtol_rel(2.*self._parameters["StateVariationTolerance"])
            opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
            Minimum = opt.optimize( Xini )
            if self._parameters["optdisp"]:
                print("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum))
                print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value()))
                print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result()))
        elif self._parameters["Minimizer"] == "SUBPLEX" and PlatformInfo.has_nlopt:
            import nlopt
            opt = nlopt.opt(nlopt.LN_SBPLX, Xini.size)
            def _f(_Xx, Grad):
                # DFO, so no gradient
                return CostFunction(_Xx, self._parameters["QualityCriterion"])
            opt.set_min_objective(_f)
            if self._parameters["Bounds"] is not None:
                lub = numpy.array(self._parameters["Bounds"],dtype=float).reshape((Xini.size,2))
                lb = lub[:,0] ; lb[numpy.isnan(lb)] = -float('inf')
                ub = lub[:,1] ; ub[numpy.isnan(ub)] = +float('inf')
                if self._parameters["optdisp"]:
                    print("%s: upper bounds %s"%(opt.get_algorithm_name(),ub))
                    print("%s: lower bounds %s"%(opt.get_algorithm_name(),lb))
                opt.set_upper_bounds(ub)
                opt.set_lower_bounds(lb)
            opt.set_ftol_rel(self._parameters["CostDecrementTolerance"])
            opt.set_xtol_rel(2.*self._parameters["StateVariationTolerance"])
            opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
            Minimum = opt.optimize( Xini )
            if self._parameters["optdisp"]:
                print("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum))
                print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value()))
                print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result()))
        else:
            raise ValueError("Error in minimizer name: %s is unknown"%self._parameters["Minimizer"])
        #
        # The retained optimum is the best of all the states stored during the
        # run, rather than the raw value returned by the minimizer.
        IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
        MinJ     = self.StoredVariables["CostFunctionJ"][IndexMin]
        Minimum  = self.StoredVariables["CurrentState"][IndexMin]
        #
        # Retrieving the analysis
        # -----------------------
        Xa = Minimum
        #
        self.StoredVariables["Analysis"].store( Xa )
        #
        # Supplementary calculations and/or storage
        # ------------------------------------------
        if self._toStore("OMA") or \
            self._toStore("SimulatedObservationAtOptimum"):
            if self._toStore("SimulatedObservationAtCurrentState"):
                HXa = self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin]
            elif self._toStore("SimulatedObservationAtCurrentOptimum"):
                HXa = self.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
            else:
                HXa = Hm(Xa)
            HXa = HXa.reshape((-1,1))
        if self._toStore("Innovation") or \
            self._toStore("OMB") or \
            self._toStore("SimulatedObservationAtBackground"):
            HXb = Hm(Xb).reshape((-1,1))
            Innovation = Y - HXb
        if self._toStore("Innovation"):
            self.StoredVariables["Innovation"].store( Innovation )
        if self._toStore("OMB"):
            self.StoredVariables["OMB"].store( Innovation )
        if self._toStore("BMA"):
            self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
        if self._toStore("OMA"):
            self.StoredVariables["OMA"].store( Y - HXa )
        if self._toStore("SimulatedObservationAtBackground"):
            self.StoredVariables["SimulatedObservationAtBackground"].store( HXb )
        if self._toStore("SimulatedObservationAtOptimum"):
            self.StoredVariables["SimulatedObservationAtOptimum"].store( HXa )
        #
        self._post_run(HO)
        return 0

# ==============================================================================
if __name__ == "__main__":
    print('\n AUTODIAGNOSTIC\n')
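
# ==============================================================================
# Illustrative usage sketch (not part of the algorithm): this class is normally
# driven through the ADAO textual user interface. Assuming the "adaoBuilder"
# interface is available, a bounded BOBYQA estimation of a 3-component state
# might look like the commented example below; all numerical values and the
# observation operator are placeholders.
#
#   from adao import adaoBuilder
#   case = adaoBuilder.New()
#   case.set( 'AlgorithmParameters',
#       Algorithm  = 'DerivativeFreeOptimization',
#       Parameters = {
#           'Minimizer' : 'BOBYQA',
#           'Bounds'    : [[0., 10.], [0., 10.], [0., 10.]],
#           'MaximumNumberOfFunctionEvaluations' : 500,
#           },
#       )
#   case.set( 'Background',          Vector = [1., 1., 1.] )
#   case.set( 'BackgroundError',     ScalarSparseMatrix = 1. )
#   case.set( 'Observation',         Vector = [2., 3., 4.] )
#   case.set( 'ObservationError',    ScalarSparseMatrix = 1. )
#   case.set( 'ObservationOperator', Matrix = [[1.,0.,0.],[0.,1.,0.],[0.,0.,1.]] )
#   case.execute()
#   print( case.get('Analysis')[-1] )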