# -*- coding: utf-8 -*-
#
# Copyright (C) 2008-2020 EDF R&D
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
#
# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
#
# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D

import logging
from daCore import BasicObjects, PlatformInfo
import numpy, scipy.optimize

# ==============================================================================
class ElementaryAlgorithm(BasicObjects.Algorithm):
    def __init__(self):
        BasicObjects.Algorithm.__init__(self, "DERIVATIVEFREEOPTIMIZATION")
        self.defineRequiredParameter(
            name     = "Minimizer",
            default  = "BOBYQA",
            typecast = str,
            message  = "Minimiseur utilisé",
            listval  = ["BOBYQA", "COBYLA", "NEWUOA", "POWELL", "SIMPLEX", "SUBPLEX"],
            )
        self.defineRequiredParameter(
            name     = "MaximumNumberOfSteps",
            default  = 15000,
            typecast = int,
            message  = "Nombre maximal de pas d'optimisation",
            minval   = -1,
            )
        self.defineRequiredParameter(
            name     = "MaximumNumberOfFunctionEvaluations",
            default  = 15000,
            typecast = int,
            message  = "Nombre maximal d'évaluations de la fonction",
            minval   = -1,
            )
        self.defineRequiredParameter(
            name     = "StateVariationTolerance",
            default  = 1.e-4,
            typecast = float,
            message  = "Variation relative maximale de l'état lors de l'arrêt",
            )
        self.defineRequiredParameter(
            name     = "CostDecrementTolerance",
            default  = 1.e-7,
            typecast = float,
            message  = "Diminution relative minimale du cout lors de l'arrêt",
            )
        self.defineRequiredParameter(
            name     = "QualityCriterion",
            default  = "AugmentedWeightedLeastSquares",
            typecast = str,
            message  = "Critère de qualité utilisé",
            listval  = ["AugmentedWeightedLeastSquares","AWLS","DA",
                        "WeightedLeastSquares","WLS",
                        "LeastSquares","LS","L2",
                        "AbsoluteValue","L1",
                        "MaximumError","ME"],
            )
        self.defineRequiredParameter(
            name     = "StoreInternalVariables",
            default  = False,
            typecast = bool,
            message  = "Stockage des variables internes ou intermédiaires du calcul",
            )
        self.defineRequiredParameter(
            name     = "StoreSupplementaryCalculations",
            default  = [],
            typecast = tuple,
            message  = "Liste de calculs supplémentaires à stocker et/ou effectuer",
            listval  = [
                "Analysis",
                "BMA",
                "CostFunctionJ",
                "CostFunctionJb",
                "CostFunctionJo",
                "CostFunctionJAtCurrentOptimum",
                "CostFunctionJbAtCurrentOptimum",
                "CostFunctionJoAtCurrentOptimum",
                "CurrentIterationNumber",
                "CurrentOptimum",
                "CurrentState",
                "IndexOfOptimum",
                "Innovation",
                "InnovationAtCurrentState",
                "OMA",
                "OMB",
                "SimulatedObservationAtBackground",
                "SimulatedObservationAtCurrentOptimum",
                "SimulatedObservationAtCurrentState",
                "SimulatedObservationAtOptimum",
                ]
            )
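        # Bounds are expected as one (lower, upper) pair per state component;
        # an unbounded side may be given as None (converted to NaN and then
        # mapped to -inf/+inf for the nlopt backends in run() below).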
        self.defineRequiredParameter( # No typecast imposed
            name     = "Bounds",
            message  = "Liste des valeurs de bornes",
            )
        self.requireInputArguments(
            mandatory= ("Xb", "Y", "HO", "R", "B" ),
            )
        self.setAttributes(tags=(
            "Optimization",
            "NonLinear",
            "MetaHeuristic",
            ))

    def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
        self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
        #
        if not PlatformInfo.has_nlopt and self._parameters["Minimizer"] not in ["COBYLA", "POWELL", "SIMPLEX"]:
            logging.warning("%s Minimization by SIMPLEX is forced because %s is unavailable (COBYLA and POWELL are also available)"%(self._name,self._parameters["Minimizer"]))
            self._parameters["Minimizer"] = "SIMPLEX"
        #
        # Operators
        # ---------
        Hm = HO["Direct"].appliedTo
        #
        # Precomputation of the inverses of B and R
        # -----------------------------------------
        BI = B.getI()
        RI = R.getI()
        #
        # Definition of the cost function
        # -------------------------------
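        # The cost J(x) = Jb(x) + Jo(x) depends on the QualityCriterion:
        #   AWLS/DA : Jb = 0.5 (x-Xb)^T B^{-1} (x-Xb), Jo = 0.5 (y-H(x))^T R^{-1} (y-H(x))
        #   WLS     : Jb = 0,                          Jo = 0.5 (y-H(x))^T R^{-1} (y-H(x))
        #   LS/L2   : Jb = 0,                          Jo = 0.5 ||y-H(x)||^2
        #   L1      : Jb = 0,                          Jo = sum |y-H(x)|
        #   ME      : Jb = 0,                          Jo = max |y-H(x)|
        # The AWLS/WLS criteria require the corresponding covariances to be
        # invertible (BI/RI not None). Every call also stores the current state
        # and cost terms, so CurrentIterationNumber counts cost evaluations.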
        def CostFunction(x, QualityMeasure="AugmentedWeightedLeastSquares"):
            _X  = numpy.asmatrix(numpy.ravel( x )).T
            self.StoredVariables["CurrentState"].store( _X )
            _HX = Hm( _X )
            _HX = numpy.asmatrix(numpy.ravel( _HX )).T
            _Innovation = Y - _HX
            if self._toStore("SimulatedObservationAtCurrentState") or \
                self._toStore("SimulatedObservationAtCurrentOptimum"):
                self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX )
            if self._toStore("InnovationAtCurrentState"):
                self.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
            #
            if QualityMeasure in ["AugmentedWeightedLeastSquares","AWLS","DA"]:
                if BI is None or RI is None:
                    raise ValueError("Background and Observation error covariance matrices have to be properly defined!")
                Jb  = 0.5 * (_X - Xb).T * BI * (_X - Xb)
                Jo  = 0.5 * (_Innovation).T * RI * (_Innovation)
            elif QualityMeasure in ["WeightedLeastSquares","WLS"]:
                if RI is None:
                    raise ValueError("Observation error covariance matrix has to be properly defined!")
                Jb  = 0.
                Jo  = 0.5 * (_Innovation).T * RI * (_Innovation)
            elif QualityMeasure in ["LeastSquares","LS","L2"]:
                Jb  = 0.
                Jo  = 0.5 * (_Innovation).T * (_Innovation)
            elif QualityMeasure in ["AbsoluteValue","L1"]:
                Jb  = 0.
                Jo  = numpy.sum( numpy.abs(_Innovation) )
            elif QualityMeasure in ["MaximumError","ME"]:
                Jb  = 0.
                Jo  = numpy.max( numpy.abs(_Innovation) )
            #
            J   = float( Jb ) + float( Jo )
            #
            self.StoredVariables["CurrentIterationNumber"].store( len(self.StoredVariables["CostFunctionJ"]) )
            self.StoredVariables["CostFunctionJb"].store( Jb )
            self.StoredVariables["CostFunctionJo"].store( Jo )
            self.StoredVariables["CostFunctionJ" ].store( J )
            if self._toStore("IndexOfOptimum") or \
                self._toStore("CurrentOptimum") or \
                self._toStore("CostFunctionJAtCurrentOptimum") or \
                self._toStore("CostFunctionJbAtCurrentOptimum") or \
                self._toStore("CostFunctionJoAtCurrentOptimum") or \
                self._toStore("SimulatedObservationAtCurrentOptimum"):
                IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
            if self._toStore("IndexOfOptimum"):
                self.StoredVariables["IndexOfOptimum"].store( IndexMin )
            if self._toStore("CurrentOptimum"):
                self.StoredVariables["CurrentOptimum"].store( self.StoredVariables["CurrentState"][IndexMin] )
            if self._toStore("SimulatedObservationAtCurrentOptimum"):
                self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
            if self._toStore("CostFunctionJAtCurrentOptimum"):
                self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] )
            if self._toStore("CostFunctionJbAtCurrentOptimum"):
                self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJb"][IndexMin] )
            if self._toStore("CostFunctionJoAtCurrentOptimum"):
                self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJo"][IndexMin] )
            return J
        #
        # Starting point of the optimization: Xini = Xb
        # ----------------------------------------------
        Xini = numpy.ravel(Xb)
        if len(Xini) < 2 and self._parameters["Minimizer"] == "NEWUOA":
            raise ValueError("The minimizer %s cannot be used when the optimisation state dimension is 1. Please choose another minimizer."%self._parameters["Minimizer"])
        #
        # Minimization of the cost function
        # ---------------------------------
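        # The number of cost values already stored (e.g. from a previous run of
        # this object) is kept so that the optimum search below only considers
        # the evaluations performed during this minimization.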
        nbPreviousSteps = self.StoredVariables["CostFunctionJ"].stepnumber()
        #
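        # Each branch below wires the same ADAO parameters into the chosen
        # backend: MaximumNumberOfFunctionEvaluations, StateVariationTolerance
        # (xtol) and CostDecrementTolerance (ftol). POWELL, and the COBYLA and
        # SIMPLEX fallbacks, use scipy.optimize; BOBYQA, NEWUOA, SUBPLEX (and
        # COBYLA/SIMPLEX when nlopt is available) use the nlopt local
        # derivative-free algorithms, with the (lower, upper) bound pairs
        # converted to nlopt bound arrays where NaN means unbounded.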
        if self._parameters["Minimizer"] == "POWELL":
            Minimum, J_optimal, direc, niter, nfeval, rc = scipy.optimize.fmin_powell(
                func        = CostFunction,
                x0          = Xini,
                args        = (self._parameters["QualityCriterion"],),
                maxiter     = self._parameters["MaximumNumberOfSteps"]-1,
                maxfun      = self._parameters["MaximumNumberOfFunctionEvaluations"],
                xtol        = self._parameters["StateVariationTolerance"],
                ftol        = self._parameters["CostDecrementTolerance"],
                full_output = True,
                disp        = self._parameters["optdisp"],
                )
        elif self._parameters["Minimizer"] == "COBYLA" and not PlatformInfo.has_nlopt:
            def make_constraints(bounds):
                # Each (lower, upper) pair becomes two inequality constraints
                # g(x) >= 0 for scipy.optimize.fmin_cobyla. Default arguments
                # freeze the current i, a, b (a plain closure would only keep
                # the values from the last loop iteration).
                constraints = []
                for (i,(a,b)) in enumerate(bounds):
                    lower = lambda x, i=i, a=a: x[i] - a
                    upper = lambda x, i=i, b=b: b - x[i]
                    constraints = constraints + [lower] + [upper]
                return constraints
            if self._parameters["Bounds"] is None:
                raise ValueError("Bounds have to be given for all axes as a list of lower/upper pairs!")
            Minimum = scipy.optimize.fmin_cobyla(
                func        = CostFunction,
                x0          = Xini,
                cons        = make_constraints( self._parameters["Bounds"] ),
                args        = (self._parameters["QualityCriterion"],),
                consargs    = (), # To avoid extra-args
                maxfun      = self._parameters["MaximumNumberOfFunctionEvaluations"],
                rhobeg      = 1.0,
                rhoend      = self._parameters["StateVariationTolerance"],
                catol       = 2.*self._parameters["StateVariationTolerance"],
                disp        = self._parameters["optdisp"],
                )
        elif self._parameters["Minimizer"] == "COBYLA" and PlatformInfo.has_nlopt:
            import nlopt
            opt = nlopt.opt(nlopt.LN_COBYLA, Xini.size)
            def _f(_Xx, Grad):
                # DFO, so no gradient
                return CostFunction(_Xx, self._parameters["QualityCriterion"])
            opt.set_min_objective(_f)
            if self._parameters["Bounds"] is not None:
                lub = numpy.array(self._parameters["Bounds"],dtype=float).reshape((Xini.size,2))
                lb = lub[:,0] ; lb[numpy.isnan(lb)] = -float('inf')
                ub = lub[:,1] ; ub[numpy.isnan(ub)] = +float('inf')
                if self._parameters["optdisp"]:
                    print("%s: upper bounds %s"%(opt.get_algorithm_name(),ub))
                    print("%s: lower bounds %s"%(opt.get_algorithm_name(),lb))
                opt.set_upper_bounds(ub)
                opt.set_lower_bounds(lb)
            opt.set_ftol_rel(self._parameters["CostDecrementTolerance"])
            opt.set_xtol_rel(2.*self._parameters["StateVariationTolerance"])
            opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
            Minimum = opt.optimize( Xini )
            if self._parameters["optdisp"]:
                print("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum))
                print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value()))
                print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result()))
        elif self._parameters["Minimizer"] == "SIMPLEX" and not PlatformInfo.has_nlopt:
            Minimum, J_optimal, niter, nfeval, rc = scipy.optimize.fmin(
                func        = CostFunction,
                x0          = Xini,
                args        = (self._parameters["QualityCriterion"],),
                maxiter     = self._parameters["MaximumNumberOfSteps"]-1,
                maxfun      = self._parameters["MaximumNumberOfFunctionEvaluations"],
                xtol        = self._parameters["StateVariationTolerance"],
                ftol        = self._parameters["CostDecrementTolerance"],
                full_output = True,
                disp        = self._parameters["optdisp"],
                )
        elif self._parameters["Minimizer"] == "SIMPLEX" and PlatformInfo.has_nlopt:
            import nlopt
            opt = nlopt.opt(nlopt.LN_NELDERMEAD, Xini.size)
            def _f(_Xx, Grad):
                # DFO, so no gradient
                return CostFunction(_Xx, self._parameters["QualityCriterion"])
            opt.set_min_objective(_f)
            if self._parameters["Bounds"] is not None:
                lub = numpy.array(self._parameters["Bounds"],dtype=float).reshape((Xini.size,2))
                lb = lub[:,0] ; lb[numpy.isnan(lb)] = -float('inf')
                ub = lub[:,1] ; ub[numpy.isnan(ub)] = +float('inf')
                if self._parameters["optdisp"]:
                    print("%s: upper bounds %s"%(opt.get_algorithm_name(),ub))
                    print("%s: lower bounds %s"%(opt.get_algorithm_name(),lb))
                opt.set_upper_bounds(ub)
                opt.set_lower_bounds(lb)
            opt.set_ftol_rel(self._parameters["CostDecrementTolerance"])
            opt.set_xtol_rel(2.*self._parameters["StateVariationTolerance"])
            opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
            Minimum = opt.optimize( Xini )
            if self._parameters["optdisp"]:
                print("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum))
                print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value()))
                print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result()))
        elif self._parameters["Minimizer"] == "BOBYQA" and PlatformInfo.has_nlopt:
            import nlopt
            opt = nlopt.opt(nlopt.LN_BOBYQA, Xini.size)
            def _f(_Xx, Grad):
                # DFO, so no gradient
                return CostFunction(_Xx, self._parameters["QualityCriterion"])
            opt.set_min_objective(_f)
            if self._parameters["Bounds"] is not None:
                lub = numpy.array(self._parameters["Bounds"],dtype=float).reshape((Xini.size,2))
                lb = lub[:,0] ; lb[numpy.isnan(lb)] = -float('inf')
                ub = lub[:,1] ; ub[numpy.isnan(ub)] = +float('inf')
                if self._parameters["optdisp"]:
                    print("%s: upper bounds %s"%(opt.get_algorithm_name(),ub))
                    print("%s: lower bounds %s"%(opt.get_algorithm_name(),lb))
                opt.set_upper_bounds(ub)
                opt.set_lower_bounds(lb)
            opt.set_ftol_rel(self._parameters["CostDecrementTolerance"])
            opt.set_xtol_rel(2.*self._parameters["StateVariationTolerance"])
            opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
            Minimum = opt.optimize( Xini )
            if self._parameters["optdisp"]:
                print("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum))
                print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value()))
                print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result()))
        elif self._parameters["Minimizer"] == "NEWUOA" and PlatformInfo.has_nlopt:
            import nlopt
            opt = nlopt.opt(nlopt.LN_NEWUOA, Xini.size)
            def _f(_Xx, Grad):
                # DFO, so no gradient
                return CostFunction(_Xx, self._parameters["QualityCriterion"])
            opt.set_min_objective(_f)
            if self._parameters["Bounds"] is not None:
                lub = numpy.array(self._parameters["Bounds"],dtype=float).reshape((Xini.size,2))
                lb = lub[:,0] ; lb[numpy.isnan(lb)] = -float('inf')
                ub = lub[:,1] ; ub[numpy.isnan(ub)] = +float('inf')
                if self._parameters["optdisp"]:
                    print("%s: upper bounds %s"%(opt.get_algorithm_name(),ub))
                    print("%s: lower bounds %s"%(opt.get_algorithm_name(),lb))
                opt.set_upper_bounds(ub)
                opt.set_lower_bounds(lb)
            opt.set_ftol_rel(self._parameters["CostDecrementTolerance"])
            opt.set_xtol_rel(2.*self._parameters["StateVariationTolerance"])
            opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
            Minimum = opt.optimize( Xini )
            if self._parameters["optdisp"]:
                print("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum))
                print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value()))
                print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result()))
        elif self._parameters["Minimizer"] == "SUBPLEX" and PlatformInfo.has_nlopt:
            import nlopt
            opt = nlopt.opt(nlopt.LN_SBPLX, Xini.size)
            def _f(_Xx, Grad):
                # DFO, so no gradient
                return CostFunction(_Xx, self._parameters["QualityCriterion"])
            opt.set_min_objective(_f)
            if self._parameters["Bounds"] is not None:
                lub = numpy.array(self._parameters["Bounds"],dtype=float).reshape((Xini.size,2))
                lb = lub[:,0] ; lb[numpy.isnan(lb)] = -float('inf')
                ub = lub[:,1] ; ub[numpy.isnan(ub)] = +float('inf')
                if self._parameters["optdisp"]:
                    print("%s: upper bounds %s"%(opt.get_algorithm_name(),ub))
                    print("%s: lower bounds %s"%(opt.get_algorithm_name(),lb))
                opt.set_upper_bounds(ub)
                opt.set_lower_bounds(lb)
            opt.set_ftol_rel(self._parameters["CostDecrementTolerance"])
            opt.set_xtol_rel(2.*self._parameters["StateVariationTolerance"])
            opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
            Minimum = opt.optimize( Xini )
            if self._parameters["optdisp"]:
                print("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum))
                print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value()))
                print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result()))
        else:
            raise ValueError("Error in Minimizer name: %s"%self._parameters["Minimizer"])
        #
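        # The point returned by the minimizer is not used directly: the best
        # state actually evaluated during this run (lowest stored J) is taken
        # instead, so the analysis is consistent with the stored cost values.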
        IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
        MinJ     = self.StoredVariables["CostFunctionJ"][IndexMin]
        Minimum  = self.StoredVariables["CurrentState"][IndexMin]
        #
        # Obtaining the analysis
        # ----------------------
        Xa = numpy.asmatrix(numpy.ravel( Minimum )).T
        #
        self.StoredVariables["Analysis"].store( Xa.A1 )
        #
        # Supplementary calculations and/or storage
        # ------------------------------------------
        if self._toStore("OMA" ) or \
            self._toStore("SimulatedObservationAtOptimum"):
            if self._toStore("SimulatedObservationAtCurrentState"):
                HXa = self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin]
            elif self._toStore("SimulatedObservationAtCurrentOptimum"):
                HXa = self.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
            else:
                HXa = Hm(Xa)
        if self._toStore("Innovation") or \
            self._toStore("OMB"):
            HXb = numpy.asmatrix(numpy.ravel( Hm(Xb) )).T
            d  = Y - HXb
        if self._toStore("Innovation"):
            self.StoredVariables["Innovation"].store( numpy.ravel(d) )
        if self._toStore("OMB"):
            self.StoredVariables["OMB"].store( numpy.ravel(d) )
        if self._toStore("BMA"):
            self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
        if self._toStore("OMA"):
            self.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
        if self._toStore("SimulatedObservationAtBackground"):
            self.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(Hm(Xb)) )
        if self._toStore("SimulatedObservationAtOptimum"):
            self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) )
        #
        self._post_run()
        return 0

# ==============================================================================
if __name__ == "__main__":
    print('\n AUTODIAGNOSTIC\n')
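
# A minimal usage sketch (kept as a comment, not executed), assuming the
# textual "adaoBuilder" interface of ADAO; the vectors, matrices and bound
# values below are purely illustrative:
#
#   from adao import adaoBuilder
#   case = adaoBuilder.New()
#   case.set( 'AlgorithmParameters',
#       Algorithm  = 'DerivativeFreeOptimization',
#       Parameters = {
#           "Minimizer"                          : "BOBYQA",
#           "Bounds"                             : [[0., 10.], [0., 10.], [0., 10.]],
#           "MaximumNumberOfFunctionEvaluations" : 500,
#           "StoreSupplementaryCalculations"     : ["CurrentState", "CostFunctionJ"],
#           },
#       )
#   case.set( 'Background',          Vector = [1., 1., 1.] )
#   case.set( 'BackgroundError',     ScalarSparseMatrix = 1. )
#   case.set( 'Observation',         Vector = [2., 3., 4.] )
#   case.set( 'ObservationError',    ScalarSparseMatrix = 1. )
#   case.set( 'ObservationOperator', Matrix = [[1.,0.,0.],[0.,1.,0.],[0.,0.,1.]] )
#   case.execute()
#   print( case.get('Analysis')[-1] )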