# -*- coding: utf-8 -*-
#
# Copyright (C) 2008-2021 EDF R&D
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
#
# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
#
# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D

import logging
from daCore import BasicObjects, PlatformInfo
import numpy, scipy.optimize

# ==============================================================================
class ElementaryAlgorithm(BasicObjects.Algorithm):
    def __init__(self):
        BasicObjects.Algorithm.__init__(self, "DERIVATIVEFREEOPTIMIZATION")
        self.defineRequiredParameter(
            name     = "Minimizer",
            default  = "BOBYQA",
            typecast = str,
            message  = "Minimizer used",
            listval  = [
                "BOBYQA",
                "COBYLA",
                "NEWUOA",
                "POWELL",
                "SIMPLEX",
                "SUBPLEX",
                ],
            )
        self.defineRequiredParameter(
            name     = "MaximumNumberOfSteps",
            default  = 15000,
            typecast = int,
            message  = "Maximum number of optimization steps",
            minval   = -1,
            )
        self.defineRequiredParameter(
            name     = "MaximumNumberOfFunctionEvaluations",
            default  = 15000,
            typecast = int,
            message  = "Maximum number of function evaluations",
            minval   = -1,
            )
        self.defineRequiredParameter(
            name     = "StateVariationTolerance",
            default  = 1.e-4,
            typecast = float,
            message  = "Maximum relative variation of the state at stopping",
            )
        self.defineRequiredParameter(
            name     = "CostDecrementTolerance",
            default  = 1.e-7,
            typecast = float,
            message  = "Minimum relative decrease of the cost function at stopping",
            )
        self.defineRequiredParameter(
            name     = "QualityCriterion",
            default  = "AugmentedWeightedLeastSquares",
            typecast = str,
            message  = "Quality criterion used",
            listval  = ["AugmentedWeightedLeastSquares","AWLS","DA",
                        "WeightedLeastSquares","WLS",
                        "LeastSquares","LS","L2",
                        "AbsoluteValue","L1",
                        "MaximumError","ME"],
            )
        self.defineRequiredParameter(
            name     = "StoreInternalVariables",
            default  = False,
            typecast = bool,
            message  = "Storage of the internal or intermediate variables of the calculation",
            )
        self.defineRequiredParameter(
            name     = "StoreSupplementaryCalculations",
            default  = [],
            typecast = tuple,
            message  = "List of supplementary calculations to store and/or perform",
            listval  = [
                "Analysis",
                "BMA",
                "CostFunctionJ",
                "CostFunctionJb",
                "CostFunctionJo",
                "CostFunctionJAtCurrentOptimum",
                "CostFunctionJbAtCurrentOptimum",
                "CostFunctionJoAtCurrentOptimum",
                "CurrentIterationNumber",
                "CurrentOptimum",
                "CurrentState",
                "IndexOfOptimum",
                "Innovation",
                "InnovationAtCurrentState",
                "OMA",
                "OMB",
                "SimulatedObservationAtBackground",
                "SimulatedObservationAtCurrentOptimum",
                "SimulatedObservationAtCurrentState",
                "SimulatedObservationAtOptimum",
                ]
            )
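        # Note: "Bounds" is expected as a sequence of [lower, upper] pairs, one
        # pair per state component; a None or NaN entry is mapped to -/+ infinity
        # by the NLopt minimizers used in run() below.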
        self.defineRequiredParameter( # No type declared
            name     = "Bounds",
            message  = "List of bound values",
            )
        self.requireInputArguments(
            mandatory= ("Xb", "Y", "HO", "R", "B" ),
            )
        self.setAttributes(tags=(
            "Optimization",
            "NonLinear",
            "MetaHeuristic",
            ))

    def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
        self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
        #
        if not PlatformInfo.has_nlopt and self._parameters["Minimizer"] not in ["COBYLA", "POWELL", "SIMPLEX"]:
            logging.warning("%s Minimization by SIMPLEX is forced because %s is unavailable (COBYLA, POWELL are also available)"%(self._name,self._parameters["Minimizer"]))
            self._parameters["Minimizer"] = "SIMPLEX"
        #
        # Operators
        # ---------
        Hm = HO["Direct"].appliedTo
        #
        # Precompute the inversions of B and R
        # ------------------------------------
        BI = B.getI()
        RI = R.getI()
        #
        # Definition of the cost function
        # -------------------------------
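        # The cost is J(x) = Jb(x) + Jo(x), with, depending on QualityMeasure:
        #   AugmentedWeightedLeastSquares (AWLS, DA): Jb = 0.5 (x-Xb)' BI (x-Xb), Jo = 0.5 (Y-H(x))' RI (Y-H(x))
        #   WeightedLeastSquares (WLS)              : Jb = 0,                    Jo = 0.5 (Y-H(x))' RI (Y-H(x))
        #   LeastSquares (LS, L2)                   : Jb = 0,                    Jo = 0.5 ||Y-H(x)||_2^2
        #   AbsoluteValue (L1)                      : Jb = 0,                    Jo = ||Y-H(x)||_1
        #   MaximumError (ME)                       : Jb = 0,                    Jo = ||Y-H(x)||_inf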
        def CostFunction(x, QualityMeasure="AugmentedWeightedLeastSquares"):
            _X  = numpy.asmatrix(numpy.ravel( x )).T
            self.StoredVariables["CurrentState"].store( _X )
            _HX = Hm( _X )
            _HX = numpy.asmatrix(numpy.ravel( _HX )).T
            _Innovation = Y - _HX
            if self._toStore("SimulatedObservationAtCurrentState") or \
                self._toStore("SimulatedObservationAtCurrentOptimum"):
                self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX )
            if self._toStore("InnovationAtCurrentState"):
                self.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
            #
            if QualityMeasure in ["AugmentedWeightedLeastSquares","AWLS","DA"]:
                if BI is None or RI is None:
                    raise ValueError("Background and observation error covariance matrices have to be properly defined!")
                Jb  = 0.5 * (_X - Xb).T * BI * (_X - Xb)
                Jo  = 0.5 * (_Innovation).T * RI * (_Innovation)
            elif QualityMeasure in ["WeightedLeastSquares","WLS"]:
                if RI is None:
                    raise ValueError("Observation error covariance matrix has to be properly defined!")
                Jb  = 0.
                Jo  = 0.5 * (_Innovation).T * RI * (_Innovation)
            elif QualityMeasure in ["LeastSquares","LS","L2"]:
                Jb  = 0.
                Jo  = 0.5 * (_Innovation).T * (_Innovation)
            elif QualityMeasure in ["AbsoluteValue","L1"]:
                Jb  = 0.
                Jo  = numpy.sum( numpy.abs(_Innovation) )
            elif QualityMeasure in ["MaximumError","ME"]:
                Jb  = 0.
                Jo  = numpy.max( numpy.abs(_Innovation) )
            #
            J   = float( Jb ) + float( Jo )
            #
            self.StoredVariables["CurrentIterationNumber"].store( len(self.StoredVariables["CostFunctionJ"]) )
            self.StoredVariables["CostFunctionJb"].store( Jb )
            self.StoredVariables["CostFunctionJo"].store( Jo )
            self.StoredVariables["CostFunctionJ" ].store( J )
            if self._toStore("IndexOfOptimum") or \
                self._toStore("CurrentOptimum") or \
                self._toStore("CostFunctionJAtCurrentOptimum") or \
                self._toStore("CostFunctionJbAtCurrentOptimum") or \
                self._toStore("CostFunctionJoAtCurrentOptimum") or \
                self._toStore("SimulatedObservationAtCurrentOptimum"):
                IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
            if self._toStore("IndexOfOptimum"):
                self.StoredVariables["IndexOfOptimum"].store( IndexMin )
            if self._toStore("CurrentOptimum"):
                self.StoredVariables["CurrentOptimum"].store( self.StoredVariables["CurrentState"][IndexMin] )
            if self._toStore("SimulatedObservationAtCurrentOptimum"):
                self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
            if self._toStore("CostFunctionJAtCurrentOptimum"):
                self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] )
            if self._toStore("CostFunctionJbAtCurrentOptimum"):
                self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJb"][IndexMin] )
            if self._toStore("CostFunctionJoAtCurrentOptimum"):
                self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJo"][IndexMin] )
            return J
        #
        # Starting point of the optimization: Xini = Xb
        # ---------------------------------------------
        Xini = numpy.ravel(Xb)
        if len(Xini) < 2 and self._parameters["Minimizer"] == "NEWUOA":
            raise ValueError("The minimizer %s cannot be used when the optimization state dimension is 1. Please choose another minimizer."%self._parameters["Minimizer"])
        #
        # Minimization of the functional
        # ------------------------------
        nbPreviousSteps = self.StoredVariables["CostFunctionJ"].stepnumber()
        #
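        # Each branch below drives one derivative-free minimizer: POWELL, and the
        # COBYLA/SIMPLEX fallbacks, rely on SciPy, while COBYLA, SIMPLEX, BOBYQA,
        # NEWUOA and SUBPLEX rely on NLopt when it is available.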
        if self._parameters["Minimizer"] == "POWELL":
            Minimum, J_optimal, direc, niter, nfeval, rc = scipy.optimize.fmin_powell(
                func        = CostFunction,
                x0          = Xini,
                args        = (self._parameters["QualityCriterion"],),
                maxiter     = self._parameters["MaximumNumberOfSteps"]-1,
                maxfun      = self._parameters["MaximumNumberOfFunctionEvaluations"],
                xtol        = self._parameters["StateVariationTolerance"],
                ftol        = self._parameters["CostDecrementTolerance"],
                full_output = True,
                disp        = self._parameters["optdisp"],
                )
        elif self._parameters["Minimizer"] == "COBYLA" and not PlatformInfo.has_nlopt:
            def make_constraints(bounds):
                # Express each (lower, upper) bound as two inequality constraints
                # g(x) >= 0, as expected by fmin_cobyla. The default arguments
                # freeze i, a, b at definition time, avoiding the late-binding
                # closure pitfall of all constraints sharing the last loop values.
                constraints = []
                for (i,(a,b)) in enumerate(bounds):
                    lower = lambda x, i=i, a=a: x[i] - a
                    upper = lambda x, i=i, b=b: b - x[i]
                    constraints = constraints + [lower] + [upper]
                return constraints
            if self._parameters["Bounds"] is None:
                raise ValueError("Bounds have to be given for all axes as a list of lower/upper pairs!")
            Minimum = scipy.optimize.fmin_cobyla(
                func        = CostFunction,
                x0          = Xini,
                cons        = make_constraints( self._parameters["Bounds"] ),
                args        = (self._parameters["QualityCriterion"],),
                consargs    = (), # To avoid extra-args
                maxfun      = self._parameters["MaximumNumberOfFunctionEvaluations"],
                rhobeg      = 1.0,
                rhoend      = self._parameters["StateVariationTolerance"],
                catol       = 2.*self._parameters["StateVariationTolerance"],
                disp        = self._parameters["optdisp"],
                )
        elif self._parameters["Minimizer"] == "COBYLA" and PlatformInfo.has_nlopt:
            import nlopt
            opt = nlopt.opt(nlopt.LN_COBYLA, Xini.size)
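            # The same NLopt pattern is used for every NLopt-based minimizer below:
            # the objective receives (x, grad) but ignores the gradient argument
            # (derivative-free), bounds pairs are converted to lower/upper vectors
            # with NaN mapped to -/+ infinity, and stopping is controlled by the
            # relative tolerances and the maximum number of evaluations.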
            def _f(_Xx, Grad):
                # DFO, so no gradient
                return CostFunction(_Xx, self._parameters["QualityCriterion"])
            opt.set_min_objective(_f)
            if self._parameters["Bounds"] is not None:
                lub = numpy.array(self._parameters["Bounds"],dtype=float).reshape((Xini.size,2))
                lb = lub[:,0] ; lb[numpy.isnan(lb)] = -float('inf')
                ub = lub[:,1] ; ub[numpy.isnan(ub)] = +float('inf')
                if self._parameters["optdisp"]:
                    print("%s: upper bounds %s"%(opt.get_algorithm_name(),ub))
                    print("%s: lower bounds %s"%(opt.get_algorithm_name(),lb))
                opt.set_upper_bounds(ub)
                opt.set_lower_bounds(lb)
            opt.set_ftol_rel(self._parameters["CostDecrementTolerance"])
            opt.set_xtol_rel(2.*self._parameters["StateVariationTolerance"])
            opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
            Minimum = opt.optimize( Xini )
            if self._parameters["optdisp"]:
                print("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum))
                print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value()))
                print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result()))
        elif self._parameters["Minimizer"] == "SIMPLEX" and not PlatformInfo.has_nlopt:
            Minimum, J_optimal, niter, nfeval, rc = scipy.optimize.fmin(
                func        = CostFunction,
                x0          = Xini,
                args        = (self._parameters["QualityCriterion"],),
                maxiter     = self._parameters["MaximumNumberOfSteps"]-1,
                maxfun      = self._parameters["MaximumNumberOfFunctionEvaluations"],
                xtol        = self._parameters["StateVariationTolerance"],
                ftol        = self._parameters["CostDecrementTolerance"],
                full_output = True,
                disp        = self._parameters["optdisp"],
                )
        elif self._parameters["Minimizer"] == "SIMPLEX" and PlatformInfo.has_nlopt:
            import nlopt
            opt = nlopt.opt(nlopt.LN_NELDERMEAD, Xini.size)
            def _f(_Xx, Grad):
                # DFO, so no gradient
                return CostFunction(_Xx, self._parameters["QualityCriterion"])
            opt.set_min_objective(_f)
            if self._parameters["Bounds"] is not None:
                lub = numpy.array(self._parameters["Bounds"],dtype=float).reshape((Xini.size,2))
                lb = lub[:,0] ; lb[numpy.isnan(lb)] = -float('inf')
                ub = lub[:,1] ; ub[numpy.isnan(ub)] = +float('inf')
                if self._parameters["optdisp"]:
                    print("%s: upper bounds %s"%(opt.get_algorithm_name(),ub))
                    print("%s: lower bounds %s"%(opt.get_algorithm_name(),lb))
                opt.set_upper_bounds(ub)
                opt.set_lower_bounds(lb)
            opt.set_ftol_rel(self._parameters["CostDecrementTolerance"])
            opt.set_xtol_rel(2.*self._parameters["StateVariationTolerance"])
            opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
            Minimum = opt.optimize( Xini )
            if self._parameters["optdisp"]:
                print("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum))
                print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value()))
                print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result()))
        elif self._parameters["Minimizer"] == "BOBYQA" and PlatformInfo.has_nlopt:
            import nlopt
            opt = nlopt.opt(nlopt.LN_BOBYQA, Xini.size)
            def _f(_Xx, Grad):
                # DFO, so no gradient
                return CostFunction(_Xx, self._parameters["QualityCriterion"])
            opt.set_min_objective(_f)
            if self._parameters["Bounds"] is not None:
                lub = numpy.array(self._parameters["Bounds"],dtype=float).reshape((Xini.size,2))
                lb = lub[:,0] ; lb[numpy.isnan(lb)] = -float('inf')
                ub = lub[:,1] ; ub[numpy.isnan(ub)] = +float('inf')
                if self._parameters["optdisp"]:
                    print("%s: upper bounds %s"%(opt.get_algorithm_name(),ub))
                    print("%s: lower bounds %s"%(opt.get_algorithm_name(),lb))
                opt.set_upper_bounds(ub)
                opt.set_lower_bounds(lb)
            opt.set_ftol_rel(self._parameters["CostDecrementTolerance"])
            opt.set_xtol_rel(2.*self._parameters["StateVariationTolerance"])
            opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
            Minimum = opt.optimize( Xini )
            if self._parameters["optdisp"]:
                print("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum))
                print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value()))
                print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result()))
        elif self._parameters["Minimizer"] == "NEWUOA" and PlatformInfo.has_nlopt:
            import nlopt
            opt = nlopt.opt(nlopt.LN_NEWUOA, Xini.size)
            def _f(_Xx, Grad):
                # DFO, so no gradient
                return CostFunction(_Xx, self._parameters["QualityCriterion"])
            opt.set_min_objective(_f)
            if self._parameters["Bounds"] is not None:
                lub = numpy.array(self._parameters["Bounds"],dtype=float).reshape((Xini.size,2))
                lb = lub[:,0] ; lb[numpy.isnan(lb)] = -float('inf')
                ub = lub[:,1] ; ub[numpy.isnan(ub)] = +float('inf')
                if self._parameters["optdisp"]:
                    print("%s: upper bounds %s"%(opt.get_algorithm_name(),ub))
                    print("%s: lower bounds %s"%(opt.get_algorithm_name(),lb))
                opt.set_upper_bounds(ub)
                opt.set_lower_bounds(lb)
            opt.set_ftol_rel(self._parameters["CostDecrementTolerance"])
            opt.set_xtol_rel(2.*self._parameters["StateVariationTolerance"])
            opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
            Minimum = opt.optimize( Xini )
            if self._parameters["optdisp"]:
                print("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum))
                print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value()))
                print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result()))
        elif self._parameters["Minimizer"] == "SUBPLEX" and PlatformInfo.has_nlopt:
            import nlopt
            opt = nlopt.opt(nlopt.LN_SBPLX, Xini.size)
            def _f(_Xx, Grad):
                # DFO, so no gradient
                return CostFunction(_Xx, self._parameters["QualityCriterion"])
            opt.set_min_objective(_f)
            if self._parameters["Bounds"] is not None:
                lub = numpy.array(self._parameters["Bounds"],dtype=float).reshape((Xini.size,2))
                lb = lub[:,0] ; lb[numpy.isnan(lb)] = -float('inf')
                ub = lub[:,1] ; ub[numpy.isnan(ub)] = +float('inf')
                if self._parameters["optdisp"]:
                    print("%s: upper bounds %s"%(opt.get_algorithm_name(),ub))
                    print("%s: lower bounds %s"%(opt.get_algorithm_name(),lb))
                opt.set_upper_bounds(ub)
                opt.set_lower_bounds(lb)
            opt.set_ftol_rel(self._parameters["CostDecrementTolerance"])
            opt.set_xtol_rel(2.*self._parameters["StateVariationTolerance"])
            opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
            Minimum = opt.optimize( Xini )
            if self._parameters["optdisp"]:
                print("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum))
                print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value()))
                print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result()))
        else:
            raise ValueError("Error in Minimizer name: %s"%self._parameters["Minimizer"])
        #
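        # The returned optimum is the best state stored during the evaluations,
        # not necessarily the point returned by the minimizer (which may be the
        # last evaluated one).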
        IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
        MinJ     = self.StoredVariables["CostFunctionJ"][IndexMin]
        Minimum  = self.StoredVariables["CurrentState"][IndexMin]
        #
        # Obtaining the analysis
        # ----------------------
        Xa = numpy.asmatrix(numpy.ravel( Minimum )).T
        #
        self.StoredVariables["Analysis"].store( Xa.A1 )
        #
        # Supplementary calculations and/or storage
        # -----------------------------------------
        if self._toStore("OMA" ) or \
            self._toStore("SimulatedObservationAtOptimum"):
            if self._toStore("SimulatedObservationAtCurrentState"):
                HXa = self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin]
            elif self._toStore("SimulatedObservationAtCurrentOptimum"):
                HXa = self.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
            else:
                HXa = Hm(Xa)
        if self._toStore("Innovation") or \
            self._toStore("OMB"):
            HXb = numpy.asmatrix(numpy.ravel( Hm(Xb) )).T # simulation at the background, needed for d = Y - H(Xb)
            d  = Y - HXb
        if self._toStore("Innovation"):
            self.StoredVariables["Innovation"].store( numpy.ravel(d) )
        if self._toStore("OMB"):
            self.StoredVariables["OMB"].store( numpy.ravel(d) )
        if self._toStore("BMA"):
            self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
        if self._toStore("OMA"):
            self.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
        if self._toStore("SimulatedObservationAtBackground"):
            self.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(Hm(Xb)) )
        if self._toStore("SimulatedObservationAtOptimum"):
            self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) )
        #
        self._post_run()
        return 0

# ==============================================================================
if __name__ == "__main__":
    print('\n AUTODIAGNOSTIC\n')
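# The following is an illustrative, untested sketch of how this algorithm is
# typically driven through the ADAO textual interface; the numerical values and
# parameter choices are arbitrary examples, not part of this module:
#
#   from adao import adaoBuilder
#   case = adaoBuilder.New()
#   case.set( 'AlgorithmParameters',
#       Algorithm  = 'DerivativeFreeOptimization',
#       Parameters = {
#           'Minimizer'            : 'BOBYQA',
#           'MaximumNumberOfSteps' : 100,
#           'Bounds'               : [[0., 10.], [0., 10.], [0., 10.]],
#           },
#       )
#   case.set( 'Background',          Vector = [1., 1., 1.] )
#   case.set( 'BackgroundError',     ScalarSparseMatrix = 1. )
#   case.set( 'Observation',         Vector = [2., 3., 4.] )
#   case.set( 'ObservationError',    ScalarSparseMatrix = 1. )
#   case.set( 'ObservationOperator', Matrix = [[1.,0.,0.],[0.,1.,0.],[0.,0.,1.]] )
#   case.execute()
#   print( case.get('Analysis')[-1] )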