src/daComposant/daAlgorithms/DerivativeFreeOptimization.py
# -*- coding: utf-8 -*-
#
# Copyright (C) 2008-2018 EDF R&D
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
#
# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
#
# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D

import logging
from daCore import BasicObjects, PlatformInfo
import numpy, scipy.optimize

# ==============================================================================
class ElementaryAlgorithm(BasicObjects.Algorithm):
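    """
    Derivative free optimization (DFO) of a chosen quality criterion J(x),
    driven by gradient-free minimizers: NLopt ones (BOBYQA, COBYLA, NEWUOA,
    SUBPLEX, Nelder-Mead SIMPLEX) when NLopt is available, SciPy ones
    (COBYLA, POWELL, SIMPLEX) otherwise.
    """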
    def __init__(self):
        BasicObjects.Algorithm.__init__(self, "DERIVATIVEFREEOPTIMIZATION")
        self.defineRequiredParameter(
            name     = "Minimizer",
            default  = "BOBYQA",
            typecast = str,
            message  = "Minimiseur utilisé",
            listval  = ["BOBYQA", "COBYLA", "NEWUOA", "POWELL", "SIMPLEX", "SUBPLEX"],
            )
        self.defineRequiredParameter(
            name     = "MaximumNumberOfSteps",
            default  = 15000,
            typecast = int,
            message  = "Nombre maximal de pas d'optimisation",
            minval   = -1,
            )
        self.defineRequiredParameter(
            name     = "MaximumNumberOfFunctionEvaluations",
            default  = 15000,
            typecast = int,
            message  = "Nombre maximal d'évaluations de la fonction",
            minval   = -1,
            )
        self.defineRequiredParameter(
            name     = "StateVariationTolerance",
            default  = 1.e-4,
            typecast = float,
            message  = "Variation relative maximale de l'état lors de l'arrêt",
            )
        self.defineRequiredParameter(
            name     = "CostDecrementTolerance",
            default  = 1.e-7,
            typecast = float,
            message  = "Diminution relative minimale du cout lors de l'arrêt",
            )
        self.defineRequiredParameter(
            name     = "QualityCriterion",
            default  = "AugmentedWeightedLeastSquares",
            typecast = str,
            message  = "Critère de qualité utilisé",
            listval  = ["AugmentedWeightedLeastSquares","AWLS","DA",
                        "WeightedLeastSquares","WLS",
                        "LeastSquares","LS","L2",
                        "AbsoluteValue","L1",
                        "MaximumError","ME"],
            )
        self.defineRequiredParameter(
            name     = "StoreInternalVariables",
            default  = False,
            typecast = bool,
            message  = "Stockage des variables internes ou intermédiaires du calcul",
            )
        self.defineRequiredParameter(
            name     = "StoreSupplementaryCalculations",
            default  = [],
            typecast = tuple,
            message  = "Liste de calculs supplémentaires à stocker et/ou effectuer",
            listval  = [
                "CurrentState",
                "CostFunctionJ",
                "CostFunctionJb",
                "CostFunctionJo",
                "CostFunctionJAtCurrentOptimum",
                "CostFunctionJbAtCurrentOptimum",
                "CostFunctionJoAtCurrentOptimum",
                "CurrentOptimum",
                "IndexOfOptimum",
                "InnovationAtCurrentState",
                "BMA",
                "OMA",
                "OMB",
                "SimulatedObservationAtBackground",
                "SimulatedObservationAtCurrentOptimum",
                "SimulatedObservationAtCurrentState",
                "SimulatedObservationAtOptimum",
                ]
            )
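        # "Bounds" is expected as one (lower, upper) pair per state component;
        # a None or NaN entry leaves the corresponding bound open (it is mapped
        # to +/-inf for the NLopt minimizers below).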
        self.defineRequiredParameter( # No typecast
            name     = "Bounds",
            message  = "Liste des valeurs de bornes",
            )
        self.requireInputArguments(
            mandatory= ("Xb", "Y", "HO", "R", "B" ),
            )

    def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
        self._pre_run(Parameters, Xb, Y, R, B, Q)
        #
        if not PlatformInfo.has_nlopt and not self._parameters["Minimizer"] in ["COBYLA", "POWELL", "SIMPLEX"]:
            logging.debug("%s Absence de NLopt, utilisation forcee du minimiseur SIMPLEX"%(self._name,))
            self._parameters["Minimizer"] = "SIMPLEX"
        #
        # Operators
        # ---------
        Hm = HO["Direct"].appliedTo
        #
        # Precompute the inverses of B and R
        # ----------------------------------
        BI = B.getI()
        RI = R.getI()
        #
        # Definition of the cost function
        # -------------------------------
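        # For the default criterion "AugmentedWeightedLeastSquares" (aliases
        # "AWLS", "DA"), the cost below is the classical data assimilation
        # functional
        #     J(x) = Jb + Jo
        #          = 1/2 (x - Xb)^T B^{-1} (x - Xb) + 1/2 (Y - H(x))^T R^{-1} (Y - H(x)),
        # while the other criteria drop the background term Jb and measure the
        # innovation Y - H(x) with an R-weighted L2, plain L2, L1 or maximum
        # (L-infinity) norm.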
        def CostFunction(x, QualityMeasure="AugmentedWeightedLeastSquares"):
            _X  = numpy.asmatrix(numpy.ravel( x )).T
            self.StoredVariables["CurrentState"].store( _X )
            _HX = Hm( _X )
            _HX = numpy.asmatrix(numpy.ravel( _HX )).T
            _Innovation = Y - _HX
            if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"] or \
               "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX )
            if "InnovationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]:
                self.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
            #
            if QualityMeasure in ["AugmentedWeightedLeastSquares","AWLS","DA"]:
                if BI is None or RI is None:
                    raise ValueError("Background and Observation error covariance matrices have to be properly defined!")
                Jb  = 0.5 * (_X - Xb).T * BI * (_X - Xb)
                Jo  = 0.5 * (_Innovation).T * RI * (_Innovation)
            elif QualityMeasure in ["WeightedLeastSquares","WLS"]:
                if RI is None:
                    raise ValueError("Observation error covariance matrix has to be properly defined!")
                Jb  = 0.
                Jo  = 0.5 * (_Innovation).T * RI * (_Innovation)
            elif QualityMeasure in ["LeastSquares","LS","L2"]:
                Jb  = 0.
                Jo  = 0.5 * (_Innovation).T * (_Innovation)
            elif QualityMeasure in ["AbsoluteValue","L1"]:
                Jb  = 0.
                Jo  = numpy.sum( numpy.abs(_Innovation) )
            elif QualityMeasure in ["MaximumError","ME"]:
                Jb  = 0.
                Jo  = numpy.max( numpy.abs(_Innovation) )
            #
            J   = float( Jb ) + float( Jo )
            #
            self.StoredVariables["CostFunctionJb"].store( Jb )
            self.StoredVariables["CostFunctionJo"].store( Jo )
            self.StoredVariables["CostFunctionJ" ].store( J )
            if "IndexOfOptimum" in self._parameters["StoreSupplementaryCalculations"] or \
               "CurrentOptimum" in self._parameters["StoreSupplementaryCalculations"] or \
               "CostFunctionJAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"] or \
               "CostFunctionJbAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"] or \
               "CostFunctionJoAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"] or \
               "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
            if "IndexOfOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                self.StoredVariables["IndexOfOptimum"].store( IndexMin )
            if "CurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                self.StoredVariables["CurrentOptimum"].store( self.StoredVariables["CurrentState"][IndexMin] )
            if "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
            if "CostFunctionJAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] )
            if "CostFunctionJbAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJb"][IndexMin] )
            if "CostFunctionJoAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJo"][IndexMin] )
            return J
        #
        # Starting point of the optimization: Xini = Xb
        # ----------------------------------------------
        Xini = numpy.ravel(Xb)
        if len(Xini) < 2 and self._parameters["Minimizer"] == "NEWUOA":
            raise ValueError("The minimizer %s cannot be used when the optimisation state dimension is 1. Please choose another minimizer."%self._parameters["Minimizer"])
        #
        # Minimization of the cost functional
        # -----------------------------------
        nbPreviousSteps = self.StoredVariables["CostFunctionJ"].stepnumber()
        #
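        # Each branch below only drives the evaluations of CostFunction, which
        # records every visited state and cost value in StoredVariables; the
        # analysis is recovered from that history after the minimization.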
        if self._parameters["Minimizer"] == "POWELL":
            Minimum, J_optimal, direc, niter, nfeval, rc = scipy.optimize.fmin_powell(
                func        = CostFunction,
                x0          = Xini,
                args        = (self._parameters["QualityCriterion"],),
                maxiter     = self._parameters["MaximumNumberOfSteps"]-1,
                maxfun      = self._parameters["MaximumNumberOfFunctionEvaluations"],
                xtol        = self._parameters["StateVariationTolerance"],
                ftol        = self._parameters["CostDecrementTolerance"],
                full_output = True,
                disp        = self._parameters["optdisp"],
                )
        elif self._parameters["Minimizer"] == "COBYLA" and not PlatformInfo.has_nlopt:
            def make_constraints(bounds):
                # Translate each (lower, upper) bounds pair into the two COBYLA
                # inequality constraints x[i] - a >= 0 and b - x[i] >= 0; the
                # default arguments bind i, a and b at definition time, so each
                # constraint keeps its own pair instead of the last one only.
                constraints = []
                for (i,(a,b)) in enumerate(bounds):
                    lower = lambda x, i=i, a=a: x[i] - a
                    upper = lambda x, i=i, b=b: b - x[i]
                    constraints = constraints + [lower] + [upper]
                return constraints
            if self._parameters["Bounds"] is None:
                raise ValueError("Bounds have to be given for all axes as a list of lower/upper pairs!")
            Minimum = scipy.optimize.fmin_cobyla(
                func        = CostFunction,
                x0          = Xini,
                cons        = make_constraints( self._parameters["Bounds"] ),
                args        = (self._parameters["QualityCriterion"],),
                consargs    = (), # To avoid extra-args
                maxfun      = self._parameters["MaximumNumberOfFunctionEvaluations"],
                rhobeg      = 1.0,
                rhoend      = self._parameters["StateVariationTolerance"],
                catol       = 2.*self._parameters["StateVariationTolerance"],
                disp        = self._parameters["optdisp"],
                )
        elif self._parameters["Minimizer"] == "COBYLA" and PlatformInfo.has_nlopt:
            import nlopt
            opt = nlopt.opt(nlopt.LN_COBYLA, Xini.size)
            def _f(_Xx, Grad):
                # DFO, so no gradient
                return CostFunction(_Xx, self._parameters["QualityCriterion"])
            opt.set_min_objective(_f)
            if self._parameters["Bounds"] is not None:
                lub = numpy.array(self._parameters["Bounds"],dtype=float).reshape((Xini.size,2))
                lb = lub[:,0] ; lb[numpy.isnan(lb)] = -float('inf')
                ub = lub[:,1] ; ub[numpy.isnan(ub)] = +float('inf')
                if self._parameters["optdisp"]:
                    print("%s: upper bounds %s"%(opt.get_algorithm_name(),ub))
                    print("%s: lower bounds %s"%(opt.get_algorithm_name(),lb))
                opt.set_upper_bounds(ub)
                opt.set_lower_bounds(lb)
            opt.set_ftol_rel(self._parameters["CostDecrementTolerance"])
            opt.set_xtol_rel(2.*self._parameters["StateVariationTolerance"])
            opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
            Minimum = opt.optimize( Xini )
            if self._parameters["optdisp"]:
                print("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum))
                print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value()))
                print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result()))
        elif self._parameters["Minimizer"] == "SIMPLEX" and not PlatformInfo.has_nlopt:
            Minimum, J_optimal, niter, nfeval, rc = scipy.optimize.fmin(
                func        = CostFunction,
                x0          = Xini,
                args        = (self._parameters["QualityCriterion"],),
                maxiter     = self._parameters["MaximumNumberOfSteps"]-1,
                maxfun      = self._parameters["MaximumNumberOfFunctionEvaluations"],
                xtol        = self._parameters["StateVariationTolerance"],
                ftol        = self._parameters["CostDecrementTolerance"],
                full_output = True,
                disp        = self._parameters["optdisp"],
                )
        elif self._parameters["Minimizer"] == "SIMPLEX" and PlatformInfo.has_nlopt:
            import nlopt
            opt = nlopt.opt(nlopt.LN_NELDERMEAD, Xini.size)
            def _f(_Xx, Grad):
                # DFO, so no gradient
                return CostFunction(_Xx, self._parameters["QualityCriterion"])
            opt.set_min_objective(_f)
            if self._parameters["Bounds"] is not None:
                lub = numpy.array(self._parameters["Bounds"],dtype=float).reshape((Xini.size,2))
                lb = lub[:,0] ; lb[numpy.isnan(lb)] = -float('inf')
                ub = lub[:,1] ; ub[numpy.isnan(ub)] = +float('inf')
                if self._parameters["optdisp"]:
                    print("%s: upper bounds %s"%(opt.get_algorithm_name(),ub))
                    print("%s: lower bounds %s"%(opt.get_algorithm_name(),lb))
                opt.set_upper_bounds(ub)
                opt.set_lower_bounds(lb)
            opt.set_ftol_rel(self._parameters["CostDecrementTolerance"])
            opt.set_xtol_rel(2.*self._parameters["StateVariationTolerance"])
            opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
            Minimum = opt.optimize( Xini )
            if self._parameters["optdisp"]:
                print("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum))
                print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value()))
                print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result()))
        elif self._parameters["Minimizer"] == "BOBYQA" and PlatformInfo.has_nlopt:
            import nlopt
            opt = nlopt.opt(nlopt.LN_BOBYQA, Xini.size)
            def _f(_Xx, Grad):
                # DFO, so no gradient
                return CostFunction(_Xx, self._parameters["QualityCriterion"])
            opt.set_min_objective(_f)
            if self._parameters["Bounds"] is not None:
                lub = numpy.array(self._parameters["Bounds"],dtype=float).reshape((Xini.size,2))
                lb = lub[:,0] ; lb[numpy.isnan(lb)] = -float('inf')
                ub = lub[:,1] ; ub[numpy.isnan(ub)] = +float('inf')
                if self._parameters["optdisp"]:
                    print("%s: upper bounds %s"%(opt.get_algorithm_name(),ub))
                    print("%s: lower bounds %s"%(opt.get_algorithm_name(),lb))
                opt.set_upper_bounds(ub)
                opt.set_lower_bounds(lb)
            opt.set_ftol_rel(self._parameters["CostDecrementTolerance"])
            opt.set_xtol_rel(2.*self._parameters["StateVariationTolerance"])
            opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
            Minimum = opt.optimize( Xini )
            if self._parameters["optdisp"]:
                print("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum))
                print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value()))
                print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result()))
        elif self._parameters["Minimizer"] == "NEWUOA" and PlatformInfo.has_nlopt:
            import nlopt
            opt = nlopt.opt(nlopt.LN_NEWUOA, Xini.size)
            def _f(_Xx, Grad):
                # DFO, so no gradient
                return CostFunction(_Xx, self._parameters["QualityCriterion"])
            opt.set_min_objective(_f)
            if self._parameters["Bounds"] is not None:
                lub = numpy.array(self._parameters["Bounds"],dtype=float).reshape((Xini.size,2))
                lb = lub[:,0] ; lb[numpy.isnan(lb)] = -float('inf')
                ub = lub[:,1] ; ub[numpy.isnan(ub)] = +float('inf')
                if self._parameters["optdisp"]:
                    print("%s: upper bounds %s"%(opt.get_algorithm_name(),ub))
                    print("%s: lower bounds %s"%(opt.get_algorithm_name(),lb))
                opt.set_upper_bounds(ub)
                opt.set_lower_bounds(lb)
            opt.set_ftol_rel(self._parameters["CostDecrementTolerance"])
            opt.set_xtol_rel(2.*self._parameters["StateVariationTolerance"])
            opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
            Minimum = opt.optimize( Xini )
            if self._parameters["optdisp"]:
                print("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum))
                print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value()))
                print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result()))
        elif self._parameters["Minimizer"] == "SUBPLEX" and PlatformInfo.has_nlopt:
            import nlopt
            opt = nlopt.opt(nlopt.LN_SBPLX, Xini.size)
            def _f(_Xx, Grad):
                # DFO, so no gradient
                return CostFunction(_Xx, self._parameters["QualityCriterion"])
            opt.set_min_objective(_f)
            if self._parameters["Bounds"] is not None:
                lub = numpy.array(self._parameters["Bounds"],dtype=float).reshape((Xini.size,2))
                lb = lub[:,0] ; lb[numpy.isnan(lb)] = -float('inf')
                ub = lub[:,1] ; ub[numpy.isnan(ub)] = +float('inf')
                if self._parameters["optdisp"]:
                    print("%s: upper bounds %s"%(opt.get_algorithm_name(),ub))
                    print("%s: lower bounds %s"%(opt.get_algorithm_name(),lb))
                opt.set_upper_bounds(ub)
                opt.set_lower_bounds(lb)
            opt.set_ftol_rel(self._parameters["CostDecrementTolerance"])
            opt.set_xtol_rel(2.*self._parameters["StateVariationTolerance"])
            opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
            Minimum = opt.optimize( Xini )
            if self._parameters["optdisp"]:
                print("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum))
                print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value()))
                print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result()))
        else:
            raise ValueError("Error in Minimizer name: %s"%self._parameters["Minimizer"])
        #
        IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
        MinJ     = self.StoredVariables["CostFunctionJ"][IndexMin]
        Minimum  = self.StoredVariables["CurrentState"][IndexMin]
        #
        # Retrieval of the analysis
        # -------------------------
        Xa = numpy.asmatrix(numpy.ravel( Minimum )).T
        #
        self.StoredVariables["Analysis"].store( Xa.A1 )
        #
        if "OMA"                           in self._parameters["StoreSupplementaryCalculations"] or \
           "SimulatedObservationAtOptimum" in self._parameters["StoreSupplementaryCalculations"]:
            if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]:
                HXa = self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin]
            elif "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                HXa = self.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
            else:
                HXa = Hm(Xa)
        #
        if "Innovation" in self._parameters["StoreSupplementaryCalculations"] or \
           "OMB"        in self._parameters["StoreSupplementaryCalculations"]:
            # Innovation at the background: d = Y - H(Xb)
            d = numpy.ravel(Y) - numpy.ravel(Hm(Xb))
        if "Innovation" in self._parameters["StoreSupplementaryCalculations"]:
            self.StoredVariables["Innovation"].store( numpy.ravel(d) )
        if "OMB" in self._parameters["StoreSupplementaryCalculations"]:
            self.StoredVariables["OMB"].store( numpy.ravel(d) )
        if "BMA" in self._parameters["StoreSupplementaryCalculations"]:
            self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
        if "OMA" in self._parameters["StoreSupplementaryCalculations"]:
            self.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
        if "SimulatedObservationAtBackground" in self._parameters["StoreSupplementaryCalculations"]:
            self.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(Hm(Xb)) )
        if "SimulatedObservationAtOptimum" in self._parameters["StoreSupplementaryCalculations"]:
            self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) )
        #
        self._post_run()
        return 0

# ==============================================================================
if __name__ == "__main__":
    print('\n AUTODIAGNOSTIC \n')
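
# ==============================================================================
# Minimal usage sketch (illustrative only, assuming the standard adaoBuilder
# entry points of ADAO; the vectors, matrices and parameter values below are
# placeholders to be adapted to the actual study):
#
#     from adao import adaoBuilder
#     case = adaoBuilder.New()
#     case.set( 'AlgorithmParameters',
#               Algorithm  = 'DerivativeFreeOptimization',
#               Parameters = {"Minimizer":"BOBYQA",
#                             "MaximumNumberOfSteps":100,
#                             "Bounds":[[0.,10.],[0.,10.]]} )
#     case.set( 'Background',          Vector = [2., 3.] )
#     case.set( 'BackgroundError',     ScalarSparseMatrix = 1. )
#     case.set( 'Observation',         Vector = [1., 2.] )
#     case.set( 'ObservationError',    ScalarSparseMatrix = 1. )
#     case.set( 'ObservationOperator', Matrix = [[1., 0.], [0., 1.]] )
#     case.execute()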