#-*-coding:iso-8859-1-*-
#
# Copyright (C) 2008-2017 EDF R&D
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
#
# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
#
# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D

import logging
from daCore import BasicObjects, PlatformInfo
import numpy, scipy.optimize

# ==============================================================================
class ElementaryAlgorithm(BasicObjects.Algorithm):
    def __init__(self):
        BasicObjects.Algorithm.__init__(self, "DERIVATIVEFREEOPTIMIZATION")
        self.defineRequiredParameter(
            name     = "Minimizer",
            default  = "BOBYQA",
            typecast = str,
            message  = "Minimiseur utilisé",
            listval  = ["BOBYQA", "COBYLA", "NEWUOA", "POWELL", "SIMPLEX", "SUBPLEX"],
            )
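        # As implemented in run() below, BOBYQA, NEWUOA and SUBPLEX are only
        # available through the optional nlopt package; COBYLA and SIMPLEX use
        # nlopt when it is present and fall back to scipy.optimize otherwise;
        # POWELL always uses scipy.optimize.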
        self.defineRequiredParameter(
            name     = "MaximumNumberOfSteps",
            default  = 15000,
            typecast = int,
            message  = "Nombre maximal de pas d'optimisation",
            minval   = -1,
            )
        self.defineRequiredParameter(
            name     = "MaximumNumberOfFunctionEvaluations",
            default  = 15000,
            typecast = int,
            message  = "Nombre maximal d'évaluations de la fonction",
            minval   = -1,
            )
        self.defineRequiredParameter(
            name     = "StateVariationTolerance",
            default  = 1.e-4,
            typecast = float,
            message  = "Variation relative maximale de l'état lors de l'arrêt",
            )
        self.defineRequiredParameter(
            name     = "CostDecrementTolerance",
            default  = 1.e-7,
            typecast = float,
            message  = "Diminution relative minimale du coût lors de l'arrêt",
            )
        self.defineRequiredParameter(
            name     = "QualityCriterion",
            default  = "AugmentedWeightedLeastSquares",
            typecast = str,
            message  = "Critère de qualité utilisé",
            listval  = ["AugmentedWeightedLeastSquares","AWLS","DA",
                        "WeightedLeastSquares","WLS",
                        "LeastSquares","LS","L2",
                        "AbsoluteValue","L1",
                        "MaximumError","ME"],
            )
        self.defineRequiredParameter(
            name     = "StoreInternalVariables",
            default  = False,
            typecast = bool,
            message  = "Stockage des variables internes ou intermédiaires du calcul",
            )
        self.defineRequiredParameter(
            name     = "StoreSupplementaryCalculations",
            default  = [],
            typecast = tuple,
            message  = "Liste de calculs supplémentaires à stocker et/ou effectuer",
            listval  = ["CurrentState", "CostFunctionJ", "CostFunctionJb", "CostFunctionJo",
                        "CostFunctionJAtCurrentOptimum", "CurrentOptimum", "IndexOfOptimum",
                        "InnovationAtCurrentState", "BMA", "OMA", "OMB",
                        "SimulatedObservationAtBackground", "SimulatedObservationAtCurrentOptimum",
                        "SimulatedObservationAtCurrentState", "SimulatedObservationAtOptimum"]
            )
        self.defineRequiredParameter( # No typecast
            name     = "Bounds",
            message  = "Liste des valeurs de bornes",
            )

    def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
        self._pre_run()
        if logging.getLogger().level < logging.WARNING:
            self.__disp = 1
        else:
            self.__disp = 0
        #
        # Control parameters
        # ------------------
        self.setParameters(Parameters)
        #
        if ("Bounds" in self._parameters) and isinstance(self._parameters["Bounds"], (list, tuple)) and (len(self._parameters["Bounds"]) > 0):
            Bounds = self._parameters["Bounds"]
            logging.debug("%s Prise en compte des bornes effectuee"%(self._name,))
        else:
            Bounds = None
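        # The bounds are expected as one (lower, upper) pair per state
        # component, e.g. Bounds = [[0., 10.], [-1., 1.]]; they are mandatory
        # for the scipy COBYLA branch below, optional for the nlopt-based
        # minimizers, and ignored by the scipy POWELL and SIMPLEX minimizers.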
        #
        # Operators
        # ---------
        Hm = HO["Direct"].appliedTo
        #
        # Precomputation of the inverses of B and R
        # -----------------------------------------
        BI = B.getI()
        RI = R.getI()
        #
        # Definition of the cost function
        # -------------------------------
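        # Depending on the "QualityCriterion" parameter, the quality measure
        # J(x) evaluated below is:
        #   AWLS/DA : J(x) = 1/2 (x-xb)' B^-1 (x-xb) + 1/2 (y-H(x))' R^-1 (y-H(x))
        #   WLS     : J(x) = 1/2 (y-H(x))' R^-1 (y-H(x))
        #   LS/L2   : J(x) = 1/2 (y-H(x))' (y-H(x))
        #   L1      : J(x) = sum( |y-H(x)| )
        #   ME      : J(x) = max( |y-H(x)| )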
        def CostFunction(x, QualityMeasure="AugmentedWeightedLeastSquares"):
            _X  = numpy.asmatrix(numpy.ravel( x )).T
            self.StoredVariables["CurrentState"].store( _X )
            _HX = Hm( _X )
            _HX = numpy.asmatrix(numpy.ravel( _HX )).T
            _Innovation = Y - _HX
            if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"] or \
               "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX )
            if "InnovationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]:
                self.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
            #
            if QualityMeasure in ["AugmentedWeightedLeastSquares","AWLS","DA"]:
                if BI is None or RI is None:
                    raise ValueError("Background and Observation error covariance matrices have to be properly defined!")
                Jb  = 0.5 * (_X - Xb).T * BI * (_X - Xb)
                Jo  = 0.5 * (_Innovation).T * RI * (_Innovation)
            elif QualityMeasure in ["WeightedLeastSquares","WLS"]:
                if RI is None:
                    raise ValueError("Observation error covariance matrix has to be properly defined!")
                Jb  = 0.
                Jo  = 0.5 * (_Innovation).T * RI * (_Innovation)
            elif QualityMeasure in ["LeastSquares","LS","L2"]:
                Jb  = 0.
                Jo  = 0.5 * (_Innovation).T * (_Innovation)
            elif QualityMeasure in ["AbsoluteValue","L1"]:
                Jb  = 0.
                Jo  = numpy.sum( numpy.abs(_Innovation) )
            elif QualityMeasure in ["MaximumError","ME"]:
                Jb  = 0.
                Jo  = numpy.max( numpy.abs(_Innovation) )
            #
            J   = float( Jb ) + float( Jo )
            #
            self.StoredVariables["CostFunctionJb"].store( Jb )
            self.StoredVariables["CostFunctionJo"].store( Jo )
            self.StoredVariables["CostFunctionJ" ].store( J )
            if "IndexOfOptimum" in self._parameters["StoreSupplementaryCalculations"] or \
               "CurrentOptimum" in self._parameters["StoreSupplementaryCalculations"] or \
               "CostFunctionJAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"] or \
               "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
            if "IndexOfOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                self.StoredVariables["IndexOfOptimum"].store( IndexMin )
            if "CurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                self.StoredVariables["CurrentOptimum"].store( self.StoredVariables["CurrentState"][IndexMin] )
            if "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
            if "CostFunctionJAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJb"][IndexMin] )
                self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJo"][IndexMin] )
                self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] )
            return J
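        # Besides returning J, CostFunction stores every evaluated state and
        # cost term, so the overall optimum can be recovered afterwards from
        # the stored series independently of the minimizer's own return value.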
        #
        # Starting point of the optimization: Xini = Xb
        # ----------------------------------------------
        Xini = numpy.ravel(Xb)
        #
        # Minimization of the functional
        # ------------------------------
        nbPreviousSteps = self.StoredVariables["CostFunctionJ"].stepnumber()
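        # nbPreviousSteps is the current length of the stored cost series, so
        # that the optimum searched below only refers to evaluations performed
        # during this run (previous runs may already have stored steps).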
        #
        if self._parameters["Minimizer"] == "POWELL":
            Minimum, J_optimal, direc, niter, nfeval, rc = scipy.optimize.fmin_powell(
                func        = CostFunction,
                x0          = Xini,
                args        = (self._parameters["QualityCriterion"],),
                maxiter     = self._parameters["MaximumNumberOfSteps"]-1,
                maxfun      = self._parameters["MaximumNumberOfFunctionEvaluations"],
                xtol        = self._parameters["StateVariationTolerance"],
                ftol        = self._parameters["CostDecrementTolerance"],
                full_output = True,
                disp        = self.__disp,
                )
        elif self._parameters["Minimizer"] == "COBYLA" and not PlatformInfo.has_nlopt:
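            # scipy's fmin_cobyla handles bounds only through inequality
            # constraints g(x) >= 0, so each (lower, upper) pair is turned into
            # the two constraints x[i]-a >= 0 and b-x[i] >= 0; default
            # arguments bind i, a and b at definition time for each pair.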
            def make_constraints(bounds):
                constraints = []
                for (i,(a,b)) in enumerate(bounds):
                    lower = lambda x, i=i, a=a: x[i] - a
                    upper = lambda x, i=i, b=b: b - x[i]
                    constraints = constraints + [lower] + [upper]
                return constraints
            if Bounds is None:
                raise ValueError("Bounds have to be given for all axes as a list of lower/upper pairs!")
            Minimum = scipy.optimize.fmin_cobyla(
                func        = CostFunction,
                x0          = Xini,
                cons        = make_constraints( Bounds ),
                args        = (self._parameters["QualityCriterion"],),
                consargs    = (), # To avoid extra-args
                maxfun      = self._parameters["MaximumNumberOfFunctionEvaluations"],
                rhobeg      = 1.0,
                rhoend      = self._parameters["StateVariationTolerance"],
                catol       = 2.*self._parameters["StateVariationTolerance"],
                disp        = self.__disp,
                )
        elif self._parameters["Minimizer"] == "COBYLA" and PlatformInfo.has_nlopt:
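            # The nlopt branches all follow the same pattern: nlopt calls the
            # objective as f(x, grad), where grad only has to be filled for
            # gradient-based algorithms and is therefore ignored by these LN_*
            # (local, derivative-free) minimizers; the stopping criteria are
            # the relative tolerances ftol_rel/xtol_rel and a maximum number
            # of evaluations, mapped from the corresponding ADAO parameters.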
            import nlopt
            opt = nlopt.opt(nlopt.LN_COBYLA, Xini.size)
            def _f(_Xx, Grad):
                # DFO, so no gradient
                return CostFunction(_Xx, self._parameters["QualityCriterion"])
            opt.set_min_objective(_f)
            if Bounds is not None:
                lub = numpy.array(Bounds).reshape((Xini.size,2))
                lb = lub[:,0]
                ub = lub[:,1]
                if self.__disp:
                    print("%s: upper bounds %s"%(opt.get_algorithm_name(),ub))
                    print("%s: lower bounds %s"%(opt.get_algorithm_name(),lb))
                opt.set_upper_bounds(ub)
                opt.set_lower_bounds(lb)
            opt.set_ftol_rel(self._parameters["CostDecrementTolerance"])
            opt.set_xtol_rel(2.*self._parameters["StateVariationTolerance"])
            opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
            Minimum = opt.optimize( Xini )
            if self.__disp:
                print("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum))
                print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value()))
                print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result()))
        elif self._parameters["Minimizer"] == "SIMPLEX" and not PlatformInfo.has_nlopt:
            Minimum, J_optimal, niter, nfeval, rc = scipy.optimize.fmin(
                func        = CostFunction,
                x0          = Xini,
                args        = (self._parameters["QualityCriterion"],),
                maxiter     = self._parameters["MaximumNumberOfSteps"]-1,
                maxfun      = self._parameters["MaximumNumberOfFunctionEvaluations"],
                xtol        = self._parameters["StateVariationTolerance"],
                ftol        = self._parameters["CostDecrementTolerance"],
                full_output = True,
                disp        = self.__disp,
                )
        elif self._parameters["Minimizer"] == "SIMPLEX" and PlatformInfo.has_nlopt:
            import nlopt
            opt = nlopt.opt(nlopt.LN_NELDERMEAD, Xini.size)
            def _f(_Xx, Grad):
                # DFO, so no gradient
                return CostFunction(_Xx, self._parameters["QualityCriterion"])
            opt.set_min_objective(_f)
            if Bounds is not None:
                lub = numpy.array(Bounds).reshape((Xini.size,2))
                lb = lub[:,0]
                ub = lub[:,1]
                if self.__disp:
                    print("%s: upper bounds %s"%(opt.get_algorithm_name(),ub))
                    print("%s: lower bounds %s"%(opt.get_algorithm_name(),lb))
                opt.set_upper_bounds(ub)
                opt.set_lower_bounds(lb)
            opt.set_ftol_rel(self._parameters["CostDecrementTolerance"])
            opt.set_xtol_rel(2.*self._parameters["StateVariationTolerance"])
            opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
            Minimum = opt.optimize( Xini )
            if self.__disp:
                print("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum))
                print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value()))
                print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result()))
        elif self._parameters["Minimizer"] == "BOBYQA" and PlatformInfo.has_nlopt:
            import nlopt
            opt = nlopt.opt(nlopt.LN_BOBYQA, Xini.size)
            def _f(_Xx, Grad):
                # DFO, so no gradient
                return CostFunction(_Xx, self._parameters["QualityCriterion"])
            opt.set_min_objective(_f)
            if Bounds is not None:
                lub = numpy.array(Bounds).reshape((Xini.size,2))
                lb = lub[:,0]
                ub = lub[:,1]
                if self.__disp:
                    print("%s: upper bounds %s"%(opt.get_algorithm_name(),ub))
                    print("%s: lower bounds %s"%(opt.get_algorithm_name(),lb))
                opt.set_upper_bounds(ub)
                opt.set_lower_bounds(lb)
            opt.set_ftol_rel(self._parameters["CostDecrementTolerance"])
            opt.set_xtol_rel(2.*self._parameters["StateVariationTolerance"])
            opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
            Minimum = opt.optimize( Xini )
            if self.__disp:
                print("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum))
                print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value()))
                print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result()))
        elif self._parameters["Minimizer"] == "NEWUOA" and PlatformInfo.has_nlopt:
            import nlopt
            opt = nlopt.opt(nlopt.LN_NEWUOA, Xini.size)
            def _f(_Xx, Grad):
                # DFO, so no gradient
                return CostFunction(_Xx, self._parameters["QualityCriterion"])
            opt.set_min_objective(_f)
            if Bounds is not None:
                lub = numpy.array(Bounds).reshape((Xini.size,2))
                lb = lub[:,0]
                ub = lub[:,1]
                if self.__disp:
                    print("%s: upper bounds %s"%(opt.get_algorithm_name(),ub))
                    print("%s: lower bounds %s"%(opt.get_algorithm_name(),lb))
                opt.set_upper_bounds(ub)
                opt.set_lower_bounds(lb)
            opt.set_ftol_rel(self._parameters["CostDecrementTolerance"])
            opt.set_xtol_rel(2.*self._parameters["StateVariationTolerance"])
            opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
            Minimum = opt.optimize( Xini )
            if self.__disp:
                print("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum))
                print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value()))
                print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result()))
        elif self._parameters["Minimizer"] == "SUBPLEX" and PlatformInfo.has_nlopt:
            import nlopt
            opt = nlopt.opt(nlopt.LN_SBPLX, Xini.size)
            def _f(_Xx, Grad):
                # DFO, so no gradient
                return CostFunction(_Xx, self._parameters["QualityCriterion"])
            opt.set_min_objective(_f)
            if Bounds is not None:
                lub = numpy.array(Bounds).reshape((Xini.size,2))
                lb = lub[:,0]
                ub = lub[:,1]
                if self.__disp:
                    print("%s: upper bounds %s"%(opt.get_algorithm_name(),ub))
                    print("%s: lower bounds %s"%(opt.get_algorithm_name(),lb))
                opt.set_upper_bounds(ub)
                opt.set_lower_bounds(lb)
            opt.set_ftol_rel(self._parameters["CostDecrementTolerance"])
            opt.set_xtol_rel(2.*self._parameters["StateVariationTolerance"])
            opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
            Minimum = opt.optimize( Xini )
            if self.__disp:
                print("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum))
                print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value()))
                print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result()))
        else:
            raise ValueError("Error in Minimizer name: %s"%self._parameters["Minimizer"])
        #
        IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
        MinJ     = self.StoredVariables["CostFunctionJ"][IndexMin]
        Minimum  = self.StoredVariables["CurrentState"][IndexMin]
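        # The state returned by the minimizer is superseded by the best state
        # actually evaluated (smallest stored J over this run), which is the
        # one retained as the analysis below.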
        #
        # Retrieval of the analysis
        # -------------------------
        Xa = numpy.asmatrix(numpy.ravel( Minimum )).T
        #
        self.StoredVariables["Analysis"].store( Xa.A1 )
        #
        if "OMA"                           in self._parameters["StoreSupplementaryCalculations"] or \
           "SimulatedObservationAtOptimum" in self._parameters["StoreSupplementaryCalculations"]:
            if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]:
                HXa = self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin]
            elif "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                HXa = self.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
            else:
                HXa = Hm(Xa)
        #
        if "Innovation" in self._parameters["StoreSupplementaryCalculations"] or \
           "OMB"        in self._parameters["StoreSupplementaryCalculations"]:
            # Innovation at the background: observation minus simulated background
            d = numpy.ravel(Y) - numpy.ravel(Hm(Xb))
        if "Innovation" in self._parameters["StoreSupplementaryCalculations"]:
            self.StoredVariables["Innovation"].store( numpy.ravel(d) )
        if "OMB" in self._parameters["StoreSupplementaryCalculations"]:
            self.StoredVariables["OMB"].store( numpy.ravel(d) )
        if "BMA" in self._parameters["StoreSupplementaryCalculations"]:
            self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
        if "OMA" in self._parameters["StoreSupplementaryCalculations"]:
            self.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
        if "SimulatedObservationAtBackground" in self._parameters["StoreSupplementaryCalculations"]:
            self.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(Hm(Xb)) )
        if "SimulatedObservationAtOptimum" in self._parameters["StoreSupplementaryCalculations"]:
            self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) )
        #
        self._post_run()
        return 0

# ==============================================================================
if __name__ == "__main__":
    print('\n AUTODIAGNOSTIC \n')