src/daComposant/daAlgorithms/DerivativeFreeOptimization.py
# -*- coding: utf-8 -*-
#
# Copyright (C) 2008-2017 EDF R&D
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
#
# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
#
# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D

import logging
from daCore import BasicObjects, PlatformInfo
import numpy, scipy.optimize

# ==============================================================================
class ElementaryAlgorithm(BasicObjects.Algorithm):
    def __init__(self):
        BasicObjects.Algorithm.__init__(self, "DERIVATIVEFREEOPTIMIZATION")
        self.defineRequiredParameter(
            name     = "Minimizer",
            default  = "BOBYQA",
            typecast = str,
            message  = "Minimiseur utilisé",
            listval  = ["BOBYQA", "COBYLA", "NEWUOA", "POWELL", "SIMPLEX", "SUBPLEX"],
            )
        self.defineRequiredParameter(
            name     = "MaximumNumberOfSteps",
            default  = 15000,
            typecast = int,
            message  = "Nombre maximal de pas d'optimisation",
            minval   = -1,
            )
        self.defineRequiredParameter(
            name     = "MaximumNumberOfFunctionEvaluations",
            default  = 15000,
            typecast = int,
            message  = "Nombre maximal d'évaluations de la fonction",
            minval   = -1,
            )
        self.defineRequiredParameter(
            name     = "StateVariationTolerance",
            default  = 1.e-4,
            typecast = float,
            message  = "Variation relative maximale de l'état lors de l'arrêt",
            )
        self.defineRequiredParameter(
            name     = "CostDecrementTolerance",
            default  = 1.e-7,
            typecast = float,
            message  = "Diminution relative minimale du cout lors de l'arrêt",
            )
        self.defineRequiredParameter(
            name     = "QualityCriterion",
            default  = "AugmentedWeightedLeastSquares",
            typecast = str,
            message  = "Critère de qualité utilisé",
            listval  = ["AugmentedWeightedLeastSquares","AWLS","DA",
                        "WeightedLeastSquares","WLS",
                        "LeastSquares","LS","L2",
                        "AbsoluteValue","L1",
                        "MaximumError","ME"],
            )
        self.defineRequiredParameter(
            name     = "StoreInternalVariables",
            default  = False,
            typecast = bool,
            message  = "Stockage des variables internes ou intermédiaires du calcul",
            )
        self.defineRequiredParameter(
            name     = "StoreSupplementaryCalculations",
            default  = [],
            typecast = tuple,
            message  = "Liste de calculs supplémentaires à stocker et/ou effectuer",
            listval  = ["CurrentState", "CostFunctionJ", "CostFunctionJb", "CostFunctionJo", "CostFunctionJAtCurrentOptimum", "CurrentOptimum", "IndexOfOptimum", "InnovationAtCurrentState", "BMA", "OMA", "OMB", "SimulatedObservationAtBackground", "SimulatedObservationAtCurrentOptimum", "SimulatedObservationAtCurrentState", "SimulatedObservationAtOptimum"]
            )
        self.defineRequiredParameter( # No typecast, to accept any bounds specification
            name     = "Bounds",
            message  = "Liste des valeurs de bornes",
            )

    def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
        self._pre_run(Parameters)
        #
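        # Fall back to the SciPy SIMPLEX minimizer when nlopt is not available
        # and the requested minimizer has no SciPy equivalent (only COBYLA,
        # POWELL and SIMPLEX do).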
        if not PlatformInfo.has_nlopt and not self._parameters["Minimizer"] in ["COBYLA", "POWELL", "SIMPLEX"]:
            self._parameters["Minimizer"] = "SIMPLEX"
        #
        # Operators
        # ---------
        Hm = HO["Direct"].appliedTo
        #
        # Precompute the inverses of B and R
        # ----------------------------------
        BI = B.getI()
        RI = R.getI()
        #
        # Definition of the cost function
        # -------------------------------
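        # For a trial state x, the cost J(x) = Jb + Jo depends on the chosen
        # quality measure:
        #   AugmentedWeightedLeastSquares (AWLS, DA):
        #     Jb = 1/2 (x-xb)' B^-1 (x-xb),  Jo = 1/2 (y-H(x))' R^-1 (y-H(x))
        #   WeightedLeastSquares (WLS): Jb = 0, Jo = 1/2 (y-H(x))' R^-1 (y-H(x))
        #   LeastSquares (LS, L2)     : Jb = 0, Jo = 1/2 (y-H(x))' (y-H(x))
        #   AbsoluteValue (L1)        : Jb = 0, Jo = sum( |y-H(x)| )
        #   MaximumError (ME)         : Jb = 0, Jo = max( |y-H(x)| )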
        def CostFunction(x, QualityMeasure="AugmentedWeightedLeastSquares"):
            _X  = numpy.asmatrix(numpy.ravel( x )).T
            self.StoredVariables["CurrentState"].store( _X )
            _HX = Hm( _X )
            _HX = numpy.asmatrix(numpy.ravel( _HX )).T
            _Innovation = Y - _HX
            if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"] or \
               "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX )
            if "InnovationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]:
                self.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
            #
            if QualityMeasure in ["AugmentedWeightedLeastSquares","AWLS","DA"]:
                if BI is None or RI is None:
                    raise ValueError("Background and Observation error covariance matrices have to be properly defined!")
                Jb  = 0.5 * (_X - Xb).T * BI * (_X - Xb)
                Jo  = 0.5 * (_Innovation).T * RI * (_Innovation)
            elif QualityMeasure in ["WeightedLeastSquares","WLS"]:
                if RI is None:
                    raise ValueError("Observation error covariance matrix has to be properly defined!")
                Jb  = 0.
                Jo  = 0.5 * (_Innovation).T * RI * (_Innovation)
            elif QualityMeasure in ["LeastSquares","LS","L2"]:
                Jb  = 0.
                Jo  = 0.5 * (_Innovation).T * (_Innovation)
            elif QualityMeasure in ["AbsoluteValue","L1"]:
                Jb  = 0.
                Jo  = numpy.sum( numpy.abs(_Innovation) )
            elif QualityMeasure in ["MaximumError","ME"]:
                Jb  = 0.
                Jo  = numpy.max( numpy.abs(_Innovation) )
            else:
                # Guard against an unknown quality measure, otherwise Jb and Jo
                # would be left undefined below
                raise ValueError("Unknown quality measure: %s"%str(QualityMeasure))
            #
            J   = float( Jb ) + float( Jo )
            #
            self.StoredVariables["CostFunctionJb"].store( Jb )
            self.StoredVariables["CostFunctionJo"].store( Jo )
            self.StoredVariables["CostFunctionJ" ].store( J )
            if "IndexOfOptimum" in self._parameters["StoreSupplementaryCalculations"] or \
               "CurrentOptimum" in self._parameters["StoreSupplementaryCalculations"] or \
               "CostFunctionJAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"] or \
               "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
            if "IndexOfOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                self.StoredVariables["IndexOfOptimum"].store( IndexMin )
            if "CurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                self.StoredVariables["CurrentOptimum"].store( self.StoredVariables["CurrentState"][IndexMin] )
            if "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
            if "CostFunctionJAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJb"][IndexMin] )
                self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJo"][IndexMin] )
                self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] )
            return J
        #
        # Starting point of the optimization: Xini = Xb
        # ---------------------------------------------
        Xini = numpy.ravel(Xb)
        if len(Xini) < 2 and self._parameters["Minimizer"] == "NEWUOA":
            raise ValueError("The minimizer %s cannot be used when the optimisation state dimension is 1. Please choose another minimizer."%self._parameters["Minimizer"])
        #
        # Minimization of the cost functional
        # -----------------------------------
        nbPreviousSteps = self.StoredVariables["CostFunctionJ"].stepnumber()
        #
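        # Dispatch on the requested minimizer: POWELL, SIMPLEX and COBYLA have
        # SciPy implementations, while BOBYQA, NEWUOA, SUBPLEX and the nlopt
        # variants of SIMPLEX and COBYLA require the nlopt library. Every cost
        # evaluation is recorded, so the optimum is later taken from the stored
        # history rather than from the minimizer's return value alone.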
        if self._parameters["Minimizer"] == "POWELL":
            Minimum, J_optimal, direc, niter, nfeval, rc = scipy.optimize.fmin_powell(
                func        = CostFunction,
                x0          = Xini,
                args        = (self._parameters["QualityCriterion"],),
                maxiter     = self._parameters["MaximumNumberOfSteps"]-1,
                maxfun      = self._parameters["MaximumNumberOfFunctionEvaluations"],
                xtol        = self._parameters["StateVariationTolerance"],
                ftol        = self._parameters["CostDecrementTolerance"],
                full_output = True,
                disp        = self._parameters["optdisp"],
                )
        elif self._parameters["Minimizer"] == "COBYLA" and not PlatformInfo.has_nlopt:
            def make_constraints(bounds):
                constraints = []
                for (i,(a,b)) in enumerate(bounds):
                    # Bind i, a and b at definition time: without the default
                    # arguments, every constraint would use the last loop values
                    lower = lambda x, i=i, a=a: x[i] - a
                    upper = lambda x, i=i, b=b: b - x[i]
                    constraints = constraints + [lower] + [upper]
                return constraints
            if self._parameters["Bounds"] is None:
                raise ValueError("Bounds have to be given for all axes as a list of lower/upper pairs!")
            Minimum = scipy.optimize.fmin_cobyla(
                func        = CostFunction,
                x0          = Xini,
                cons        = make_constraints( self._parameters["Bounds"] ),
                args        = (self._parameters["QualityCriterion"],),
                consargs    = (), # To avoid extra-args
                maxfun      = self._parameters["MaximumNumberOfFunctionEvaluations"],
                rhobeg      = 1.0,
                rhoend      = self._parameters["StateVariationTolerance"],
                catol       = 2.*self._parameters["StateVariationTolerance"],
                disp        = self._parameters["optdisp"],
                )
        elif self._parameters["Minimizer"] == "COBYLA" and PlatformInfo.has_nlopt:
            import nlopt
            opt = nlopt.opt(nlopt.LN_COBYLA, Xini.size)
            def _f(_Xx, Grad):
                # DFO, so no gradient
                return CostFunction(_Xx, self._parameters["QualityCriterion"])
            opt.set_min_objective(_f)
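            # "Bounds" is one (lower, upper) pair per state component; NaN values
            # are mapped to -inf/+inf, i.e. the corresponding side is unbounded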
            if self._parameters["Bounds"] is not None:
                lub = numpy.array(self._parameters["Bounds"],dtype=float).reshape((Xini.size,2))
                lb = lub[:,0] ; lb[numpy.isnan(lb)] = -float('inf')
                ub = lub[:,1] ; ub[numpy.isnan(ub)] = +float('inf')
                if self._parameters["optdisp"]:
                    print("%s: upper bounds %s"%(opt.get_algorithm_name(),ub))
                    print("%s: lower bounds %s"%(opt.get_algorithm_name(),lb))
                opt.set_upper_bounds(ub)
                opt.set_lower_bounds(lb)
            opt.set_ftol_rel(self._parameters["CostDecrementTolerance"])
            opt.set_xtol_rel(2.*self._parameters["StateVariationTolerance"])
            opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
            Minimum = opt.optimize( Xini )
            if self._parameters["optdisp"]:
                print("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum))
                print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value()))
                print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result()))
        elif self._parameters["Minimizer"] == "SIMPLEX" and not PlatformInfo.has_nlopt:
            Minimum, J_optimal, niter, nfeval, rc = scipy.optimize.fmin(
                func        = CostFunction,
                x0          = Xini,
                args        = (self._parameters["QualityCriterion"],),
                maxiter     = self._parameters["MaximumNumberOfSteps"]-1,
                maxfun      = self._parameters["MaximumNumberOfFunctionEvaluations"],
                xtol        = self._parameters["StateVariationTolerance"],
                ftol        = self._parameters["CostDecrementTolerance"],
                full_output = True,
                disp        = self._parameters["optdisp"],
                )
        elif self._parameters["Minimizer"] == "SIMPLEX" and PlatformInfo.has_nlopt:
            import nlopt
            opt = nlopt.opt(nlopt.LN_NELDERMEAD, Xini.size)
            def _f(_Xx, Grad):
                # DFO, so no gradient
                return CostFunction(_Xx, self._parameters["QualityCriterion"])
            opt.set_min_objective(_f)
            if self._parameters["Bounds"] is not None:
                lub = numpy.array(self._parameters["Bounds"],dtype=float).reshape((Xini.size,2))
                lb = lub[:,0] ; lb[numpy.isnan(lb)] = -float('inf')
                ub = lub[:,1] ; ub[numpy.isnan(ub)] = +float('inf')
                if self._parameters["optdisp"]:
                    print("%s: upper bounds %s"%(opt.get_algorithm_name(),ub))
                    print("%s: lower bounds %s"%(opt.get_algorithm_name(),lb))
                opt.set_upper_bounds(ub)
                opt.set_lower_bounds(lb)
            opt.set_ftol_rel(self._parameters["CostDecrementTolerance"])
            opt.set_xtol_rel(2.*self._parameters["StateVariationTolerance"])
            opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
            Minimum = opt.optimize( Xini )
            if self._parameters["optdisp"]:
                print("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum))
                print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value()))
                print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result()))
        elif self._parameters["Minimizer"] == "BOBYQA" and PlatformInfo.has_nlopt:
            import nlopt
            opt = nlopt.opt(nlopt.LN_BOBYQA, Xini.size)
            def _f(_Xx, Grad):
                # DFO, so no gradient
                return CostFunction(_Xx, self._parameters["QualityCriterion"])
            opt.set_min_objective(_f)
            if self._parameters["Bounds"] is not None:
                lub = numpy.array(self._parameters["Bounds"],dtype=float).reshape((Xini.size,2))
                lb = lub[:,0] ; lb[numpy.isnan(lb)] = -float('inf')
                ub = lub[:,1] ; ub[numpy.isnan(ub)] = +float('inf')
                if self._parameters["optdisp"]:
                    print("%s: upper bounds %s"%(opt.get_algorithm_name(),ub))
                    print("%s: lower bounds %s"%(opt.get_algorithm_name(),lb))
                opt.set_upper_bounds(ub)
                opt.set_lower_bounds(lb)
            opt.set_ftol_rel(self._parameters["CostDecrementTolerance"])
            opt.set_xtol_rel(2.*self._parameters["StateVariationTolerance"])
            opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
            Minimum = opt.optimize( Xini )
            if self._parameters["optdisp"]:
                print("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum))
                print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value()))
                print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result()))
        elif self._parameters["Minimizer"] == "NEWUOA" and PlatformInfo.has_nlopt:
            import nlopt
            opt = nlopt.opt(nlopt.LN_NEWUOA, Xini.size)
            def _f(_Xx, Grad):
                # DFO, so no gradient
                return CostFunction(_Xx, self._parameters["QualityCriterion"])
            opt.set_min_objective(_f)
            if self._parameters["Bounds"] is not None:
                lub = numpy.array(self._parameters["Bounds"],dtype=float).reshape((Xini.size,2))
                lb = lub[:,0] ; lb[numpy.isnan(lb)] = -float('inf')
                ub = lub[:,1] ; ub[numpy.isnan(ub)] = +float('inf')
                if self._parameters["optdisp"]:
                    print("%s: upper bounds %s"%(opt.get_algorithm_name(),ub))
                    print("%s: lower bounds %s"%(opt.get_algorithm_name(),lb))
                opt.set_upper_bounds(ub)
                opt.set_lower_bounds(lb)
            opt.set_ftol_rel(self._parameters["CostDecrementTolerance"])
            opt.set_xtol_rel(2.*self._parameters["StateVariationTolerance"])
            opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
            Minimum = opt.optimize( Xini )
            if self._parameters["optdisp"]:
                print("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum))
                print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value()))
                print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result()))
        elif self._parameters["Minimizer"] == "SUBPLEX" and PlatformInfo.has_nlopt:
            import nlopt
            opt = nlopt.opt(nlopt.LN_SBPLX, Xini.size)
            def _f(_Xx, Grad):
                # DFO, so no gradient
                return CostFunction(_Xx, self._parameters["QualityCriterion"])
            opt.set_min_objective(_f)
            if self._parameters["Bounds"] is not None:
                lub = numpy.array(self._parameters["Bounds"],dtype=float).reshape((Xini.size,2))
                lb = lub[:,0] ; lb[numpy.isnan(lb)] = -float('inf')
                ub = lub[:,1] ; ub[numpy.isnan(ub)] = +float('inf')
                if self._parameters["optdisp"]:
                    print("%s: upper bounds %s"%(opt.get_algorithm_name(),ub))
                    print("%s: lower bounds %s"%(opt.get_algorithm_name(),lb))
                opt.set_upper_bounds(ub)
                opt.set_lower_bounds(lb)
            opt.set_ftol_rel(self._parameters["CostDecrementTolerance"])
            opt.set_xtol_rel(2.*self._parameters["StateVariationTolerance"])
            opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
            Minimum = opt.optimize( Xini )
            if self._parameters["optdisp"]:
                print("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum))
                print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value()))
                print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result()))
        else:
            raise ValueError("Error in Minimizer name: %s"%self._parameters["Minimizer"])
        #
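        # The analysis is the best state actually evaluated during the search,
        # retrieved from the cost history recorded by CostFunction for this run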
        IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
        MinJ     = self.StoredVariables["CostFunctionJ"][IndexMin]
        Minimum  = self.StoredVariables["CurrentState"][IndexMin]
        #
        # Computing the analysis
        # ----------------------
        Xa = numpy.asmatrix(numpy.ravel( Minimum )).T
        #
        self.StoredVariables["Analysis"].store( Xa.A1 )
        #
        if "OMA"                           in self._parameters["StoreSupplementaryCalculations"] or \
           "SimulatedObservationAtOptimum" in self._parameters["StoreSupplementaryCalculations"]:
            if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]:
                HXa = self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin]
            elif "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                HXa = self.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
            else:
                HXa = Hm(Xa)
        #
        if "Innovation" in self._parameters["StoreSupplementaryCalculations"] or \
           "OMB" in self._parameters["StoreSupplementaryCalculations"]:
            # Innovation with respect to the background: d = Y - H(Xb)
            d = numpy.ravel(Y) - numpy.ravel(Hm(Xb))
        if "Innovation" in self._parameters["StoreSupplementaryCalculations"]:
            self.StoredVariables["Innovation"].store( numpy.ravel(d) )
        if "OMB" in self._parameters["StoreSupplementaryCalculations"]:
            self.StoredVariables["OMB"].store( numpy.ravel(d) )
        if "BMA" in self._parameters["StoreSupplementaryCalculations"]:
            self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
        if "OMA" in self._parameters["StoreSupplementaryCalculations"]:
            self.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
        if "SimulatedObservationAtBackground" in self._parameters["StoreSupplementaryCalculations"]:
            self.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(Hm(Xb)) )
        if "SimulatedObservationAtOptimum" in self._parameters["StoreSupplementaryCalculations"]:
            self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) )
        #
        self._post_run()
        return 0

# ==============================================================================
if __name__ == "__main__":
    print('\n AUTODIAGNOSTIC \n')
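
# ==============================================================================
# Minimal usage sketch (illustrative assumption, not part of this module): the
# algorithm is normally driven through the ADAO textual interface rather than
# instantiated directly. Exact import and keyword names may differ between
# ADAO/SALOME versions.
#
#   from adao import adaoBuilder
#   case = adaoBuilder.New()
#   case.set( 'AlgorithmParameters', Algorithm = 'DerivativeFreeOptimization',
#             Parameters = {"Minimizer":"BOBYQA", "MaximumNumberOfSteps":100} )
#   case.set( 'Background',          Vector = [0., 0., 0.] )
#   case.set( 'BackgroundError',     ScalarSparseMatrix = 1. )
#   case.set( 'Observation',         Vector = [0.5, 1.5, 2.5] )
#   case.set( 'ObservationError',    ScalarSparseMatrix = 1. )
#   case.set( 'ObservationOperator', Matrix = [[1.,0.,0.],[0.,1.,0.],[0.,0.,1.]] )
#   case.execute()
#   print( case.get('Analysis')[-1] )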