modules/adao.git: src/daComposant/daAlgorithms/DerivativeFreeOptimization.py
#-*-coding:iso-8859-1-*-
#
#  Copyright (C) 2008-2015 EDF R&D
#
#  This library is free software; you can redistribute it and/or
#  modify it under the terms of the GNU Lesser General Public
#  License as published by the Free Software Foundation; either
#  version 2.1 of the License.
#
#  This library is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
#  Lesser General Public License for more details.
#
#  You should have received a copy of the GNU Lesser General Public
#  License along with this library; if not, write to the Free Software
#  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
#
#  See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
#
#  Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D

import logging
from daCore import BasicObjects
import numpy, scipy.optimize

# ==============================================================================
class ElementaryAlgorithm(BasicObjects.Algorithm):
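    """
    Estimation of an optimal state by derivative-free minimization of a
    configurable cost function, using the gradient-free POWELL or SIMPLEX
    (Nelder-Mead) minimizers of scipy.optimize.
    """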
    def __init__(self):
        BasicObjects.Algorithm.__init__(self, "DERIVATIVEFREEOPTIMIZATION")
        self.defineRequiredParameter(
            name     = "Minimizer",
            default  = "POWELL",
            typecast = str,
            message  = "Minimiseur utilisé",
            listval  = ["POWELL", "SIMPLEX"],
            )
        self.defineRequiredParameter(
            name     = "MaximumNumberOfSteps",
            default  = 15000,
            typecast = int,
            message  = "Nombre maximal de pas d'optimisation",
            minval   = -1,
            )
        self.defineRequiredParameter(
            name     = "MaximumNumberOfFunctionEvaluations",
            default  = 15000,
            typecast = int,
            message  = "Nombre maximal d'évaluations de la fonction",
            minval   = -1,
            )
        self.defineRequiredParameter(
            name     = "StateVariationTolerance",
            default  = 1.e-4,
            typecast = float,
            message  = "Variation relative maximale de l'état lors de l'arrêt",
            )
        self.defineRequiredParameter(
            name     = "CostDecrementTolerance",
            default  = 1.e-7,
            typecast = float,
            message  = "Diminution relative minimale du coût lors de l'arrêt",
            )
        self.defineRequiredParameter(
            name     = "QualityCriterion",
            default  = "AugmentedWeightedLeastSquares",
            typecast = str,
            message  = "Critère de qualité utilisé",
            listval  = ["AugmentedWeightedLeastSquares","AWLS","DA",
                        "WeightedLeastSquares","WLS",
                        "LeastSquares","LS","L2",
                        "AbsoluteValue","L1",
                        "MaximumError","ME"],
            )
        self.defineRequiredParameter(
            name     = "StoreInternalVariables",
            default  = False,
            typecast = bool,
            message  = "Stockage des variables internes ou intermédiaires du calcul",
            )
        self.defineRequiredParameter(
            name     = "StoreSupplementaryCalculations",
            default  = [],
            typecast = tuple,
            message  = "Liste de calculs supplémentaires à stocker et/ou effectuer",
            listval  = ["CurrentState", "CostFunctionJ", "CostFunctionJAtCurrentOptimum",
                        "CurrentOptimum", "IndexOfOptimum", "Innovation",
                        "InnovationAtCurrentState", "BMA", "OMA", "OMB",
                        "SimulatedObservationAtBackground",
                        "SimulatedObservationAtCurrentOptimum",
                        "SimulatedObservationAtCurrentState",
                        "SimulatedObservationAtOptimum"]
            )

    def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
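        """
        Xb : background state, also used as the starting point of the search.
        Y  : observation vector.
        HO : observation operator; only its "Direct" form is applied here.
        B, R : background and observation error covariance matrices, needed
               only by the quality criteria that weight with their inverses.
        """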
        self._pre_run()
        if logging.getLogger().level < logging.WARNING:
            self.__disp = 1
        else:
            self.__disp = 0
        #
        # Control parameters
        # ------------------
        self.setParameters(Parameters)
        #
        # Operators
        # ---------
        Hm = HO["Direct"].appliedTo
        #
        # Precompute the inverses of B and R
        # ----------------------------------
        BI = B.getI()
        RI = R.getI()
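        # Note: getI() may yield None when a covariance is not usable; the
        # quality criteria that require BI or RI check for this case below
        # and raise an explicit error.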
        #
        # Definition of the cost function
        # -------------------------------
        def CostFunction(x, QualityMeasure="AugmentedWeightedLeastSquares"):
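            """
            Evaluate the selected quality measure J(x) = Jb(x) + Jo(x),
            with _Innovation = Y - H(x):
              AWLS/DA : Jb = 0.5*(x-Xb)'*BI*(x-Xb), Jo = 0.5*_Innovation'*RI*_Innovation
              WLS     : Jb = 0,                     Jo = 0.5*_Innovation'*RI*_Innovation
              LS/L2   : Jb = 0,                     Jo = 0.5*_Innovation'*_Innovation
              L1      : Jb = 0,                     Jo = sum(|_Innovation|)
              ME      : Jb = 0,                     Jo = max(|_Innovation|)
            Each evaluation is also stored in self.StoredVariables.
            """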
            _X  = numpy.asmatrix(numpy.ravel( x )).T
            self.StoredVariables["CurrentState"].store( _X )
            _HX = Hm( _X )
            _HX = numpy.asmatrix(numpy.ravel( _HX )).T
            _Innovation = Y - _HX
            if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"] or \
               "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX )
            if "InnovationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]:
                self.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
            #
            if QualityMeasure in ["AugmentedWeightedLeastSquares","AWLS","DA"]:
                if BI is None or RI is None:
                    raise ValueError("Background and observation error covariance matrices have to be properly defined!")
                Jb  = 0.5 * (_X - Xb).T * BI * (_X - Xb)
                Jo  = 0.5 * (_Innovation).T * RI * (_Innovation)
            elif QualityMeasure in ["WeightedLeastSquares","WLS"]:
                if RI is None:
                    raise ValueError("Observation error covariance matrix has to be properly defined!")
                Jb  = 0.
                Jo  = 0.5 * (_Innovation).T * RI * (_Innovation)
            elif QualityMeasure in ["LeastSquares","LS","L2"]:
                Jb  = 0.
                Jo  = 0.5 * (_Innovation).T * (_Innovation)
            elif QualityMeasure in ["AbsoluteValue","L1"]:
                Jb  = 0.
                Jo  = numpy.sum( numpy.abs(_Innovation) )
            elif QualityMeasure in ["MaximumError","ME"]:
                Jb  = 0.
                Jo  = numpy.max( numpy.abs(_Innovation) )
            else:
                raise ValueError("Unknown quality measure: %s"%QualityMeasure)
            #
            J   = float( Jb ) + float( Jo )
            #
            self.StoredVariables["CostFunctionJb"].store( Jb )
            self.StoredVariables["CostFunctionJo"].store( Jo )
            self.StoredVariables["CostFunctionJ" ].store( J )
            if "IndexOfOptimum" in self._parameters["StoreSupplementaryCalculations"] or \
               "CurrentOptimum" in self._parameters["StoreSupplementaryCalculations"] or \
               "CostFunctionJAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"] or \
               "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
            if "IndexOfOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                self.StoredVariables["IndexOfOptimum"].store( IndexMin )
            if "CurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                self.StoredVariables["CurrentOptimum"].store( self.StoredVariables["CurrentState"][IndexMin] )
            if "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
            if "CostFunctionJAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJb"][IndexMin] )
                self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJo"][IndexMin] )
                self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] )
            return J
        #
        # Starting point of the optimization: Xini = Xb
        # ---------------------------------------------
        Xini = numpy.ravel(Xb)
        #
        # Minimization of the functional
        # ------------------------------
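        # Remember how many cost values are already stored, so that the search
        # for the optimum below only spans the evaluations made by this run.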
        nbPreviousSteps = self.StoredVariables["CostFunctionJ"].stepnumber()
        #
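        # Both minimizers are gradient-free: POWELL is Powell's conjugate
        # direction method, SIMPLEX is the Nelder-Mead downhill simplex.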
        if self._parameters["Minimizer"] == "POWELL":
            Minimum, J_optimal, direc, niter, nfeval, rc = scipy.optimize.fmin_powell(
                func        = CostFunction,
                x0          = Xini,
                args        = (self._parameters["QualityCriterion"],),
                maxiter     = self._parameters["MaximumNumberOfSteps"]-1,
                maxfun      = self._parameters["MaximumNumberOfFunctionEvaluations"]-1,
                xtol        = self._parameters["StateVariationTolerance"],
                ftol        = self._parameters["CostDecrementTolerance"],
                full_output = True,
                disp        = self.__disp,
                )
        elif self._parameters["Minimizer"] == "SIMPLEX":
            Minimum, J_optimal, niter, nfeval, rc = scipy.optimize.fmin(
                func        = CostFunction,
                x0          = Xini,
                args        = (self._parameters["QualityCriterion"],),
                maxiter     = self._parameters["MaximumNumberOfSteps"]-1,
                maxfun      = self._parameters["MaximumNumberOfFunctionEvaluations"]-1,
                xtol        = self._parameters["StateVariationTolerance"],
                ftol        = self._parameters["CostDecrementTolerance"],
                full_output = True,
                disp        = self.__disp,
                )
        else:
            raise ValueError("Error in Minimizer name: %s"%self._parameters["Minimizer"])
        #
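        # Take as optimum the best state actually evaluated and stored, which
        # may differ from the final iterate returned by the scipy minimizer.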
        IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
        MinJ     = self.StoredVariables["CostFunctionJ"][IndexMin]
        Minimum  = self.StoredVariables["CurrentState"][IndexMin]
        #
        # Obtaining the analysis
        # ----------------------
        Xa = numpy.asmatrix(numpy.ravel( Minimum )).T
        #
        self.StoredVariables["Analysis"].store( Xa.A1 )
        #
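        # Reuse a simulation already stored for the optimal state if one is
        # available, otherwise apply the observation operator once more.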
        if "OMA"                           in self._parameters["StoreSupplementaryCalculations"] or \
           "SimulatedObservationAtOptimum" in self._parameters["StoreSupplementaryCalculations"]:
            if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]:
                HXa = self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin]
            elif "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                HXa = self.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
            else:
                HXa = Hm(Xa)
        #
        if "Innovation" in self._parameters["StoreSupplementaryCalculations"] or \
           "OMB" in self._parameters["StoreSupplementaryCalculations"]:
            # Innovation at the background state: d = Y - H(Xb)
            d = numpy.ravel(Y) - numpy.ravel(Hm(Xb))
        if "Innovation" in self._parameters["StoreSupplementaryCalculations"]:
            self.StoredVariables["Innovation"].store( numpy.ravel(d) )
        if "OMB" in self._parameters["StoreSupplementaryCalculations"]:
            self.StoredVariables["OMB"].store( numpy.ravel(d) )
        if "BMA" in self._parameters["StoreSupplementaryCalculations"]:
            self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
        if "OMA" in self._parameters["StoreSupplementaryCalculations"]:
            self.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
        if "SimulatedObservationAtBackground" in self._parameters["StoreSupplementaryCalculations"]:
            self.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(Hm(Xb)) )
        if "SimulatedObservationAtOptimum" in self._parameters["StoreSupplementaryCalculations"]:
            self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) )
        #
        self._post_run()
        return 0

# ==============================================================================
if __name__ == "__main__":
    print("\n AUTODIAGNOSTIC \n")