SALOME platform Git repositories - modules/adao.git/blob - src/daComposant/daAlgorithms/DifferentialEvolution.py
Salome HOME
Adding DE algorithm, and small DFO coherency correction
[modules/adao.git] / src / daComposant / daAlgorithms / DifferentialEvolution.py
1 # -*- coding: utf-8 -*-
2 #
3 # Copyright (C) 2008-2018 EDF R&D
4 #
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
9 #
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13 # Lesser General Public License for more details.
14 #
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
18 #
19 # See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
20 #
21 # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
22
23 import logging
24 from daCore import BasicObjects
25 import numpy, scipy.optimize
26
27 # ==============================================================================
class ElementaryAlgorithm(BasicObjects.Algorithm):
    """
    Derivative-free global minimization of a data assimilation quality
    criterion by Differential Evolution (DE), delegated to
    "scipy.optimize.differential_evolution". The optimal state found over
    all evaluated candidates is stored as the "Analysis" variable.
    """
    def __init__(self):
        BasicObjects.Algorithm.__init__(self, "DIFFERENTIALEVOLUTION")
        # DE mutation/crossover strategy, mapped to scipy's lowercase names at run time
        self.defineRequiredParameter(
            name     = "Minimizer",
            default  = "BEST1BIN",
            typecast = str,
            message  = "Stratégie de minimisation utilisée",
            listval  = [
                "BEST1BIN",
                "BEST1EXP",
                "RAND1EXP",
                "RANDTOBEST1EXP",
                "CURRENTTOBEST1EXP",
                "BEST2EXP",
                "RAND2EXP",
                "RANDTOBEST1BIN",
                "CURRENTTOBEST1BIN",
                "BEST2BIN",
                "RAND2BIN",
                "RAND1BIN",
                ],
            )
        # Maximum number of DE generations
        self.defineRequiredParameter(
            name     = "MaximumNumberOfSteps",
            default  = 15000,
            typecast = int,
            message  = "Nombre maximal de générations",
            minval   = 0,
            )
        # Budget of cost-function evaluations; combined with the population
        # size to bound the number of generations in run()
        self.defineRequiredParameter(
            name     = "MaximumNumberOfFunctionEvaluations",
            default  = 15000,
            typecast = int,
            message  = "Nombre maximal d'évaluations de la fonction",
            minval   = -1,
            )
        # Approximate total population per generation (individuals, not the
        # scipy per-dimension multiplier)
        self.defineRequiredParameter(
            name     = "PopulationSize",
            default  = 100,
            typecast = int,
            message  = "Taille approximative de la population à chaque génération",
            minval   = 1,
            )
        # DE differential weight F: a constant, or a (min, max) dithering interval
        self.defineRequiredParameter(
            name     = "MutationDifferentialWeight_F",
            default  = (0.5, 1),
            typecast = tuple,
            message  = "Poids différentiel de mutation, constant ou aléatoire dans l'intervalle, noté F",
            minval   = 0.,
            maxval   = 2.,
            )
        # DE crossover (recombination) probability CR in [0, 1]
        self.defineRequiredParameter(
            name     = "CrossOverProbability_CR",
            default  = 0.7,
            typecast = float,
            message  = "Probabilité de recombinaison ou de croisement, notée CR",
            minval   = 0.,
            maxval   = 1.,
            )
        # Quality criterion J to minimize (aliases accepted per family)
        self.defineRequiredParameter(
            name     = "QualityCriterion",
            default  = "AugmentedWeightedLeastSquares",
            typecast = str,
            message  = "Critère de qualité utilisé",
            listval  = ["AugmentedWeightedLeastSquares","AWLS","DA",
                        "WeightedLeastSquares","WLS",
                        "LeastSquares","LS","L2",
                        "AbsoluteValue","L1",
                        "MaximumError","ME"],
            )
        self.defineRequiredParameter(
            name     = "StoreInternalVariables",
            default  = False,
            typecast = bool,
            message  = "Stockage des variables internes ou intermédiaires du calcul",
            )
        self.defineRequiredParameter(
            name     = "StoreSupplementaryCalculations",
            default  = [],
            typecast = tuple,
            message  = "Liste de calculs supplémentaires à stocker et/ou effectuer",
            listval  = [
                "CurrentState",
                "CostFunctionJ",
                "CostFunctionJb",
                "CostFunctionJo",
                "CostFunctionJAtCurrentOptimum",
                "CostFunctionJbAtCurrentOptimum",
                "CostFunctionJoAtCurrentOptimum",
                "CurrentOptimum",
                "IndexOfOptimum",
                "InnovationAtCurrentState",
                "BMA",
                "OMA",
                "OMB",
                "SimulatedObservationAtBackground",
                "SimulatedObservationAtCurrentOptimum",
                "SimulatedObservationAtCurrentState",
                "SimulatedObservationAtOptimum",
                ]
            )
        # Seeding goes straight to the global numpy RNG, which scipy's DE
        # uses by default when no explicit seed is passed
        self.defineRequiredParameter(
            name     = "SetSeed",
            typecast = numpy.random.seed,
            message  = "Graine fixée pour le générateur aléatoire",
            )
        self.defineRequiredParameter( # No typecast: heterogeneous bounds content
            name     = "Bounds",
            message  = "Liste des valeurs de bornes",
            )
        self.requireInputArguments(
            mandatory= ("Xb", "Y", "HO", "R", "B" ),
            )

    def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
        """
        Minimize the selected quality criterion J(x) by Differential
        Evolution over the declared "Bounds", then store the best evaluated
        state as "Analysis" plus any requested supplementary quantities.
        Returns 0 on completion.
        """
        self._pre_run(Parameters, Xb, Y, R, B, Q)
        #
        # scipy's "popsize" is a multiplier of the state dimension: the
        # effective population holds popsize*len_X individuals. The max(1, .)
        # guards keep at least one population group and one generation even
        # for tiny budgets or large state sizes (previously these could reach
        # zero or negative values).
        len_X = numpy.asarray(Xb).size
        popsize = max(1, round(self._parameters["PopulationSize"]/len_X))
        maxiter = max(1, min(
            self._parameters["MaximumNumberOfSteps"],
            round(self._parameters["MaximumNumberOfFunctionEvaluations"]/(popsize*len_X) - 1),
            ))
        logging.debug("%s Nombre maximal de générations = %i, taille de la population à chaque génération = %i"%(self._name, maxiter, popsize*len_X))
        #
        # Operators
        # ---------
        Hm = HO["Direct"].appliedTo
        #
        # Precompute the inverses of B and R (may be None when not defined)
        # -----------------------------------------------------------------
        BI = B.getI()
        RI = R.getI()
        #
        # Definition of the cost function
        # -------------------------------
        def CostFunction(x, QualityMeasure="AugmentedWeightedLeastSquares"):
            # Closure over run() locals; relies on "nbPreviousSteps" being
            # assigned below, before the optimizer first calls it.
            _X  = numpy.asmatrix(numpy.ravel( x )).T
            self.StoredVariables["CurrentState"].store( _X )
            _HX = Hm( _X )
            _HX = numpy.asmatrix(numpy.ravel( _HX )).T
            _Innovation = Y - _HX
            if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"] or \
               "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX )
            if "InnovationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]:
                self.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
            #
            if QualityMeasure in ["AugmentedWeightedLeastSquares","AWLS","DA"]:
                if BI is None or RI is None:
                    raise ValueError("Background and Observation error covariance matrix has to be properly defined!")
                Jb  = 0.5 * (_X - Xb).T * BI * (_X - Xb)
                Jo  = 0.5 * (_Innovation).T * RI * (_Innovation)
            elif QualityMeasure in ["WeightedLeastSquares","WLS"]:
                if RI is None:
                    raise ValueError("Observation error covariance matrix has to be properly defined!")
                Jb  = 0.
                Jo  = 0.5 * (_Innovation).T * RI * (_Innovation)
            elif QualityMeasure in ["LeastSquares","LS","L2"]:
                Jb  = 0.
                Jo  = 0.5 * (_Innovation).T * (_Innovation)
            elif QualityMeasure in ["AbsoluteValue","L1"]:
                Jb  = 0.
                Jo  = numpy.sum( numpy.abs(_Innovation) )
            elif QualityMeasure in ["MaximumError","ME"]:
                Jb  = 0.
                Jo  = numpy.max( numpy.abs(_Innovation) )
            else:
                # Defensive: previously an unknown measure left Jb/Jo
                # undefined and raised an obscure NameError below.
                raise ValueError("Unknown quality criterion \"%s\""%str(QualityMeasure))
            #
            J   = float( Jb ) + float( Jo )
            #
            self.StoredVariables["CostFunctionJb"].store( Jb )
            self.StoredVariables["CostFunctionJo"].store( Jo )
            self.StoredVariables["CostFunctionJ" ].store( J )
            if "IndexOfOptimum" in self._parameters["StoreSupplementaryCalculations"] or \
               "CurrentOptimum" in self._parameters["StoreSupplementaryCalculations"] or \
               "CostFunctionJAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"] or \
               "CostFunctionJbAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"] or \
               "CostFunctionJoAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"] or \
               "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                # Index of the best J over this run only (offset by the steps
                # already stored before this run started)
                IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
            if "IndexOfOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                self.StoredVariables["IndexOfOptimum"].store( IndexMin )
            if "CurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                self.StoredVariables["CurrentOptimum"].store( self.StoredVariables["CurrentState"][IndexMin] )
            if "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
            if "CostFunctionJAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] )
            if "CostFunctionJbAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJb"][IndexMin] )
            if "CostFunctionJoAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJo"][IndexMin] )
            return J
        #
        # Minimization of the functional
        # ------------------------------
        nbPreviousSteps = self.StoredVariables["CostFunctionJ"].stepnumber()
        #
        # The scipy result object is deliberately not used: the optimum is
        # recovered below from the stored states so that it stays consistent
        # with the stored CostFunctionJ series.
        scipy.optimize.differential_evolution(
            CostFunction,
            self._parameters["Bounds"],
            # Fix: without "args" the user-selected QualityCriterion was
            # silently ignored and the default criterion was always used.
            args          = (self._parameters["QualityCriterion"],),
            strategy      = str(self._parameters["Minimizer"]).lower(),
            maxiter       = maxiter,
            popsize       = popsize,
            mutation      = self._parameters["MutationDifferentialWeight_F"],
            recombination = self._parameters["CrossOverProbability_CR"],
            # Fix: "optdisp" is not a declared parameter of this algorithm,
            # so a plain lookup raised KeyError; default to a quiet run.
            disp          = self._parameters.get("optdisp", False),
            )
        #
        IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
        Minimum  = self.StoredVariables["CurrentState"][IndexMin]
        #
        # Obtaining the analysis
        # ----------------------
        Xa = numpy.ravel( Minimum )
        #
        self.StoredVariables["Analysis"].store( Xa )
        #
        # Supplementary calculations and/or storage
        # -----------------------------------------
        if "OMA"                           in self._parameters["StoreSupplementaryCalculations"] or \
           "SimulatedObservationAtOptimum" in self._parameters["StoreSupplementaryCalculations"]:
            # Reuse an already-stored simulation of the optimum when one is
            # available, to avoid an extra call to the direct operator.
            if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]:
                HXa = self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin]
            elif "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                HXa = self.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
            else:
                HXa = Hm(Xa)
        if "Innovation" in self._parameters["StoreSupplementaryCalculations"] or \
            "OMB" in self._parameters["StoreSupplementaryCalculations"]:
            # Fix: "HXb" was never computed here, so requesting "OMB" raised
            # a NameError; the background simulation is required for d = Y - H(Xb).
            HXb = numpy.ravel( Hm( Xb ) )
            d  = numpy.ravel( Y ) - HXb
        if "Innovation" in self._parameters["StoreSupplementaryCalculations"]:
            self.StoredVariables["Innovation"].store( numpy.ravel(d) )
        if "OMB" in self._parameters["StoreSupplementaryCalculations"]:
            self.StoredVariables["OMB"].store( numpy.ravel(d) )
        if "BMA" in self._parameters["StoreSupplementaryCalculations"]:
            self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
        if "OMA" in self._parameters["StoreSupplementaryCalculations"]:
            self.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
        if "SimulatedObservationAtBackground" in self._parameters["StoreSupplementaryCalculations"]:
            self.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(Hm(Xb)) )
        if "SimulatedObservationAtOptimum" in self._parameters["StoreSupplementaryCalculations"]:
            self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) )
        #
        self._post_run()
        return 0
277
278 # ==============================================================================
if __name__ == "__main__":
    # Direct execution only prints a self-diagnostic banner.
    print("\n AUTODIAGNOSTIC \n")