# -*- coding: utf-8 -*-
#
# Copyright (C) 2008-2018 EDF R&D
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
#
# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
#
# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D

import logging
from daCore import BasicObjects
import numpy, scipy.optimize

# ==============================================================================
class ElementaryAlgorithm(BasicObjects.Algorithm):
    def __init__(self):
        BasicObjects.Algorithm.__init__(self, "DIFFERENTIALEVOLUTION")
        self.defineRequiredParameter(
            name     = "Minimizer",
            default  = "BEST1BIN",
            typecast = str,
            message  = "Minimization strategy used",
            listval  = [
                # These names are lowercased at call time to match the
                # strategy names of scipy.optimize.differential_evolution
                "BEST1BIN",
                "BEST1EXP",
                "RAND1EXP",
                "RANDTOBEST1EXP",
                "CURRENTTOBEST1EXP",
                "BEST2EXP",
                "RAND2EXP",
                "RANDTOBEST1BIN",
                "CURRENTTOBEST1BIN",
                "BEST2BIN",
                "RAND2BIN",
                "RAND1BIN",
                ],
            )
        self.defineRequiredParameter(
            name     = "MaximumNumberOfSteps",
            default  = 15000,
            typecast = int,
            message  = "Maximum number of generations",
            minval   = 0,
            )
        self.defineRequiredParameter(
            name     = "MaximumNumberOfFunctionEvaluations",
            default  = 15000,
            typecast = int,
            message  = "Maximum number of function evaluations",
            minval   = -1,
            )
        self.defineRequiredParameter(
            name     = "PopulationSize",
            default  = 100,
            typecast = int,
            message  = "Approximate size of the population at each generation",
            minval   = 1,
            )
        self.defineRequiredParameter(
            name     = "MutationDifferentialWeight_F",
            default  = (0.5, 1),
            typecast = tuple,
            message  = "Differential mutation weight, constant or drawn randomly in the given interval, noted F",
            minval   = 0.,
            maxval   = 2.,
            )
        self.defineRequiredParameter(
            name     = "CrossOverProbability_CR",
            default  = 0.7,
            typecast = float,
            message  = "Recombination or crossover probability, noted CR",
            minval   = 0.,
            maxval   = 1.,
            )
        self.defineRequiredParameter(
            name     = "QualityCriterion",
            default  = "AugmentedWeightedLeastSquares",
            typecast = str,
            message  = "Quality criterion used",
            listval  = ["AugmentedWeightedLeastSquares","AWLS","DA",
                        "WeightedLeastSquares","WLS",
                        "LeastSquares","LS","L2",
                        "AbsoluteValue","L1",
                        "MaximumError","ME"],
            )
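        # With d = y - H(x) the innovation, the criteria above correspond to
        # the following cost functions J(x), as computed in CostFunction below:
        #   AugmentedWeightedLeastSquares (AWLS, DA): J = 0.5*(x-xb)'*B^-1*(x-xb) + 0.5*d'*R^-1*d
        #   WeightedLeastSquares (WLS)              : J = 0.5*d'*R^-1*d
        #   LeastSquares (LS, L2)                   : J = 0.5*d'*d
        #   AbsoluteValue (L1)                      : J = sum(|d|)
        #   MaximumError (ME)                       : J = max(|d|)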
        self.defineRequiredParameter(
            name     = "StoreInternalVariables",
            default  = False,
            typecast = bool,
            message  = "Storage of the internal or intermediate variables of the calculation",
            )
        self.defineRequiredParameter(
            name     = "StoreSupplementaryCalculations",
            default  = [],
            typecast = tuple,
            message  = "List of supplementary calculations to store and/or perform",
            listval  = [
                "BMA",
                "CostFunctionJ",
                "CostFunctionJb",
                "CostFunctionJo",
                "CostFunctionJAtCurrentOptimum",
                "CostFunctionJbAtCurrentOptimum",
                "CostFunctionJoAtCurrentOptimum",
                "CurrentOptimum",
                "CurrentState",
                "IndexOfOptimum",
                "Innovation",
                "InnovationAtCurrentState",
                "OMA",
                "OMB",
                "SimulatedObservationAtBackground",
                "SimulatedObservationAtCurrentOptimum",
                "SimulatedObservationAtCurrentState",
                "SimulatedObservationAtOptimum",
                ]
            )
        self.defineRequiredParameter(
            name     = "SetSeed",
            typecast = numpy.random.seed,
            message  = "Fixed seed for the random number generator",
            )
        self.defineRequiredParameter( # No typecast: passed as-is to scipy, as a sequence of (min, max) pairs
            name     = "Bounds",
            message  = "List of bounds values",
            )
        self.requireInputArguments(
            mandatory = ("Xb", "Y", "HO", "R", "B"),
            )

    def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
        self._pre_run(Parameters, Xb, Y, R, B, Q)
        #
        # scipy.optimize.differential_evolution evaluates about popsize*len(x)
        # points per generation, so the number of generations is capped to
        # respect the function evaluation budget
        len_X = numpy.asarray(Xb).size
        popsize = max(1, round(self._parameters["PopulationSize"]/len_X))
        maxiter = min(self._parameters["MaximumNumberOfSteps"],round(self._parameters["MaximumNumberOfFunctionEvaluations"]/(popsize*len_X) - 1))
        logging.debug("%s Maximum number of generations = %i, size of the population at each generation = %i"%(self._name, maxiter, popsize*len_X))
        #
        # Operators
        # ---------
        Hm = HO["Direct"].appliedTo
        #
        # Precomputation of the inverses of B and R (None if not available)
        # ------------------------------------------------------------------
        BI = B.getI()
        RI = R.getI()
        #
        # Definition of the cost function
        # -------------------------------
        def CostFunction(x, QualityMeasure="AugmentedWeightedLeastSquares"):
            _X  = numpy.asmatrix(numpy.ravel( x )).T
            self.StoredVariables["CurrentState"].store( _X )
            _HX = Hm( _X )
            _HX = numpy.asmatrix(numpy.ravel( _HX )).T
            _Innovation = Y - _HX
            if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"] or \
               "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX )
            if "InnovationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]:
                self.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
            #
            if QualityMeasure in ["AugmentedWeightedLeastSquares","AWLS","DA"]:
                if BI is None or RI is None:
                    raise ValueError("The background and observation error covariance matrices have to be properly defined!")
                Jb  = 0.5 * (_X - Xb).T * BI * (_X - Xb)
                Jo  = 0.5 * (_Innovation).T * RI * (_Innovation)
            elif QualityMeasure in ["WeightedLeastSquares","WLS"]:
                if RI is None:
                    raise ValueError("The observation error covariance matrix has to be properly defined!")
                Jb  = 0.
                Jo  = 0.5 * (_Innovation).T * RI * (_Innovation)
            elif QualityMeasure in ["LeastSquares","LS","L2"]:
                Jb  = 0.
                Jo  = 0.5 * (_Innovation).T * (_Innovation)
            elif QualityMeasure in ["AbsoluteValue","L1"]:
                Jb  = 0.
                Jo  = numpy.sum( numpy.abs(_Innovation) )
            elif QualityMeasure in ["MaximumError","ME"]:
                Jb  = 0.
                Jo  = numpy.max( numpy.abs(_Innovation) )
            #
            J   = float( Jb ) + float( Jo )
            #
            self.StoredVariables["CostFunctionJb"].store( Jb )
            self.StoredVariables["CostFunctionJo"].store( Jo )
            self.StoredVariables["CostFunctionJ" ].store( J )
            if "IndexOfOptimum" in self._parameters["StoreSupplementaryCalculations"] or \
               "CurrentOptimum" in self._parameters["StoreSupplementaryCalculations"] or \
               "CostFunctionJAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"] or \
               "CostFunctionJbAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"] or \
               "CostFunctionJoAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"] or \
               "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                # nbPreviousSteps is captured by closure; it is assigned below,
                # before the optimizer first calls CostFunction
                IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
            if "IndexOfOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                self.StoredVariables["IndexOfOptimum"].store( IndexMin )
            if "CurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                self.StoredVariables["CurrentOptimum"].store( self.StoredVariables["CurrentState"][IndexMin] )
            if "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
            if "CostFunctionJAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] )
            if "CostFunctionJbAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJb"][IndexMin] )
            if "CostFunctionJoAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJo"][IndexMin] )
            return J
        #
        # Starting point of the optimization: Xini = Xb
        # (informational: differential_evolution draws its own initial population)
        # ---------------------------------------------
        Xini = numpy.ravel(Xb)
        #
        # Minimization of the functional
        # ------------------------------
        nbPreviousSteps = self.StoredVariables["CostFunctionJ"].stepnumber()
        #
        optResults = scipy.optimize.differential_evolution(
            CostFunction,
            self._parameters["Bounds"],
            strategy      = str(self._parameters["Minimizer"]).lower(),
            maxiter       = maxiter,
            popsize       = popsize,
            mutation      = self._parameters["MutationDifferentialWeight_F"],
            recombination = self._parameters["CrossOverProbability_CR"],
            disp          = self._parameters.get("optdisp", False), # "optdisp" is not declared as a parameter above, so assume no intermediate display by default
            )
        #
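        # The optimum is recovered from the stored cost function history rather
        # than from optResults.x, so that the reported analysis is consistent
        # with the diagnostics stored during the evaluations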
        IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
        MinJ     = self.StoredVariables["CostFunctionJ"][IndexMin]
        Minimum  = self.StoredVariables["CurrentState"][IndexMin]
        #
        # Retrieval of the analysis
        # -------------------------
        Xa = numpy.ravel( Minimum )
        #
        self.StoredVariables["Analysis"].store( Xa )
        #
        # Supplementary calculations and/or storage
        # -----------------------------------------
        if "OMA"                           in self._parameters["StoreSupplementaryCalculations"] or \
           "SimulatedObservationAtOptimum" in self._parameters["StoreSupplementaryCalculations"]:
            if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]:
                HXa = self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin]
            elif "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                HXa = self.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
            else:
                HXa = Hm(Xa)
        if "Innovation" in self._parameters["StoreSupplementaryCalculations"] or \
            "OMB" in self._parameters["StoreSupplementaryCalculations"]:
            HXb = numpy.asmatrix(numpy.ravel( Hm( Xb ) )).T # simulation of the observation at the background
            d  = Y - HXb
        if "Innovation" in self._parameters["StoreSupplementaryCalculations"]:
            self.StoredVariables["Innovation"].store( numpy.ravel(d) )
        if "OMB" in self._parameters["StoreSupplementaryCalculations"]:
            self.StoredVariables["OMB"].store( numpy.ravel(d) )
        if "BMA" in self._parameters["StoreSupplementaryCalculations"]:
            self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
        if "OMA" in self._parameters["StoreSupplementaryCalculations"]:
            self.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
        if "SimulatedObservationAtBackground" in self._parameters["StoreSupplementaryCalculations"]:
            self.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(Hm(Xb)) )
        if "SimulatedObservationAtOptimum" in self._parameters["StoreSupplementaryCalculations"]:
            self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) )
        #
        self._post_run()
        return 0

# ==============================================================================
if __name__ == "__main__":
    print('\n AUTODIAGNOSTIC \n')
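
    # Minimal standalone sketch (illustrative only, independent of daCore): it
    # minimizes the "LeastSquares" criterion J(x) = 0.5*||y - H*x||^2 handled
    # by CostFunction above, using scipy.optimize.differential_evolution. The
    # linear operator H, the observation y and the bounds are made up here.
    H = numpy.array([[1., 0.], [0., 2.], [1., 1.]])
    y = numpy.array([1., 4., 3.])
    def J(x):
        _d = y - H.dot(x) # innovation for the candidate state x
        return 0.5 * float(_d.dot(_d))
    res = scipy.optimize.differential_evolution(
        J,
        bounds        = [(-10., 10.), (-10., 10.)],
        strategy      = "best1bin",  # default "Minimizer", lowercased
        maxiter       = 100,
        popsize       = 15,
        mutation      = (0.5, 1.),   # default "MutationDifferentialWeight_F"
        recombination = 0.7,         # default "CrossOverProbability_CR"
        seed          = 1,
        )
    print("  Illustrative optimum: x =", res.x, ", J =", res.fun) # x should be close to [1., 2.]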