]> SALOME platform Git repositories - modules/adao.git/blob - src/daComposant/daAlgorithms/DifferentialEvolution.py
Salome HOME
Updating copyright date information (2)
[modules/adao.git] / src / daComposant / daAlgorithms / DifferentialEvolution.py
1 # -*- coding: utf-8 -*-
2 #
3 # Copyright (C) 2008-2019 EDF R&D
4 #
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
9 #
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13 # Lesser General Public License for more details.
14 #
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
18 #
19 # See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
20 #
21 # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
22
23 import logging
24 from daCore import BasicObjects
25 import numpy, scipy.optimize
26
27 # ==============================================================================
class ElementaryAlgorithm(BasicObjects.Algorithm):
    """
    Derivative-free minimization of the data-assimilation quality criterion
    by differential evolution (scipy.optimize.differential_evolution).

    The algorithm searches, within the user-given "Bounds", the state that
    minimizes the selected "QualityCriterion", starting the bookkeeping from
    the background Xb.
    """
    def __init__(self):
        BasicObjects.Algorithm.__init__(self, "DIFFERENTIALEVOLUTION")
        self.defineRequiredParameter(
            name     = "Minimizer",
            default  = "BEST1BIN",
            typecast = str,
            message  = "Stratégie de minimisation utilisée",
            listval  = [
                "BEST1BIN",
                "BEST1EXP",
                "RAND1EXP",
                "RANDTOBEST1EXP",
                # "CURRENTTOBEST1EXP",
                "BEST2EXP",
                "RAND2EXP",
                "RANDTOBEST1BIN",
                # "CURRENTTOBEST1BIN",
                "BEST2BIN",
                "RAND2BIN",
                "RAND1BIN",
                ],
            )
        self.defineRequiredParameter(
            name     = "MaximumNumberOfSteps",
            default  = 15000,
            typecast = int,
            message  = "Nombre maximal de générations",
            minval   = 0,
            )
        self.defineRequiredParameter(
            name     = "MaximumNumberOfFunctionEvaluations",
            default  = 15000,
            typecast = int,
            message  = "Nombre maximal d'évaluations de la fonction",
            minval   = -1,
            )
        self.defineRequiredParameter(
            name     = "PopulationSize",
            default  = 100,
            typecast = int,
            message  = "Taille approximative de la population à chaque génération",
            minval   = 1,
            )
        self.defineRequiredParameter(
            name     = "MutationDifferentialWeight_F",
            default  = (0.5, 1),
            typecast = tuple,
            message  = "Poids différentiel de mutation, constant ou aléatoire dans l'intervalle, noté F",
            minval   = 0.,
            maxval   = 2.,
            )
        self.defineRequiredParameter(
            name     = "CrossOverProbability_CR",
            default  = 0.7,
            typecast = float,
            message  = "Probabilité de recombinaison ou de croisement, notée CR",
            minval   = 0.,
            maxval   = 1.,
            )
        self.defineRequiredParameter(
            name     = "QualityCriterion",
            default  = "AugmentedWeightedLeastSquares",
            typecast = str,
            message  = "Critère de qualité utilisé",
            listval  = ["AugmentedWeightedLeastSquares","AWLS","DA",
                        "WeightedLeastSquares","WLS",
                        "LeastSquares","LS","L2",
                        "AbsoluteValue","L1",
                        "MaximumError","ME"],
            )
        self.defineRequiredParameter(
            name     = "StoreInternalVariables",
            default  = False,
            typecast = bool,
            message  = "Stockage des variables internes ou intermédiaires du calcul",
            )
        self.defineRequiredParameter(
            name     = "StoreSupplementaryCalculations",
            default  = [],
            typecast = tuple,
            message  = "Liste de calculs supplémentaires à stocker et/ou effectuer",
            listval  = [
                "BMA",
                "CostFunctionJ",
                "CostFunctionJb",
                "CostFunctionJo",
                "CostFunctionJAtCurrentOptimum",
                "CostFunctionJbAtCurrentOptimum",
                "CostFunctionJoAtCurrentOptimum",
                "CurrentOptimum",
                "CurrentState",
                "IndexOfOptimum",
                "Innovation",
                "InnovationAtCurrentState",
                "OMA",
                "OMB",
                "SimulatedObservationAtBackground",
                "SimulatedObservationAtCurrentOptimum",
                "SimulatedObservationAtCurrentState",
                "SimulatedObservationAtOptimum",
                ]
            )
        self.defineRequiredParameter(
            name     = "SetSeed",
            typecast = numpy.random.seed,
            message  = "Graine fixée pour le générateur aléatoire",
            )
        self.defineRequiredParameter( # No typecast: raw value is kept
            name     = "Bounds",
            message  = "Liste des valeurs de bornes",
            )
        self.requireInputArguments(
            mandatory= ("Xb", "Y", "HO", "R", "B" ),
            )

    def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
        """
        Run the differential-evolution minimization.

        Xb : background state, Y : observations, HO : observation operator,
        B/R : background/observation error covariances. U, EM, CM, Q are
        accepted for interface uniformity but unused here. Returns 0.
        """
        self._pre_run(Parameters, Xb, Y, R, B, Q)
        #
        len_X = numpy.asarray(Xb).size
        # scipy's actual population is popsize*len_X individuals; clamp to at
        # least 1 so a large state dimension cannot round the factor to 0.
        popsize = max(1, round(self._parameters["PopulationSize"]/len_X))
        maxiter = min(self._parameters["MaximumNumberOfSteps"],round(self._parameters["MaximumNumberOfFunctionEvaluations"]/(popsize*len_X) - 1))
        logging.debug("%s Nombre maximal de générations = %i, taille de la population à chaque génération = %i"%(self._name, maxiter, popsize*len_X))
        #
        # Operators
        # ---------
        Hm = HO["Direct"].appliedTo
        #
        # Precompute the inverses of B and R
        # ----------------------------------
        BI = B.getI()
        RI = R.getI()
        #
        # Definition of the cost function
        # -------------------------------
        def CostFunction(x, QualityMeasure="AugmentedWeightedLeastSquares"):
            # Evaluate the selected quality criterion J = Jb + Jo at state x,
            # storing every intermediate quantity the user asked for.
            _X  = numpy.asmatrix(numpy.ravel( x )).T
            self.StoredVariables["CurrentState"].store( _X )
            _HX = Hm( _X )
            _HX = numpy.asmatrix(numpy.ravel( _HX )).T
            _Innovation = Y - _HX
            if self._toStore("SimulatedObservationAtCurrentState") or \
                self._toStore("SimulatedObservationAtCurrentOptimum"):
                self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX )
            if self._toStore("InnovationAtCurrentState"):
                self.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
            #
            if QualityMeasure in ["AugmentedWeightedLeastSquares","AWLS","DA"]:
                if BI is None or RI is None:
                    raise ValueError("Background and Observation error covariance matrix has to be properly defined!")
                Jb  = 0.5 * (_X - Xb).T * BI * (_X - Xb)
                Jo  = 0.5 * (_Innovation).T * RI * (_Innovation)
            elif QualityMeasure in ["WeightedLeastSquares","WLS"]:
                if RI is None:
                    raise ValueError("Observation error covariance matrix has to be properly defined!")
                Jb  = 0.
                Jo  = 0.5 * (_Innovation).T * RI * (_Innovation)
            elif QualityMeasure in ["LeastSquares","LS","L2"]:
                Jb  = 0.
                Jo  = 0.5 * (_Innovation).T * (_Innovation)
            elif QualityMeasure in ["AbsoluteValue","L1"]:
                Jb  = 0.
                Jo  = numpy.sum( numpy.abs(_Innovation) )
            elif QualityMeasure in ["MaximumError","ME"]:
                Jb  = 0.
                Jo  = numpy.max( numpy.abs(_Innovation) )
            else:
                # Previously an unknown measure fell through and crashed with
                # an unbound-variable NameError; fail explicitly instead.
                raise ValueError("Unknown quality measure: %s"%(QualityMeasure,))
            #
            J   = float( Jb ) + float( Jo )
            #
            self.StoredVariables["CostFunctionJb"].store( Jb )
            self.StoredVariables["CostFunctionJo"].store( Jo )
            self.StoredVariables["CostFunctionJ" ].store( J )
            if self._toStore("IndexOfOptimum") or \
                self._toStore("CurrentOptimum") or \
                self._toStore("CostFunctionJAtCurrentOptimum") or \
                self._toStore("CostFunctionJbAtCurrentOptimum") or \
                self._toStore("CostFunctionJoAtCurrentOptimum") or \
                self._toStore("SimulatedObservationAtCurrentOptimum"):
                # nbPreviousSteps is bound in the enclosing run() before the
                # optimizer first calls this function.
                IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
            if self._toStore("IndexOfOptimum"):
                self.StoredVariables["IndexOfOptimum"].store( IndexMin )
            if self._toStore("CurrentOptimum"):
                self.StoredVariables["CurrentOptimum"].store( self.StoredVariables["CurrentState"][IndexMin] )
            if self._toStore("SimulatedObservationAtCurrentOptimum"):
                self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
            if self._toStore("CostFunctionJAtCurrentOptimum"):
                self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] )
            if self._toStore("CostFunctionJbAtCurrentOptimum"):
                self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJb"][IndexMin] )
            if self._toStore("CostFunctionJoAtCurrentOptimum"):
                self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJo"][IndexMin] )
            return J
        #
        # Starting point of the optimization: Xini = Xb
        # ---------------------------------------------
        Xini = numpy.ravel(Xb)
        #
        # Minimization of the functional
        # ------------------------------
        nbPreviousSteps = self.StoredVariables["CostFunctionJ"].stepnumber()
        #
        optResults = scipy.optimize.differential_evolution(
            CostFunction,
            self._parameters["Bounds"],
            # Fix: without "args" the user's QualityCriterion was silently
            # ignored and CostFunction always used its default measure.
            args          = (self._parameters["QualityCriterion"],),
            strategy      = str(self._parameters["Minimizer"]).lower(),
            maxiter       = maxiter,
            popsize       = popsize,
            mutation      = self._parameters["MutationDifferentialWeight_F"],
            recombination = self._parameters["CrossOverProbability_CR"],
            # "optdisp" is not declared via defineRequiredParameter, so guard
            # the lookup; identical behavior when the key exists.
            disp          = self._parameters.get("optdisp", False),
            )
        #
        IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
        MinJ     = self.StoredVariables["CostFunctionJ"][IndexMin]
        Minimum  = self.StoredVariables["CurrentState"][IndexMin]
        #
        # Retrieval of the analysis
        # -------------------------
        Xa = numpy.ravel( Minimum )
        #
        self.StoredVariables["Analysis"].store( Xa )
        #
        # Supplementary calculations and/or storage
        # -----------------------------------------
        if self._toStore("OMA") or self._toStore("SimulatedObservationAtOptimum"):
            if self._toStore("SimulatedObservationAtCurrentState"):
                HXa = self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin]
            elif self._toStore("SimulatedObservationAtCurrentOptimum"):
                HXa = self.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
            else:
                HXa = Hm(Xa)
        if self._toStore("Innovation") or \
           self._toStore("OMB") or \
           self._toStore("SimulatedObservationAtBackground"):
            # Fix: HXb was previously referenced without ever being computed,
            # raising NameError whenever Innovation or OMB was requested.
            HXb = numpy.ravel( Hm(Xb) )
            d   = numpy.ravel(Y) - HXb
        if self._toStore("Innovation"):
            self.StoredVariables["Innovation"].store( numpy.ravel(d) )
        if self._toStore("OMB"):
            self.StoredVariables["OMB"].store( numpy.ravel(d) )
        if self._toStore("BMA"):
            self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
        if self._toStore("OMA"):
            self.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
        if self._toStore("SimulatedObservationAtBackground"):
            self.StoredVariables["SimulatedObservationAtBackground"].store( HXb )
        if self._toStore("SimulatedObservationAtOptimum"):
            self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) )
        #
        self._post_run()
        return 0
277
# ==============================================================================
if __name__ == "__main__":
    # Self-diagnostic entry point: the module only prints a banner when run
    # directly; the algorithm itself is instantiated by the daCore framework.
    print('\n AUTODIAGNOSTIC \n')