]> SALOME platform Git repositories - modules/adao.git/blob - src/daComposant/daAlgorithms/DifferentialEvolution.py
Salome HOME
Minor improvements for internal variables
[modules/adao.git] / src / daComposant / daAlgorithms / DifferentialEvolution.py
1 # -*- coding: utf-8 -*-
2 #
3 # Copyright (C) 2008-2021 EDF R&D
4 #
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
9 #
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13 # Lesser General Public License for more details.
14 #
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
18 #
19 # See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
20 #
21 # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
22
23 import logging
24 from daCore import BasicObjects
25 import numpy, scipy.optimize
26
27 # ==============================================================================
class ElementaryAlgorithm(BasicObjects.Algorithm):
    """
    Derivative-free minimization by Differential Evolution (DE).

    Wraps ``scipy.optimize.differential_evolution`` to minimize a data
    assimilation quality criterion J(x) built from the background Xb, the
    observations Y, the observation operator H and the error covariances
    B and R.
    """
    def __init__(self):
        BasicObjects.Algorithm.__init__(self, "DIFFERENTIALEVOLUTION")
        self.defineRequiredParameter(
            name     = "Minimizer",
            default  = "BEST1BIN",
            typecast = str,
            message  = "Stratégie de minimisation utilisée",
            listval  = [
                "BEST1BIN",
                "BEST1EXP",
                "RAND1EXP",
                "RANDTOBEST1EXP",
                # "CURRENTTOBEST1EXP",
                "BEST2EXP",
                "RAND2EXP",
                "RANDTOBEST1BIN",
                # "CURRENTTOBEST1BIN",
                "BEST2BIN",
                "RAND2BIN",
                "RAND1BIN",
                ],
            )
        self.defineRequiredParameter(
            name     = "MaximumNumberOfSteps",
            default  = 15000,
            typecast = int,
            message  = "Nombre maximal de générations",
            minval   = 0,
            )
        self.defineRequiredParameter(
            name     = "MaximumNumberOfFunctionEvaluations",
            default  = 15000,
            typecast = int,
            message  = "Nombre maximal d'évaluations de la fonction",
            minval   = -1,
            )
        self.defineRequiredParameter(
            name     = "PopulationSize",
            default  = 100,
            typecast = int,
            message  = "Taille approximative de la population à chaque génération",
            minval   = 1,
            )
        self.defineRequiredParameter(
            name     = "MutationDifferentialWeight_F",
            default  = (0.5, 1),
            typecast = tuple,
            message  = "Poids différentiel de mutation, constant ou aléatoire dans l'intervalle, noté F",
            minval   = 0.,
            maxval   = 2.,
            )
        self.defineRequiredParameter(
            name     = "CrossOverProbability_CR",
            default  = 0.7,
            typecast = float,
            message  = "Probabilité de recombinaison ou de croisement, notée CR",
            minval   = 0.,
            maxval   = 1.,
            )
        self.defineRequiredParameter(
            name     = "QualityCriterion",
            default  = "AugmentedWeightedLeastSquares",
            typecast = str,
            message  = "Critère de qualité utilisé",
            listval  = ["AugmentedWeightedLeastSquares","AWLS","DA",
                        "WeightedLeastSquares","WLS",
                        "LeastSquares","LS","L2",
                        "AbsoluteValue","L1",
                        "MaximumError","ME"],
            )
        self.defineRequiredParameter(
            name     = "StoreInternalVariables",
            default  = False,
            typecast = bool,
            message  = "Stockage des variables internes ou intermédiaires du calcul",
            )
        self.defineRequiredParameter(
            name     = "StoreSupplementaryCalculations",
            default  = [],
            typecast = tuple,
            message  = "Liste de calculs supplémentaires à stocker et/ou effectuer",
            listval  = [
                "Analysis",
                "BMA",
                "CostFunctionJ",
                "CostFunctionJb",
                "CostFunctionJo",
                "CostFunctionJAtCurrentOptimum",
                "CostFunctionJbAtCurrentOptimum",
                "CostFunctionJoAtCurrentOptimum",
                "CurrentIterationNumber",
                "CurrentOptimum",
                "CurrentState",
                "IndexOfOptimum",
                "Innovation",
                "InnovationAtCurrentState",
                "OMA",
                "OMB",
                "SimulatedObservationAtBackground",
                "SimulatedObservationAtCurrentOptimum",
                "SimulatedObservationAtCurrentState",
                "SimulatedObservationAtOptimum",
                ]
            )
        self.defineRequiredParameter(
            name     = "SetSeed",
            typecast = numpy.random.seed,
            message  = "Graine fixée pour le générateur aléatoire",
            )
        self.defineRequiredParameter( # No typecast: bounds are kept as provided
            name     = "Bounds",
            message  = "Liste des valeurs de bornes",
            )
        self.requireInputArguments(
            mandatory= ("Xb", "Y", "HO", "R", "B" ),
            )
        self.setAttributes(tags=(
            "Optimization",
            "NonLinear",
            "MetaHeuristic",
            "Population",
            ))

    def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
        """
        Run the differential evolution minimization.

        Xb : background state, Y : observations, HO : observation operator,
        R/B : observation/background error covariances (must provide .getI()
        for the AWLS/WLS criteria). Stores the analysis and any requested
        supplementary quantities in self.StoredVariables. Returns 0.
        """
        self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
        #
        # scipy's "popsize" is a multiplier of the state dimension, so the
        # requested absolute PopulationSize is converted here. Guard against
        # a zero multiplier when PopulationSize < len_X/2, which scipy rejects.
        len_X = numpy.asarray(Xb).size
        popsize = max(1, round(self._parameters["PopulationSize"]/len_X))
        # Cap the generation count so the total number of function evaluations
        # (~ maxiter * popsize * len_X) also respects the user's budget.
        maxiter = min(self._parameters["MaximumNumberOfSteps"],round(self._parameters["MaximumNumberOfFunctionEvaluations"]/(popsize*len_X) - 1))
        logging.debug("%s Nombre maximal de générations = %i, taille de la population à chaque génération = %i"%(self._name, maxiter, popsize*len_X))
        #
        # Operators
        # ---------
        Hm = HO["Direct"].appliedTo
        #
        # Pre-computation of the inverses of B and R
        # ------------------------------------------
        BI = B.getI()
        RI = R.getI()
        #
        # Definition of the cost function
        # -------------------------------
        def CostFunction(x, QualityMeasure="AugmentedWeightedLeastSquares"):
            # Evaluate the selected quality criterion J = Jb + Jo at state x,
            # storing every requested intermediate quantity along the way.
            _X  = numpy.asmatrix(numpy.ravel( x )).T
            self.StoredVariables["CurrentState"].store( _X )
            _HX = Hm( _X )
            _HX = numpy.asmatrix(numpy.ravel( _HX )).T
            _Innovation = Y - _HX
            if self._toStore("SimulatedObservationAtCurrentState") or \
                self._toStore("SimulatedObservationAtCurrentOptimum"):
                self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX )
            if self._toStore("InnovationAtCurrentState"):
                self.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
            #
            if QualityMeasure in ["AugmentedWeightedLeastSquares","AWLS","DA"]:
                if BI is None or RI is None:
                    raise ValueError("Background and Observation error covariance matrix has to be properly defined!")
                Jb  = 0.5 * (_X - Xb).T * BI * (_X - Xb)
                Jo  = 0.5 * (_Innovation).T * RI * (_Innovation)
            elif QualityMeasure in ["WeightedLeastSquares","WLS"]:
                if RI is None:
                    raise ValueError("Observation error covariance matrix has to be properly defined!")
                Jb  = 0.
                Jo  = 0.5 * (_Innovation).T * RI * (_Innovation)
            elif QualityMeasure in ["LeastSquares","LS","L2"]:
                Jb  = 0.
                Jo  = 0.5 * (_Innovation).T * (_Innovation)
            elif QualityMeasure in ["AbsoluteValue","L1"]:
                Jb  = 0.
                Jo  = numpy.sum( numpy.abs(_Innovation) )
            elif QualityMeasure in ["MaximumError","ME"]:
                Jb  = 0.
                Jo  = numpy.max( numpy.abs(_Innovation) )
            #
            J   = float( Jb ) + float( Jo )
            #
            self.StoredVariables["CurrentIterationNumber"].store( len(self.StoredVariables["CostFunctionJ"]) )
            self.StoredVariables["CostFunctionJb"].store( Jb )
            self.StoredVariables["CostFunctionJo"].store( Jo )
            self.StoredVariables["CostFunctionJ" ].store( J )
            if self._toStore("IndexOfOptimum") or \
                self._toStore("CurrentOptimum") or \
                self._toStore("CostFunctionJAtCurrentOptimum") or \
                self._toStore("CostFunctionJbAtCurrentOptimum") or \
                self._toStore("CostFunctionJoAtCurrentOptimum") or \
                self._toStore("SimulatedObservationAtCurrentOptimum"):
                # Best index among the evaluations of THIS run only
                # (nbPreviousSteps is bound later in run(), before any call).
                IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
            if self._toStore("IndexOfOptimum"):
                self.StoredVariables["IndexOfOptimum"].store( IndexMin )
            if self._toStore("CurrentOptimum"):
                self.StoredVariables["CurrentOptimum"].store( self.StoredVariables["CurrentState"][IndexMin] )
            if self._toStore("SimulatedObservationAtCurrentOptimum"):
                self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
            if self._toStore("CostFunctionJAtCurrentOptimum"):
                self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] )
            if self._toStore("CostFunctionJbAtCurrentOptimum"):
                self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJb"][IndexMin] )
            if self._toStore("CostFunctionJoAtCurrentOptimum"):
                self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJo"][IndexMin] )
            return J
        #
        # Starting point of the optimization: Xini = Xb
        # ---------------------------------------------
        Xini = numpy.ravel(Xb)
        #
        # Minimization of the functional
        # ------------------------------
        nbPreviousSteps = self.StoredVariables["CostFunctionJ"].stepnumber()
        #
        optResults = scipy.optimize.differential_evolution(
            CostFunction,
            self._parameters["Bounds"],
            strategy      = str(self._parameters["Minimizer"]).lower(),
            maxiter       = maxiter,
            popsize       = popsize,
            mutation      = self._parameters["MutationDifferentialWeight_F"],
            recombination = self._parameters["CrossOverProbability_CR"],
            disp          = self._parameters["optdisp"],  # presumably set by _pre_run from the logging level — TODO confirm
            )
        #
        # The optimum is taken from the stored history rather than from
        # optResults, so that it is consistent with the stored quantities.
        IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
        MinJ     = self.StoredVariables["CostFunctionJ"][IndexMin]
        Minimum  = self.StoredVariables["CurrentState"][IndexMin]
        #
        # Obtaining the analysis
        # ----------------------
        Xa = numpy.ravel( Minimum )
        #
        self.StoredVariables["Analysis"].store( Xa )
        #
        # Supplementary calculations and/or storage
        # -----------------------------------------
        if self._toStore("OMA") or self._toStore("SimulatedObservationAtOptimum"):
            if self._toStore("SimulatedObservationAtCurrentState"):
                HXa = self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin]
            elif self._toStore("SimulatedObservationAtCurrentOptimum"):
                HXa = self.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
            else:
                HXa = Hm(Xa)
        if self._toStore("Innovation") or \
           self._toStore("OMB"):
            # Fix: HXb was previously referenced without ever being defined,
            # raising a NameError whenever Innovation or OMB was requested.
            HXb = numpy.asmatrix(numpy.ravel( Hm( Xb ) )).T
            d  = Y - HXb
        if self._toStore("Innovation"):
            self.StoredVariables["Innovation"].store( numpy.ravel(d) )
        if self._toStore("OMB"):
            self.StoredVariables["OMB"].store( numpy.ravel(d) )
        if self._toStore("BMA"):
            self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
        if self._toStore("OMA"):
            self.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
        if self._toStore("SimulatedObservationAtBackground"):
            self.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(Hm(Xb)) )
        if self._toStore("SimulatedObservationAtOptimum"):
            self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) )
        #
        self._post_run()
        return 0
286
287 # ==============================================================================
if __name__ == "__main__":
    # Standalone execution only prints a self-identification banner; the
    # algorithm itself is driven through the ADAO framework, not this entry.
    print('\n AUTODIAGNOSTIC\n')