# -*- coding: utf-8 -*-
#
# Copyright (C) 2008-2022 EDF R&D
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
#
# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
#
# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D

import numpy, logging, scipy.optimize
from daCore import BasicObjects

# ==============================================================================
class ElementaryAlgorithm(BasicObjects.Algorithm):
    def __init__(self):
        BasicObjects.Algorithm.__init__(self, "DIFFERENTIALEVOLUTION")
        self.defineRequiredParameter(
            name     = "Minimizer",
            default  = "BEST1BIN",
            typecast = str,
            message  = "Minimization strategy used",
            listval  = [
                "BEST1BIN",
                "BEST1EXP",
                "BEST2BIN",
                "BEST2EXP",
                "RAND1BIN",
                "RAND1EXP",
                "RAND2BIN",
                "RAND2EXP",
                "RANDTOBEST1BIN",
                "RANDTOBEST1EXP",
                ],
            listadv  = [
                "CURRENTTOBEST1EXP",
                "CURRENTTOBEST1BIN",
                ],
            )
        self.defineRequiredParameter(
            name     = "MaximumNumberOfIterations",
            default  = 15000,
            typecast = int,
            message  = "Maximum number of generations",
            minval   = 0,
            oldname  = "MaximumNumberOfSteps",
            )
        self.defineRequiredParameter(
            name     = "MaximumNumberOfFunctionEvaluations",
            default  = 15000,
            typecast = int,
            message  = "Maximum number of function evaluations",
            minval   = -1,
            )
        self.defineRequiredParameter(
            name     = "SetSeed",
            typecast = numpy.random.seed,
            message  = "Fixed seed for the random number generator",
            )
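        # Note: the "SetSeed" typecast above seeds numpy's global random
        # generator as a side effect as soon as a user value is given.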
        self.defineRequiredParameter(
            name     = "PopulationSize",
            default  = 100,
            typecast = int,
            message  = "Approximate size of the population at each generation",
            minval   = 1,
            )
        self.defineRequiredParameter(
            name     = "MutationDifferentialWeight_F",
            default  = (0.5, 1),
            typecast = tuple,
            message  = "Differential mutation weight, constant or drawn at random in the interval, denoted F",
            minval   = 0.,
            maxval   = 2.,
            )
        self.defineRequiredParameter(
            name     = "CrossOverProbability_CR",
            default  = 0.7,
            typecast = float,
            message  = "Recombination or crossover probability, denoted CR",
            minval   = 0.,
            maxval   = 1.,
            )
        self.defineRequiredParameter(
            name     = "QualityCriterion",
            default  = "AugmentedWeightedLeastSquares",
            typecast = str,
            message  = "Quality criterion used",
            listval  = [
                "AugmentedWeightedLeastSquares", "AWLS", "DA",
                "WeightedLeastSquares", "WLS",
                "LeastSquares", "LS", "L2",
                "AbsoluteValue", "L1",
                "MaximumError", "ME",
                ],
            )
        self.defineRequiredParameter(
            name     = "StoreInternalVariables",
            default  = False,
            typecast = bool,
            message  = "Storage of the internal or intermediate variables of the computation",
            )
        self.defineRequiredParameter(
            name     = "StoreSupplementaryCalculations",
            default  = [],
            typecast = tuple,
            message  = "List of supplementary calculations to store and/or perform",
            listval  = [
                "Analysis",
                "BMA",
                "CostFunctionJ",
                "CostFunctionJb",
                "CostFunctionJo",
                "CostFunctionJAtCurrentOptimum",
                "CostFunctionJbAtCurrentOptimum",
                "CostFunctionJoAtCurrentOptimum",
                "CurrentIterationNumber",
                "CurrentOptimum",
                "CurrentState",
                "IndexOfOptimum",
                "Innovation",
                "InnovationAtCurrentState",
                "OMA",
                "OMB",
                "SimulatedObservationAtBackground",
                "SimulatedObservationAtCurrentOptimum",
                "SimulatedObservationAtCurrentState",
                "SimulatedObservationAtOptimum",
                ]
            )
        self.defineRequiredParameter( # No type
            name     = "Bounds",
            message  = "List of bound values",
            )
        self.requireInputArguments(
            mandatory= ("Xb", "Y", "HO", "R", "B"),
            )
        self.setAttributes(tags=(
            "Optimization",
            "NonLinear",
            "MetaHeuristic",
            "Population",
            ))

    def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
        self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
        #
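        # scipy.optimize.differential_evolution interprets its "popsize"
        # argument as a multiplier: each generation holds popsize*len(x)
        # individuals and costs about as many function evaluations. The
        # user-facing "PopulationSize" is therefore divided by the state
        # dimension, and the number of generations is capped so that the
        # total stays within "MaximumNumberOfFunctionEvaluations".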
        len_X = numpy.asarray(Xb).size
        popsize = round(self._parameters["PopulationSize"] / len_X)
        maxiter = min(self._parameters["MaximumNumberOfIterations"], round(self._parameters["MaximumNumberOfFunctionEvaluations"] / (popsize * len_X) - 1))
        logging.debug("%s Maximum number of generations = %i, size of the population at each generation = %i"%(self._name, maxiter, popsize*len_X))
        #
        Hm = HO["Direct"].appliedTo
        #
        BI = B.getI()
        RI = R.getI()
        #
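        # Quality (cost) function J(x) = Jb(x) + Jo(x), with the innovation
        # d = Y - H(x), according to the chosen QualityMeasure:
        #  - AugmentedWeightedLeastSquares (AWLS/DA):
        #       Jb = 0.5*(x-Xb)'*BI*(x-Xb) and Jo = 0.5*d'*RI*d
        #  - WeightedLeastSquares (WLS): Jb = 0 and Jo = 0.5*d'*RI*d
        #  - LeastSquares (LS/L2):       Jb = 0 and Jo = 0.5*d'*d
        #  - AbsoluteValue (L1):         Jb = 0 and Jo = sum(|d|)
        #  - MaximumError (ME):          Jb = 0 and Jo = max(|d|)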
        def CostFunction(x, QualityMeasure="AugmentedWeightedLeastSquares"):
            _X  = numpy.ravel( x ).reshape((-1,1))
            _HX = numpy.ravel( Hm( _X ) ).reshape((-1,1))
            _Innovation = Y - _HX
            self.StoredVariables["CurrentState"].store( _X )
            if self._toStore("SimulatedObservationAtCurrentState") or \
                self._toStore("SimulatedObservationAtCurrentOptimum"):
                self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX )
            if self._toStore("InnovationAtCurrentState"):
                self.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
            #
            if QualityMeasure in ["AugmentedWeightedLeastSquares","AWLS","DA"]:
                if BI is None or RI is None:
                    raise ValueError("Background and observation error covariance matrices have to be properly defined!")
                Jb  = 0.5 * (_X - Xb).T @ (BI @ (_X - Xb))
                Jo  = 0.5 * _Innovation.T @ (RI @ _Innovation)
            elif QualityMeasure in ["WeightedLeastSquares","WLS"]:
                if RI is None:
                    raise ValueError("Observation error covariance matrix has to be properly defined!")
                Jb  = 0.
                Jo  = 0.5 * _Innovation.T @ (RI @ _Innovation)
            elif QualityMeasure in ["LeastSquares","LS","L2"]:
                Jb  = 0.
                Jo  = 0.5 * _Innovation.T @ _Innovation
            elif QualityMeasure in ["AbsoluteValue","L1"]:
                Jb  = 0.
                Jo  = numpy.sum( numpy.abs(_Innovation) )
            elif QualityMeasure in ["MaximumError","ME"]:
                Jb  = 0.
                Jo  = numpy.max( numpy.abs(_Innovation) )
            #
            J   = float( Jb ) + float( Jo )
            #
            self.StoredVariables["CurrentIterationNumber"].store( len(self.StoredVariables["CostFunctionJ"]) )
            self.StoredVariables["CostFunctionJb"].store( Jb )
            self.StoredVariables["CostFunctionJo"].store( Jo )
            self.StoredVariables["CostFunctionJ" ].store( J )
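            # Track the optimum reached so far in this run: the argmin is
            # taken over the J values stored since the beginning of the run,
            # using the nbPreviousSteps offset captured from the enclosing
            # scope (it is defined below, before the optimizer is called).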
            if self._toStore("IndexOfOptimum") or \
                self._toStore("CurrentOptimum") or \
                self._toStore("CostFunctionJAtCurrentOptimum") or \
                self._toStore("CostFunctionJbAtCurrentOptimum") or \
                self._toStore("CostFunctionJoAtCurrentOptimum") or \
                self._toStore("SimulatedObservationAtCurrentOptimum"):
                IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
            if self._toStore("IndexOfOptimum"):
                self.StoredVariables["IndexOfOptimum"].store( IndexMin )
            if self._toStore("CurrentOptimum"):
                self.StoredVariables["CurrentOptimum"].store( self.StoredVariables["CurrentState"][IndexMin] )
            if self._toStore("SimulatedObservationAtCurrentOptimum"):
                self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
            if self._toStore("CostFunctionJAtCurrentOptimum"):
                self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] )
            if self._toStore("CostFunctionJbAtCurrentOptimum"):
                self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJb"][IndexMin] )
            if self._toStore("CostFunctionJoAtCurrentOptimum"):
                self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJo"][IndexMin] )
            return J
        #
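        # Initial state, kept as a reference only: it is not passed to
        # differential_evolution, which draws its own initial population
        # within the bounds.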
        Xini = numpy.ravel(Xb)
        #
        # Minimization of the cost function
        # ---------------------------------
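        # Number of J values already stored by previous runs, used as an
        # offset so that the search for the optimum is restricted to the
        # evaluations of this run only.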
        nbPreviousSteps = self.StoredVariables["CostFunctionJ"].stepnumber()
        #
        optResults = scipy.optimize.differential_evolution(
            CostFunction,
            self._parameters["Bounds"],
            strategy      = str(self._parameters["Minimizer"]).lower(),
            maxiter       = maxiter,
            popsize       = popsize,
            mutation      = self._parameters["MutationDifferentialWeight_F"],
            recombination = self._parameters["CrossOverProbability_CR"],
            disp          = self._parameters.get("optdisp", False), # "optdisp" is not declared in this file, so default to a silent run
            )
        #
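        # The optimizer result is not used directly: the best state is
        # recovered from the internally stored evaluations, which keeps the
        # analysis consistent with the stored supplementary quantities.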
        IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
        MinJ     = self.StoredVariables["CostFunctionJ"][IndexMin]
        Minimum  = self.StoredVariables["CurrentState"][IndexMin]
        #
        # Obtaining the analysis
        # ----------------------
        Xa = Minimum
        #
        self.StoredVariables["Analysis"].store( Xa )
        #
        # Supplementary calculations and/or storage
        # -----------------------------------------
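        # H(Xa) is preferably recovered from already stored simulations, and
        # recomputed only when no suitable storage was requested.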
        if self._toStore("OMA") or \
            self._toStore("SimulatedObservationAtOptimum"):
            if self._toStore("SimulatedObservationAtCurrentState"):
                HXa = self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin]
            elif self._toStore("SimulatedObservationAtCurrentOptimum"):
                HXa = self.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
            else:
                HXa = Hm(Xa)
            HXa = HXa.reshape((-1,1))
        if self._toStore("Innovation") or \
            self._toStore("OMB") or \
            self._toStore("SimulatedObservationAtBackground"):
            HXb = Hm(Xb).reshape((-1,1))
            Innovation = Y - HXb
        if self._toStore("Innovation"):
            self.StoredVariables["Innovation"].store( Innovation )
        if self._toStore("OMB"):
            self.StoredVariables["OMB"].store( Innovation )
        if self._toStore("BMA"):
            self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
        if self._toStore("OMA"):
            self.StoredVariables["OMA"].store( Y - HXa )
        if self._toStore("SimulatedObservationAtBackground"):
            self.StoredVariables["SimulatedObservationAtBackground"].store( HXb )
        if self._toStore("SimulatedObservationAtOptimum"):
            self.StoredVariables["SimulatedObservationAtOptimum"].store( HXa )
        #
        self._post_run(HO)
        return 0

# ==============================================================================
if __name__ == "__main__":
    print('\n AUTODIAGNOSTIC\n')
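    # A minimal usage sketch (an illustration only, assuming the standard
    # ADAO "adaoBuilder" entry point; it is not run by this autodiagnostic):
    #
    #   from adao import adaoBuilder
    #   case = adaoBuilder.New()
    #   case.set( 'AlgorithmParameters',
    #       Algorithm  = 'DifferentialEvolution',
    #       Parameters = {
    #           "Bounds"                    : [[0., 10.], [0., 10.]],
    #           "MaximumNumberOfIterations" : 100,
    #           "SetSeed"                   : 123456789,
    #           },
    #       )
    #   case.set( 'Background',          Vector = [5., 5.] )
    #   case.set( 'BackgroundError',     ScalarSparseMatrix = 1. )
    #   case.set( 'Observation',         Vector = [2., 3.] )
    #   case.set( 'ObservationError',    ScalarSparseMatrix = 1. )
    #   case.set( 'ObservationOperator', Matrix = [[1., 0.], [0., 1.]] )
    #   case.execute()
    #   print( case.get('Analysis')[-1] )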