# -*- coding: utf-8 -*-
#
# Copyright (C) 2008-2024 EDF R&D
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
#
# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
#
# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D

import numpy, logging, scipy.optimize
from daCore import BasicObjects
from daCore.PlatformInfo import vfloat
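# NB: vfloat is assumed here to collapse a single-element array or matrix
# result (such as a 1x1 product x.T @ M @ x) into a plain Python float.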

# ==============================================================================
class ElementaryAlgorithm(BasicObjects.Algorithm):
    def __init__(self):
        BasicObjects.Algorithm.__init__(self, "DIFFERENTIALEVOLUTION")
        self.defineRequiredParameter(
            name     = "Minimizer",
            default  = "BEST1BIN",
            typecast = str,
            message  = "Minimization strategy used",
            listval  = [
                "BEST1BIN",
                "BEST1EXP",
                "BEST2BIN",
                "BEST2EXP",
                "RAND1BIN",
                "RAND1EXP",
                "RAND2BIN",
                "RAND2EXP",
                "RANDTOBEST1BIN",
                "RANDTOBEST1EXP",
            ],
            listadv  = [
                "CURRENTTOBEST1EXP",
                "CURRENTTOBEST1BIN",
            ],
        )
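        # These names are upper-case forms of the strategies accepted by
        # scipy.optimize.differential_evolution ("best1bin", "rand1exp", ...);
        # run() lowercases the chosen value before passing it to scipy.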
        self.defineRequiredParameter(
            name     = "MaximumNumberOfIterations",
            default  = 15000,
            typecast = int,
            message  = "Maximum number of generations",
            minval   = 0,
            oldname  = "MaximumNumberOfSteps",
        )
        self.defineRequiredParameter(
            name     = "MaximumNumberOfFunctionEvaluations",
            default  = 15000,
            typecast = int,
            message  = "Maximum number of function evaluations",
            minval   = -1,
        )
        self.defineRequiredParameter(
            name     = "SetSeed",
            typecast = numpy.random.seed,
            message  = "Fixed seed for the random number generator",
        )
        self.defineRequiredParameter(
            name     = "PopulationSize",
            default  = 100,
            typecast = int,
            message  = "Approximate size of the population at each generation",
            minval   = 1,
        )
        self.defineRequiredParameter(
            name     = "MutationDifferentialWeight_F",
            default  = (0.5, 1),
            typecast = tuple,
            message  = "Differential mutation weight, constant or random within the interval, noted F",
            minval   = 0.,
            maxval   = 2.,
        )
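        # Following scipy's convention, giving F as a (min, max) tuple enables
        # dithering: the mutation constant is redrawn from that interval at
        # each generation, which can help convergence.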
        self.defineRequiredParameter(
            name     = "CrossOverProbability_CR",
            default  = 0.7,
            typecast = float,
            message  = "Recombination or crossover probability, noted CR",
            minval   = 0.,
            maxval   = 1.,
        )
        self.defineRequiredParameter(
            name     = "QualityCriterion",
            default  = "AugmentedWeightedLeastSquares",
            typecast = str,
            message  = "Quality criterion used",
            listval  = [
                "AugmentedWeightedLeastSquares", "AWLS", "DA",
                "WeightedLeastSquares", "WLS",
                "LeastSquares", "LS", "L2",
                "AbsoluteValue", "L1",
                "MaximumError", "ME", "Linf",
            ],
        )
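        # The default criterion is the classical augmented weighted least
        # squares functional of data assimilation, evaluated in CostFunction:
        #     J(x) = 1/2 (x - xb)^T B^{-1} (x - xb)
        #          + 1/2 (y - H(x))^T R^{-1} (y - H(x))
        # The other criteria keep only an observation term (L2, L1 or Linf).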
        self.defineRequiredParameter(
            name     = "StoreInternalVariables",
            default  = False,
            typecast = bool,
            message  = "Storage of the internal or intermediate variables of the calculation",
        )
        self.defineRequiredParameter(
            name     = "StoreSupplementaryCalculations",
            default  = [],
            typecast = tuple,
            message  = "List of supplementary calculations to store and/or perform",
            listval  = [
                "Analysis",
                "BMA",
                "CostFunctionJ",
                "CostFunctionJb",
                "CostFunctionJo",
                "CostFunctionJAtCurrentOptimum",
                "CostFunctionJbAtCurrentOptimum",
                "CostFunctionJoAtCurrentOptimum",
                "CurrentIterationNumber",
                "CurrentOptimum",
                "CurrentState",
                "IndexOfOptimum",
                "Innovation",
                "InnovationAtCurrentState",
                "OMA",
                "OMB",
                "SimulatedObservationAtBackground",
                "SimulatedObservationAtCurrentOptimum",
                "SimulatedObservationAtCurrentState",
                "SimulatedObservationAtOptimum",
            ]
        )
        self.defineRequiredParameter(  # No typecast
            name     = "Bounds",
            message  = "List of bound values",
        )
        self.requireInputArguments(
            mandatory= ("Xb", "Y", "HO", "R", "B"),
        )
        self.setAttributes(
            tags=(
                "Optimization",
                "NonLinear",
                "MetaHeuristic",
                "Population",
            ),
            features=(
                "NonLocalOptimization",
                "DerivativeFree",
            ),
        )

    def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
        self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
        #
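        # scipy's popsize is a multiplier of the state dimension: the actual
        # population holds about popsize * len(x) individuals. PopulationSize
        # is therefore divided by the dimension, and maxiter is capped so that
        # the total of roughly (maxiter + 1) * popsize * len(x) evaluations
        # stays within MaximumNumberOfFunctionEvaluations.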
        len_X = numpy.asarray(Xb).size
        popsize = round(self._parameters["PopulationSize"] / len_X)
        maxiter = min(self._parameters["MaximumNumberOfIterations"], round(self._parameters["MaximumNumberOfFunctionEvaluations"] / (popsize * len_X) - 1))  # noqa: E501
        logging.debug("%s Maximum number of generations = %i, population size at each generation = %i"%(self._name, maxiter, popsize * len_X))  # noqa: E501
        #
        Hm = HO["Direct"].appliedTo
        #
        BI = B.getI()
        RI = R.getI()
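        # BI and RI are the inverse (precision) matrices of B and R; they may
        # be None when the covariances are not properly defined, which is only
        # an error for the criteria that actually use them (AWLS, WLS).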

        def CostFunction(x, QualityMeasure="AugmentedWeightedLeastSquares"):
            _X  = numpy.ravel( x ).reshape((-1, 1))
            _HX = numpy.ravel( Hm( _X ) ).reshape((-1, 1))
            _Innovation = Y - _HX
            self.StoredVariables["CurrentState"].store( _X )
            if self._toStore("SimulatedObservationAtCurrentState") or \
                    self._toStore("SimulatedObservationAtCurrentOptimum"):
                self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX )
            if self._toStore("InnovationAtCurrentState"):
                self.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
            #
            if QualityMeasure in ["AugmentedWeightedLeastSquares", "AWLS", "DA"]:
                if BI is None or RI is None:
                    raise ValueError("Background and Observation error covariance matrices have to be properly defined!")
                Jb  = vfloat(0.5 * (_X - Xb).T @ (BI @ (_X - Xb)))
                Jo  = vfloat(0.5 * _Innovation.T @ (RI @ _Innovation))
            elif QualityMeasure in ["WeightedLeastSquares", "WLS"]:
                if RI is None:
                    raise ValueError("Observation error covariance matrix has to be properly defined!")
                Jb  = 0.
                Jo  = vfloat(0.5 * _Innovation.T @ (RI @ _Innovation))
            elif QualityMeasure in ["LeastSquares", "LS", "L2"]:
                Jb  = 0.
                Jo  = vfloat(0.5 * _Innovation.T @ _Innovation)
            elif QualityMeasure in ["AbsoluteValue", "L1"]:
                Jb  = 0.
                Jo  = vfloat(numpy.sum( numpy.abs(_Innovation) ))
            elif QualityMeasure in ["MaximumError", "ME", "Linf"]:
                Jb  = 0.
                Jo  = vfloat(numpy.max( numpy.abs(_Innovation) ))
            #
            J   = Jb + Jo
            #
            self.StoredVariables["CurrentIterationNumber"].store( len(self.StoredVariables["CostFunctionJ"]) )
            self.StoredVariables["CostFunctionJb"].store( Jb )
            self.StoredVariables["CostFunctionJo"].store( Jo )
            self.StoredVariables["CostFunctionJ" ].store( J )
            if self._toStore("IndexOfOptimum") or \
                    self._toStore("CurrentOptimum") or \
                    self._toStore("CostFunctionJAtCurrentOptimum") or \
                    self._toStore("CostFunctionJbAtCurrentOptimum") or \
                    self._toStore("CostFunctionJoAtCurrentOptimum") or \
                    self._toStore("SimulatedObservationAtCurrentOptimum"):
                IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
            if self._toStore("IndexOfOptimum"):
                self.StoredVariables["IndexOfOptimum"].store( IndexMin )
            if self._toStore("CurrentOptimum"):
                self.StoredVariables["CurrentOptimum"].store( self.StoredVariables["CurrentState"][IndexMin] )
            if self._toStore("SimulatedObservationAtCurrentOptimum"):
                self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )  # noqa: E501
            if self._toStore("CostFunctionJAtCurrentOptimum"):
                self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] )  # noqa: E501
            if self._toStore("CostFunctionJbAtCurrentOptimum"):
                self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJb"][IndexMin] )  # noqa: E501
            if self._toStore("CostFunctionJoAtCurrentOptimum"):
                self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJo"][IndexMin] )  # noqa: E501
            return J
        #
        Xini = numpy.ravel(Xb)
        #
        # Minimization of the functional
        # ------------------------------
        nbPreviousSteps = self.StoredVariables["CostFunctionJ"].stepnumber()
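        # CostFunction is a closure and looks nbPreviousSteps up at call time,
        # so the argmin over CostFunctionJ only spans the evaluations made by
        # the present run, even if the same object has been run before.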
        #
        scipy.optimize.differential_evolution(
            CostFunction,
            self._parameters["Bounds"],
            strategy      = str(self._parameters["Minimizer"]).lower(),
            maxiter       = maxiter,
            popsize       = popsize,
            mutation      = self._parameters["MutationDifferentialWeight_F"],
            recombination = self._parameters["CrossOverProbability_CR"],
            disp          = self._parameters["optdisp"],
            x0            = Xini,
        )
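        # The OptimizeResult returned by scipy is deliberately not used: every
        # evaluated state and cost has been stored by CostFunction, so the
        # optimum is recovered below as the argmin of the stored cost series.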
        #
        IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
        Minimum  = self.StoredVariables["CurrentState"][IndexMin]
        #
        # Retrieving the analysis
        # -----------------------
        Xa = Minimum
        #
        self.StoredVariables["Analysis"].store( Xa )
        #
        # Supplementary calculations and/or storage
        # -----------------------------------------
        if self._toStore("OMA") or \
                self._toStore("SimulatedObservationAtOptimum"):
            if self._toStore("SimulatedObservationAtCurrentState"):
                HXa = self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin]
            elif self._toStore("SimulatedObservationAtCurrentOptimum"):
                HXa = self.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
            else:
                HXa = Hm(Xa)
            HXa = HXa.reshape((-1, 1))
        if self._toStore("Innovation") or \
                self._toStore("OMB") or \
                self._toStore("SimulatedObservationAtBackground"):
            HXb = Hm(Xb).reshape((-1, 1))
            Innovation = Y - HXb
        if self._toStore("Innovation"):
            self.StoredVariables["Innovation"].store( Innovation )
        if self._toStore("OMB"):
            self.StoredVariables["OMB"].store( Innovation )
        if self._toStore("BMA"):
            self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
        if self._toStore("OMA"):
            self.StoredVariables["OMA"].store( Y - HXa )
        if self._toStore("SimulatedObservationAtBackground"):
            self.StoredVariables["SimulatedObservationAtBackground"].store( HXb )
        if self._toStore("SimulatedObservationAtOptimum"):
            self.StoredVariables["SimulatedObservationAtOptimum"].store( HXa )
        #
        self._post_run(HO, EM)
        return 0

# ==============================================================================
if __name__ == "__main__":
    print("\n AUTODIAGNOSTIC\n")
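    # A minimal usage sketch, assuming the standard ADAO "adaoBuilder" textual
    # interface; the bounds, vectors and operator below are illustrative only:
    #
    #   from adao import adaoBuilder
    #   case = adaoBuilder.New()
    #   case.set( 'AlgorithmParameters',
    #       Algorithm  = 'DifferentialEvolution',
    #       Parameters = {
    #           "Bounds": [[0., 10.], [0., 10.], [0., 10.]],
    #           "PopulationSize": 40,
    #           "MaximumNumberOfFunctionEvaluations": 1000,
    #       } )
    #   case.set( 'Background',          Vector = [1., 1., 1.] )
    #   case.set( 'BackgroundError',     ScalarSparseMatrix = 1. )
    #   case.set( 'Observation',         Vector = [2., 4., 8.] )
    #   case.set( 'ObservationError',    ScalarSparseMatrix = 1. )
    #   case.set( 'ObservationOperator', Matrix = [[1, 0, 0], [0, 1, 0], [0, 0, 1]] )
    #   case.execute()
    #   print( case.get('Analysis')[-1] )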