src/daComposant/daAlgorithms/DifferentialEvolution.py
# -*- coding: utf-8 -*-
#
# Copyright (C) 2008-2021 EDF R&D
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
#
# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
#
# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D

import logging
from daCore import BasicObjects
import numpy, scipy.optimize

# ==============================================================================
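# Differential Evolution (DE) is a population-based, derivative-free
# metaheuristic: at each generation, candidate states are perturbed by
# weighted differences of other population members, recombined, and kept if
# they improve the cost. This class only defines the cost function and the
# parameters, and delegates the optimization loop itself to
# scipy.optimize.differential_evolution.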
class ElementaryAlgorithm(BasicObjects.Algorithm):
    def __init__(self):
        BasicObjects.Algorithm.__init__(self, "DIFFERENTIALEVOLUTION")
        self.defineRequiredParameter(
            name     = "Minimizer",
            default  = "BEST1BIN",
            typecast = str,
            message  = "Minimization strategy used",
            listval  = [
                "BEST1BIN",
                "BEST1EXP",
                "BEST2BIN",
                "BEST2EXP",
                "RAND1BIN",
                "RAND1EXP",
                "RAND2BIN",
                "RAND2EXP",
                "RANDTOBEST1BIN",
                "RANDTOBEST1EXP",
                ],
            listadv  = [
                "CURRENTTOBEST1EXP",
                "CURRENTTOBEST1BIN",
                ],
            )
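        # Note: these uppercase names are lowercased before being handed to
        # SciPy, matching its strategy identifiers ("best1bin", "rand1exp",
        # etc.); the CURRENTTOBEST1* variants are exposed as advanced options.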
        self.defineRequiredParameter(
            name     = "MaximumNumberOfSteps",
            default  = 15000,
            typecast = int,
            message  = "Maximum number of generations",
            minval   = 0,
            )
        self.defineRequiredParameter(
            name     = "MaximumNumberOfFunctionEvaluations",
            default  = 15000,
            typecast = int,
            message  = "Maximum number of function evaluations",
            minval   = -1,
            )
        self.defineRequiredParameter(
            name     = "PopulationSize",
            default  = 100,
            typecast = int,
            message  = "Approximate size of the population at each generation",
            minval   = 1,
            )
        self.defineRequiredParameter(
            name     = "MutationDifferentialWeight_F",
            default  = (0.5, 1),
            typecast = tuple,
            message  = "Differential mutation weight, constant or random within the interval, denoted F",
            minval   = 0.,
            maxval   = 2.,
            )
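        # When F is given as an interval (F_min, F_max), as in the default
        # above, SciPy applies "dithering": a new weight is drawn uniformly
        # in the interval at each generation, which often helps convergence.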
        self.defineRequiredParameter(
            name     = "CrossOverProbability_CR",
            default  = 0.7,
            typecast = float,
            message  = "Recombination or crossover probability, denoted CR",
            minval   = 0.,
            maxval   = 1.,
            )
        self.defineRequiredParameter(
            name     = "QualityCriterion",
            default  = "AugmentedWeightedLeastSquares",
            typecast = str,
            message  = "Quality criterion used",
            listval  = [
                "AugmentedWeightedLeastSquares","AWLS","DA",
                "WeightedLeastSquares","WLS",
                "LeastSquares","LS","L2",
                "AbsoluteValue","L1",
                "MaximumError","ME",
                ],
            )
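        # These criteria correspond to the following cost functions, with the
        # innovation d = Y - H(X) (see CostFunction in run() below):
        #   AWLS/DA : J = (1/2)(X-Xb)' B^{-1} (X-Xb) + (1/2) d' R^{-1} d
        #   WLS     : J = (1/2) d' R^{-1} d
        #   LS/L2   : J = (1/2) d' d
        #   L1      : J = sum(|d|)
        #   ME      : J = max(|d|)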
        self.defineRequiredParameter(
            name     = "StoreInternalVariables",
            default  = False,
            typecast = bool,
            message  = "Storage of the internal or intermediate variables of the calculation",
            )
        self.defineRequiredParameter(
            name     = "StoreSupplementaryCalculations",
            default  = [],
            typecast = tuple,
            message  = "List of supplementary calculations to store and/or perform",
            listval  = [
                "Analysis",
                "BMA",
                "CostFunctionJ",
                "CostFunctionJb",
                "CostFunctionJo",
                "CostFunctionJAtCurrentOptimum",
                "CostFunctionJbAtCurrentOptimum",
                "CostFunctionJoAtCurrentOptimum",
                "CurrentIterationNumber",
                "CurrentOptimum",
                "CurrentState",
                "IndexOfOptimum",
                "Innovation",
                "InnovationAtCurrentState",
                "OMA",
                "OMB",
                "SimulatedObservationAtBackground",
                "SimulatedObservationAtCurrentOptimum",
                "SimulatedObservationAtCurrentState",
                "SimulatedObservationAtOptimum",
                ]
            )
        self.defineRequiredParameter(
            name     = "SetSeed",
            typecast = numpy.random.seed,
            message  = "Fixed seed for the random number generator",
            )
        self.defineRequiredParameter( # No typecast
            name     = "Bounds",
            message  = "List of bound values",
            )
        self.defineRequiredParameter( # Forwarded to SciPy as "disp" in run()
            name     = "optdisp",
            default  = False,
            typecast = bool,
            message  = "Display of intermediate optimizer states",
            )
        self.requireInputArguments(
            mandatory= ("Xb", "Y", "HO", "R", "B"),
            )
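        # Xb: background state, Y: observations, HO: observation operator,
        # B and R: background and observation error covariance matrices.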
        self.setAttributes(tags=(
            "Optimization",
            "NonLinear",
            "MetaHeuristic",
            "Population",
            ))

    def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
        self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
        #
        len_X = numpy.asarray(Xb).size
        popsize = max(1, round(self._parameters["PopulationSize"]/len_X))
        maxiter = min(self._parameters["MaximumNumberOfSteps"], round(self._parameters["MaximumNumberOfFunctionEvaluations"]/(popsize*len_X) - 1))
        logging.debug("%s Maximum number of generations = %i, size of the population at each generation = %i"%(self._name, maxiter, popsize*len_X))
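        # SciPy builds its population with popsize*len(x) individuals and
        # spends about (maxiter + 1)*popsize*len(x) function evaluations in
        # total, hence the rescaling of the population size and of the
        # evaluation budget above.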
        #
        # Operators
        # ---------
        Hm = HO["Direct"].appliedTo
        #
        # Precompute the inverses of B and R
        # ----------------------------------
        BI = B.getI()
        RI = R.getI()
        #
        # Definition of the cost function
        # -------------------------------
        def CostFunction(x, QualityMeasure="AugmentedWeightedLeastSquares"):
            _X  = numpy.asmatrix(numpy.ravel( x )).T
            self.StoredVariables["CurrentState"].store( _X )
            _HX = Hm( _X )
            _HX = numpy.asmatrix(numpy.ravel( _HX )).T
            _Innovation = Y - _HX
            if self._toStore("SimulatedObservationAtCurrentState") or \
                self._toStore("SimulatedObservationAtCurrentOptimum"):
                self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX )
            if self._toStore("InnovationAtCurrentState"):
                self.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
            #
            if QualityMeasure in ["AugmentedWeightedLeastSquares","AWLS","DA"]:
                if BI is None or RI is None:
                    raise ValueError("Background and Observation error covariance matrices have to be properly defined!")
                Jb  = 0.5 * (_X - Xb).T * BI * (_X - Xb)
                Jo  = 0.5 * (_Innovation).T * RI * (_Innovation)
            elif QualityMeasure in ["WeightedLeastSquares","WLS"]:
                if RI is None:
                    raise ValueError("Observation error covariance matrix has to be properly defined!")
                Jb  = 0.
                Jo  = 0.5 * (_Innovation).T * RI * (_Innovation)
            elif QualityMeasure in ["LeastSquares","LS","L2"]:
                Jb  = 0.
                Jo  = 0.5 * (_Innovation).T * (_Innovation)
            elif QualityMeasure in ["AbsoluteValue","L1"]:
                Jb  = 0.
                Jo  = numpy.sum( numpy.abs(_Innovation) )
            elif QualityMeasure in ["MaximumError","ME"]:
                Jb  = 0.
                Jo  = numpy.max( numpy.abs(_Innovation) )
            #
            J   = float( Jb ) + float( Jo )
            #
            self.StoredVariables["CurrentIterationNumber"].store( len(self.StoredVariables["CostFunctionJ"]) )
            self.StoredVariables["CostFunctionJb"].store( Jb )
            self.StoredVariables["CostFunctionJo"].store( Jo )
            self.StoredVariables["CostFunctionJ" ].store( J )
            if self._toStore("IndexOfOptimum") or \
                self._toStore("CurrentOptimum") or \
                self._toStore("CostFunctionJAtCurrentOptimum") or \
                self._toStore("CostFunctionJbAtCurrentOptimum") or \
                self._toStore("CostFunctionJoAtCurrentOptimum") or \
                self._toStore("SimulatedObservationAtCurrentOptimum"):
                IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
            if self._toStore("IndexOfOptimum"):
                self.StoredVariables["IndexOfOptimum"].store( IndexMin )
            if self._toStore("CurrentOptimum"):
                self.StoredVariables["CurrentOptimum"].store( self.StoredVariables["CurrentState"][IndexMin] )
            if self._toStore("SimulatedObservationAtCurrentOptimum"):
                self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
            if self._toStore("CostFunctionJAtCurrentOptimum"):
                self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] )
            if self._toStore("CostFunctionJbAtCurrentOptimum"):
                self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJb"][IndexMin] )
            if self._toStore("CostFunctionJoAtCurrentOptimum"):
                self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJo"][IndexMin] )
            return J
        #
        # Starting point of the optimization: Xini = Xb
        # ---------------------------------------------
        Xini = numpy.ravel(Xb)
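        # Xini is kept for symmetry with the other ADAO optimizers but is not
        # passed to differential_evolution, which draws its initial population
        # inside Bounds (an x0 argument only appeared in later SciPy releases).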
        #
        # Minimization of the functional
        # ------------------------------
        nbPreviousSteps = self.StoredVariables["CostFunctionJ"].stepnumber()
        #
        optResults = scipy.optimize.differential_evolution(
            CostFunction,
            self._parameters["Bounds"],
            strategy      = str(self._parameters["Minimizer"]).lower(),
            maxiter       = maxiter,
            popsize       = popsize,
            mutation      = self._parameters["MutationDifferentialWeight_F"],
            recombination = self._parameters["CrossOverProbability_CR"],
            disp          = self._parameters["optdisp"],
            )
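        # The optimum is recovered below from the stored evaluation history
        # rather than from optResults.x, so that the reported analysis stays
        # consistent with the stored CostFunctionJ series.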
        #
        IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
        MinJ     = self.StoredVariables["CostFunctionJ"][IndexMin]
        Minimum  = self.StoredVariables["CurrentState"][IndexMin]
        #
        # Retrieval of the analysis
        # -------------------------
        Xa = numpy.ravel( Minimum )
        #
        self.StoredVariables["Analysis"].store( Xa )
        #
        # Supplementary calculations and/or storage
        # -----------------------------------------
        if self._toStore("OMA") or self._toStore("SimulatedObservationAtOptimum"):
            if self._toStore("SimulatedObservationAtCurrentState"):
                HXa = self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin]
            elif self._toStore("SimulatedObservationAtCurrentOptimum"):
                HXa = self.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
            else:
                HXa = Hm(Xa)
        if self._toStore("Innovation") or \
           self._toStore("OMB"):
            HXb = numpy.asmatrix(numpy.ravel( Hm(Xb) )).T
            d  = Y - HXb
        if self._toStore("Innovation"):
            self.StoredVariables["Innovation"].store( numpy.ravel(d) )
        if self._toStore("OMB"):
            self.StoredVariables["OMB"].store( numpy.ravel(d) )
        if self._toStore("BMA"):
            self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
        if self._toStore("OMA"):
            self.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
        if self._toStore("SimulatedObservationAtBackground"):
            self.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(Hm(Xb)) )
        if self._toStore("SimulatedObservationAtOptimum"):
            self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) )
        #
        self._post_run()
        return 0

# ==============================================================================
if __name__ == "__main__":
    print('\n AUTODIAGNOSTIC\n')
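
# ==============================================================================
# A minimal usage sketch, kept as a comment for illustration only (values and
# dimensions are made up), driving this algorithm through the documented
# adaoBuilder interface:
#
#     from adao import adaoBuilder
#     case = adaoBuilder.New()
#     case.set( 'AlgorithmParameters', Algorithm='DifferentialEvolution',
#               Parameters={
#                   "Bounds":[[0., 10.]]*3,
#                   "PopulationSize":30,
#                   "MaximumNumberOfSteps":50,
#                   } )
#     case.set( 'Background',          Vector=[5., 5., 5.] )
#     case.set( 'BackgroundError',     ScalarSparseMatrix=1. )
#     case.set( 'Observation',         Vector=[2., 4., 8.] )
#     case.set( 'ObservationError',    ScalarSparseMatrix=1. )
#     case.set( 'ObservationOperator', Matrix=[[1.,0.,0.],[0.,1.,0.],[0.,0.,1.]] )
#     case.execute()
#     print( case.get('Analysis')[-1] )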