]> SALOME platform Git repositories - modules/adao.git/blob - src/daComposant/daAlgorithms/DifferentialEvolution.py
Salome HOME
Improvement of algorithms arguments validation and tests
[modules/adao.git] / src / daComposant / daAlgorithms / DifferentialEvolution.py
1 # -*- coding: utf-8 -*-
2 #
3 # Copyright (C) 2008-2020 EDF R&D
4 #
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
9 #
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13 # Lesser General Public License for more details.
14 #
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
18 #
19 # See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
20 #
21 # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
22
23 import logging
24 from daCore import BasicObjects
25 import numpy, scipy.optimize
26
27 # ==============================================================================
class ElementaryAlgorithm(BasicObjects.Algorithm):
    """
    Global minimization of the data-assimilation quality criterion by the
    Differential Evolution metaheuristic, wrapping
    scipy.optimize.differential_evolution. The analysis is the best state
    encountered over all cost-function evaluations.
    """
    def __init__(self):
        BasicObjects.Algorithm.__init__(self, "DIFFERENTIALEVOLUTION")
        self.defineRequiredParameter(
            name     = "Minimizer",
            default  = "BEST1BIN",
            typecast = str,
            message  = "Stratégie de minimisation utilisée",
            listval  = [
                "BEST1BIN",
                "BEST1EXP",
                "RAND1EXP",
                "RANDTOBEST1EXP",
                # "CURRENTTOBEST1EXP",
                "BEST2EXP",
                "RAND2EXP",
                "RANDTOBEST1BIN",
                # "CURRENTTOBEST1BIN",
                "BEST2BIN",
                "RAND2BIN",
                "RAND1BIN",
                ],
            )
        self.defineRequiredParameter(
            name     = "MaximumNumberOfSteps",
            default  = 15000,
            typecast = int,
            message  = "Nombre maximal de générations",
            minval   = 0,
            )
        self.defineRequiredParameter(
            name     = "MaximumNumberOfFunctionEvaluations",
            default  = 15000,
            typecast = int,
            message  = "Nombre maximal d'évaluations de la fonction",
            minval   = -1,
            )
        self.defineRequiredParameter(
            name     = "PopulationSize",
            default  = 100,
            typecast = int,
            message  = "Taille approximative de la population à chaque génération",
            minval   = 1,
            )
        self.defineRequiredParameter(
            name     = "MutationDifferentialWeight_F",
            default  = (0.5, 1),
            typecast = tuple,
            message  = "Poids différentiel de mutation, constant ou aléatoire dans l'intervalle, noté F",
            minval   = 0.,
            maxval   = 2.,
            )
        self.defineRequiredParameter(
            name     = "CrossOverProbability_CR",
            default  = 0.7,
            typecast = float,
            message  = "Probabilité de recombinaison ou de croisement, notée CR",
            minval   = 0.,
            maxval   = 1.,
            )
        self.defineRequiredParameter(
            name     = "QualityCriterion",
            default  = "AugmentedWeightedLeastSquares",
            typecast = str,
            message  = "Critère de qualité utilisé",
            listval  = ["AugmentedWeightedLeastSquares","AWLS","DA",
                        "WeightedLeastSquares","WLS",
                        "LeastSquares","LS","L2",
                        "AbsoluteValue","L1",
                        "MaximumError","ME"],
            )
        self.defineRequiredParameter(
            name     = "StoreInternalVariables",
            default  = False,
            typecast = bool,
            message  = "Stockage des variables internes ou intermédiaires du calcul",
            )
        self.defineRequiredParameter(
            name     = "StoreSupplementaryCalculations",
            default  = [],
            typecast = tuple,
            message  = "Liste de calculs supplémentaires à stocker et/ou effectuer",
            listval  = [
                "Analysis",
                "BMA",
                "CostFunctionJ",
                "CostFunctionJb",
                "CostFunctionJo",
                "CostFunctionJAtCurrentOptimum",
                "CostFunctionJbAtCurrentOptimum",
                "CostFunctionJoAtCurrentOptimum",
                "CurrentOptimum",
                "CurrentState",
                "IndexOfOptimum",
                "Innovation",
                "InnovationAtCurrentState",
                "OMA",
                "OMB",
                "SimulatedObservationAtBackground",
                "SimulatedObservationAtCurrentOptimum",
                "SimulatedObservationAtCurrentState",
                "SimulatedObservationAtOptimum",
                ]
            )
        self.defineRequiredParameter(
            name     = "SetSeed",
            typecast = numpy.random.seed,
            message  = "Graine fixée pour le générateur aléatoire",
            )
        self.defineRequiredParameter( # No typecast: the bounds are kept exactly as given
            name     = "Bounds",
            message  = "Liste des valeurs de bornes",
            )
        self.requireInputArguments(
            mandatory= ("Xb", "Y", "HO", "R", "B" ),
            )
        self.setAttributes(tags=(
            "Optimization",
            "NonLinear",
            "MetaHeuristic",
            "Population",
            ))

    def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
        """
        Run the optimization over the bounded state space and store the
        analysis Xa plus every requested supplementary diagnostic.
        """
        self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
        #
        # scipy's "popsize" is a multiplier of the state dimension whereas the
        # user-facing "PopulationSize" is an absolute population size: convert,
        # guarding against a zero multiplier when len_X > PopulationSize (fix:
        # round(...) could previously return 0, which scipy rejects).
        len_X = numpy.asarray(Xb).size
        popsize = max(1, round(self._parameters["PopulationSize"]/len_X))
        maxiter = min(self._parameters["MaximumNumberOfSteps"],round(self._parameters["MaximumNumberOfFunctionEvaluations"]/(popsize*len_X) - 1))
        logging.debug("%s Nombre maximal de générations = %i, taille de la population à chaque génération = %i"%(self._name, maxiter, popsize*len_X))
        #
        # Operators
        # ---------
        Hm = HO["Direct"].appliedTo
        #
        # Precompute the inverses of B and R
        # ----------------------------------
        BI = B.getI()
        RI = R.getI()
        #
        # Cost function definition
        # ------------------------
        # Fix: the user-selected criterion is bound as the default so that the
        # optimizer's bare CostFunction(x) calls honour it; previously the
        # "QualityCriterion" parameter was silently ignored and the AWLS
        # criterion was always used.
        def CostFunction(x, QualityMeasure=self._parameters["QualityCriterion"]):
            _X  = numpy.asmatrix(numpy.ravel( x )).T
            self.StoredVariables["CurrentState"].store( _X )
            _HX = Hm( _X )
            _HX = numpy.asmatrix(numpy.ravel( _HX )).T
            _Innovation = Y - _HX
            # The current-state simulation history is also required when only
            # the simulation at the current optimum is requested, because the
            # optimum is retrieved from that history by index.
            if self._toStore("SimulatedObservationAtCurrentState") or \
                self._toStore("SimulatedObservationAtCurrentOptimum"):
                self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX )
            if self._toStore("InnovationAtCurrentState"):
                self.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
            #
            if QualityMeasure in ["AugmentedWeightedLeastSquares","AWLS","DA"]:
                if BI is None or RI is None:
                    raise ValueError("Background and Observation error covariance matrix has to be properly defined!")
                Jb  = 0.5 * (_X - Xb).T * BI * (_X - Xb)
                Jo  = 0.5 * (_Innovation).T * RI * (_Innovation)
            elif QualityMeasure in ["WeightedLeastSquares","WLS"]:
                if RI is None:
                    raise ValueError("Observation error covariance matrix has to be properly defined!")
                Jb  = 0.
                Jo  = 0.5 * (_Innovation).T * RI * (_Innovation)
            elif QualityMeasure in ["LeastSquares","LS","L2"]:
                Jb  = 0.
                Jo  = 0.5 * (_Innovation).T * (_Innovation)
            elif QualityMeasure in ["AbsoluteValue","L1"]:
                Jb  = 0.
                Jo  = numpy.sum( numpy.abs(_Innovation) )
            elif QualityMeasure in ["MaximumError","ME"]:
                Jb  = 0.
                Jo  = numpy.max( numpy.abs(_Innovation) )
            else:
                # Unreachable when QualityCriterion passed listval validation,
                # but avoids a NameError on Jb/Jo for an unknown criterion.
                raise ValueError("Unknown quality measure criterion \"%s\""%str(QualityMeasure))
            #
            J   = float( Jb ) + float( Jo )
            #
            self.StoredVariables["CostFunctionJb"].store( Jb )
            self.StoredVariables["CostFunctionJo"].store( Jo )
            self.StoredVariables["CostFunctionJ" ].store( J )
            if self._toStore("IndexOfOptimum") or \
                self._toStore("CurrentOptimum") or \
                self._toStore("CostFunctionJAtCurrentOptimum") or \
                self._toStore("CostFunctionJbAtCurrentOptimum") or \
                self._toStore("CostFunctionJoAtCurrentOptimum") or \
                self._toStore("SimulatedObservationAtCurrentOptimum"):
                # nbPreviousSteps is a closure variable assigned in run() below,
                # before the optimizer first calls this function.
                IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
            if self._toStore("IndexOfOptimum"):
                self.StoredVariables["IndexOfOptimum"].store( IndexMin )
            if self._toStore("CurrentOptimum"):
                self.StoredVariables["CurrentOptimum"].store( self.StoredVariables["CurrentState"][IndexMin] )
            if self._toStore("SimulatedObservationAtCurrentOptimum"):
                self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
            if self._toStore("CostFunctionJAtCurrentOptimum"):
                self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] )
            if self._toStore("CostFunctionJbAtCurrentOptimum"):
                self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJb"][IndexMin] )
            if self._toStore("CostFunctionJoAtCurrentOptimum"):
                self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJo"][IndexMin] )
            return J
        #
        # Minimization of the functional
        # ------------------------------
        nbPreviousSteps = self.StoredVariables["CostFunctionJ"].stepnumber()
        #
        # NOTE(review): "optdisp" is not declared in __init__; it is presumably
        # set by BasicObjects.Algorithm._pre_run from the logging level — confirm.
        optResults = scipy.optimize.differential_evolution(
            CostFunction,
            self._parameters["Bounds"],
            strategy      = str(self._parameters["Minimizer"]).lower(),
            maxiter       = maxiter,
            popsize       = popsize,
            mutation      = self._parameters["MutationDifferentialWeight_F"],
            recombination = self._parameters["CrossOverProbability_CR"],
            disp          = self._parameters["optdisp"],
            )
        #
        # The optimum is taken from the stored evaluation history (not from
        # optResults) so that it is consistent with the stored diagnostics.
        IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
        Minimum  = self.StoredVariables["CurrentState"][IndexMin]
        #
        # Analysis
        # --------
        Xa = numpy.ravel( Minimum )
        #
        self.StoredVariables["Analysis"].store( Xa )
        #
        # Supplementary calculations and/or storage
        # -----------------------------------------
        if self._toStore("OMA") or self._toStore("SimulatedObservationAtOptimum"):
            if self._toStore("SimulatedObservationAtCurrentState"):
                HXa = self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin]
            elif self._toStore("SimulatedObservationAtCurrentOptimum"):
                HXa = self.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
            else:
                HXa = Hm(Xa)
        if self._toStore("Innovation") or \
           self._toStore("OMB"):
            # Fix: HXb was previously undefined here, raising a NameError as
            # soon as "Innovation" or "OMB" was requested. The background
            # simulation is shaped as a column, like the other innovations.
            HXb = numpy.asmatrix(numpy.ravel( Hm(Xb) )).T
            d  = Y - HXb
        if self._toStore("Innovation"):
            self.StoredVariables["Innovation"].store( numpy.ravel(d) )
        if self._toStore("OMB"):
            self.StoredVariables["OMB"].store( numpy.ravel(d) )
        if self._toStore("BMA"):
            self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
        if self._toStore("OMA"):
            self.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
        if self._toStore("SimulatedObservationAtBackground"):
            self.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(Hm(Xb)) )
        if self._toStore("SimulatedObservationAtOptimum"):
            self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) )
        #
        self._post_run()
        return 0
284
285 # ==============================================================================
if __name__ == "__main__":
    # Self-identification banner when the module is executed directly.
    print('\n AUTODIAGNOSTIC\n')