From 2478097ef62d168c457c6af1af06f7c0a24278f1 Mon Sep 17 00:00:00 2001 From: Jean-Philippe ARGAUD Date: Fri, 4 Mar 2022 08:21:55 +0100 Subject: [PATCH] Minor documentation and code review corrections (27) Safely remove some deprecated numpy.matrix and review internal variable storage --- src/daComposant/daAlgorithms/Atoms/cekf.py | 42 ++++-- src/daComposant/daAlgorithms/Atoms/ecwnlls.py | 2 +- src/daComposant/daAlgorithms/Atoms/exkf.py | 42 ++++-- .../daAlgorithms/Atoms/incr3dvar.py | 2 +- src/daComposant/daAlgorithms/Atoms/mmqr.py | 4 +- .../daAlgorithms/Atoms/psas3dvar.py | 2 +- .../daAlgorithms/Atoms/std3dvar.py | 2 +- .../daAlgorithms/Atoms/std4dvar.py | 2 +- .../daAlgorithms/Atoms/van3dvar.py | 2 +- .../DerivativeFreeOptimization.py | 53 +++---- .../daAlgorithms/DifferentialEvolution.py | 60 ++++---- .../daAlgorithms/ParticleSwarmOptimization.py | 97 ++++++------- .../daAlgorithms/QuantileRegression.py | 56 +++---- src/daComposant/daAlgorithms/TabuSearch.py | 137 ++++++++---------- src/daComposant/daCore/BasicObjects.py | 13 +- src/daComposant/daCore/PlatformInfo.py | 2 +- 16 files changed, 247 insertions(+), 271 deletions(-) diff --git a/src/daComposant/daAlgorithms/Atoms/cekf.py b/src/daComposant/daAlgorithms/Atoms/cekf.py index c668b5c..908ea2e 100644 --- a/src/daComposant/daAlgorithms/Atoms/cekf.py +++ b/src/daComposant/daAlgorithms/Atoms/cekf.py @@ -47,18 +47,21 @@ def cekf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q): else: duration = 2 __p = numpy.array(Y).size + __n = Xb.size # # Précalcul des inversions de B et R - if selfA._parameters["StoreInternalVariables"] \ - or selfA._toStore("CostFunctionJ") \ - or selfA._toStore("CostFunctionJb") \ - or selfA._toStore("CostFunctionJo") \ - or selfA._toStore("CurrentOptimum") \ - or selfA._toStore("APosterioriCovariance"): - BI = B.getI() + if selfA._parameters["StoreInternalVariables"] or \ + selfA._toStore("CostFunctionJ") or selfA._toStore("CostFunctionJAtCurrentOptimum") or \ + selfA._toStore("CostFunctionJb") or selfA._toStore("CostFunctionJbAtCurrentOptimum") or \ + selfA._toStore("CostFunctionJo") or selfA._toStore("CostFunctionJoAtCurrentOptimum") or \ + selfA._toStore("CurrentOptimum") or selfA._toStore("APosterioriCovariance") or \ + (__p > __n): + if isinstance(B,numpy.ndarray): + BI = numpy.linalg.inv(B) + else: + BI = B.getI() RI = R.getI() # - __n = Xb.size nbPreviousSteps = len(selfA.StoredVariables["Analysis"]) # if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]: @@ -75,6 +78,8 @@ def cekf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q): elif selfA._parameters["nextStep"]: Xn = selfA._getInternalState("Xn") Pn = selfA._getInternalState("Pn") + if hasattr(Pn,"asfullmatrix"): + Pn = Pn.asfullmatrix(Xn.size) # if selfA._parameters["EstimationOf"] == "Parameters": XaMin = Xn @@ -106,7 +111,7 @@ def cekf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q): Cm = CM["Tangent"].asMatrix(Xn_predicted) Cm = Cm.reshape(__n,Un.size) # ADAO & check shape Xn_predicted = Xn_predicted + Cm @ Un - Pn_predicted = Q + Mt * (Pn * Ma) + Pn_predicted = Q + Mt @ (Pn @ Ma) elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast # --- > Par principe, M = Id, Q = 0 Xn_predicted = Xn @@ -137,9 +142,22 @@ def cekf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q): Cm = Cm.reshape(__n,Un.size) # ADAO & check shape _Innovation = _Innovation - Cm @ Un # - Kn = Pn_predicted * Ha * numpy.linalg.inv(R + numpy.dot(Ht, Pn_predicted * Ha)) - Xn = Xn_predicted + Kn * _Innovation - Pn = Pn_predicted - Kn * Ht * Pn_predicted + if Ynpu.size <= 
Xn.size: + _HNHt = numpy.dot(Ht, Pn_predicted @ Ha) + _A = R + _HNHt + _u = numpy.linalg.solve( _A , _Innovation ) + Xn = Xn_predicted + (Pn_predicted @ (Ha @ _u)).reshape((-1,1)) + Kn = Pn_predicted @ (Ha @ numpy.linalg.inv(_A)) + else: + _HtRH = numpy.dot(Ha, RI @ Ht) + _A = numpy.linalg.inv(Pn_predicted) + _HtRH + _u = numpy.linalg.solve( _A , numpy.dot(Ha, RI @ _Innovation) ) + Xn = Xn_predicted + _u.reshape((-1,1)) + Kn = numpy.linalg.inv(_A) @ (Ha @ RI.asfullmatrix(Ynpu.size)) + # + Pn = Pn_predicted - Kn @ (Ht @ Pn_predicted) + Pn = (Pn + Pn.T) * 0.5 # Symétrie + Pn = Pn + mpr*numpy.trace( Pn ) * numpy.identity(Xn.size) # Positivité # if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection": Xn = ApplyBounds( Xn, selfA._parameters["Bounds"] ) diff --git a/src/daComposant/daAlgorithms/Atoms/ecwnlls.py b/src/daComposant/daAlgorithms/Atoms/ecwnlls.py index ec3c4cb..1d9ca01 100644 --- a/src/daComposant/daAlgorithms/Atoms/ecwnlls.py +++ b/src/daComposant/daAlgorithms/Atoms/ecwnlls.py @@ -207,7 +207,7 @@ def ecwnlls(selfA, Xb, Y, U, HO, CM, R, B, __storeState = False): ) # nfeval = infodict['nfev'] else: - raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"]) + raise ValueError("Error in minimizer name: %s is unkown"%selfA._parameters["Minimizer"]) # IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps # diff --git a/src/daComposant/daAlgorithms/Atoms/exkf.py b/src/daComposant/daAlgorithms/Atoms/exkf.py index 6936fc9..2361cb3 100644 --- a/src/daComposant/daAlgorithms/Atoms/exkf.py +++ b/src/daComposant/daAlgorithms/Atoms/exkf.py @@ -45,18 +45,21 @@ def exkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q): else: duration = 2 __p = numpy.array(Y).size + __n = Xb.size # # Précalcul des inversions de B et R - if selfA._parameters["StoreInternalVariables"] \ - or selfA._toStore("CostFunctionJ") \ - or selfA._toStore("CostFunctionJb") \ - or selfA._toStore("CostFunctionJo") \ - or selfA._toStore("CurrentOptimum") \ - or selfA._toStore("APosterioriCovariance"): - BI = B.getI() + if selfA._parameters["StoreInternalVariables"] or \ + selfA._toStore("CostFunctionJ") or selfA._toStore("CostFunctionJAtCurrentOptimum") or \ + selfA._toStore("CostFunctionJb") or selfA._toStore("CostFunctionJbAtCurrentOptimum") or \ + selfA._toStore("CostFunctionJo") or selfA._toStore("CostFunctionJoAtCurrentOptimum") or \ + selfA._toStore("CurrentOptimum") or selfA._toStore("APosterioriCovariance") or \ + (__p > __n): + if isinstance(B,numpy.ndarray): + BI = numpy.linalg.inv(B) + else: + BI = B.getI() RI = R.getI() # - __n = Xb.size nbPreviousSteps = len(selfA.StoredVariables["Analysis"]) # if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]: @@ -73,6 +76,8 @@ def exkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q): elif selfA._parameters["nextStep"]: Xn = selfA._getInternalState("Xn") Pn = selfA._getInternalState("Pn") + if hasattr(Pn,"asfullmatrix"): + Pn = Pn.asfullmatrix(Xn.size) # if selfA._parameters["EstimationOf"] == "Parameters": XaMin = Xn @@ -101,7 +106,7 @@ def exkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q): Cm = CM["Tangent"].asMatrix(Xn_predicted) Cm = Cm.reshape(__n,Un.size) # ADAO & check shape Xn_predicted = Xn_predicted + Cm @ Un - Pn_predicted = Q + Mt * (Pn * Ma) + Pn_predicted = Q + Mt @ (Pn @ Ma) elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast # --- > Par principe, M = Id, Q = 0 Xn_predicted = Xn @@ -129,9 +134,22 @@ def exkf(selfA, Xb, 
Y, U, HO, EM, CM, R, B, Q): Cm = Cm.reshape(__n,Un.size) # ADAO & check shape _Innovation = _Innovation - Cm @ Un # - Kn = Pn_predicted * Ha * numpy.linalg.inv(R + numpy.dot(Ht, Pn_predicted * Ha)) - Xn = Xn_predicted + Kn * _Innovation - Pn = Pn_predicted - Kn * Ht * Pn_predicted + if Ynpu.size <= Xn.size: + _HNHt = numpy.dot(Ht, Pn_predicted @ Ha) + _A = R + _HNHt + _u = numpy.linalg.solve( _A , _Innovation ) + Xn = Xn_predicted + (Pn_predicted @ (Ha @ _u)).reshape((-1,1)) + Kn = Pn_predicted @ (Ha @ numpy.linalg.inv(_A)) + else: + _HtRH = numpy.dot(Ha, RI @ Ht) + _A = numpy.linalg.inv(Pn_predicted) + _HtRH + _u = numpy.linalg.solve( _A , numpy.dot(Ha, RI @ _Innovation) ) + Xn = Xn_predicted + _u.reshape((-1,1)) + Kn = numpy.linalg.inv(_A) @ (Ha @ RI.asfullmatrix(Ynpu.size)) + # + Pn = Pn_predicted - Kn @ (Ht @ Pn_predicted) + Pn = (Pn + Pn.T) * 0.5 # Symétrie + Pn = Pn + mpr*numpy.trace( Pn ) * numpy.identity(Xn.size) # Positivité # Xa = Xn # Pointeurs #-------------------------- diff --git a/src/daComposant/daAlgorithms/Atoms/incr3dvar.py b/src/daComposant/daAlgorithms/Atoms/incr3dvar.py index 10b903c..f553543 100644 --- a/src/daComposant/daAlgorithms/Atoms/incr3dvar.py +++ b/src/daComposant/daAlgorithms/Atoms/incr3dvar.py @@ -197,7 +197,7 @@ def incr3dvar(selfA, Xb, Y, U, HO, CM, R, B, __storeState = False): full_output = True, ) else: - raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"]) + raise ValueError("Error in minimizer name: %s is unkown"%selfA._parameters["Minimizer"]) # IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps MinJ = selfA.StoredVariables["CostFunctionJ"][IndexMin] diff --git a/src/daComposant/daAlgorithms/Atoms/mmqr.py b/src/daComposant/daAlgorithms/Atoms/mmqr.py index b1a58a0..48bf735 100644 --- a/src/daComposant/daAlgorithms/Atoms/mmqr.py +++ b/src/daComposant/daAlgorithms/Atoms/mmqr.py @@ -78,14 +78,14 @@ def mmqr( Derivees = numpy.array(fprime(variables)) Derivees = Derivees.reshape(n,p) # ADAO & check shape DeriveesT = Derivees.transpose() - M = numpy.dot( DeriveesT , (numpy.array(numpy.matrix(p*[poids,]).T)*Derivees) ) + M = numpy.dot( DeriveesT , (numpy.array(p*[poids,]).T * Derivees) ) SM = numpy.transpose(numpy.dot( DeriveesT , veps )) step = - numpy.linalg.lstsq( M, SM, rcond=-1 )[0] # variables = variables + step if bounds is not None: # Attention : boucle infinie à éviter si un intervalle est trop petit - while( (variables < numpy.ravel(numpy.asmatrix(bounds)[:,0])).any() or (variables > numpy.ravel(numpy.asmatrix(bounds)[:,1])).any() ): + while( (variables < numpy.ravel(numpy.asarray(bounds)[:,0])).any() or (variables > numpy.ravel(numpy.asarray(bounds)[:,1])).any() ): step = step/2. 
variables = variables - step residus = mesures - numpy.ravel( func(variables) ) diff --git a/src/daComposant/daAlgorithms/Atoms/psas3dvar.py b/src/daComposant/daAlgorithms/Atoms/psas3dvar.py index 8f69ece..dad9e59 100644 --- a/src/daComposant/daAlgorithms/Atoms/psas3dvar.py +++ b/src/daComposant/daAlgorithms/Atoms/psas3dvar.py @@ -175,7 +175,7 @@ def psas3dvar(selfA, Xb, Y, U, HO, CM, R, B, __storeState = False): full_output = True, ) else: - raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"]) + raise ValueError("Error in minimizer name: %s is unkown"%selfA._parameters["Minimizer"]) # IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps MinJ = selfA.StoredVariables["CostFunctionJ"][IndexMin] diff --git a/src/daComposant/daAlgorithms/Atoms/std3dvar.py b/src/daComposant/daAlgorithms/Atoms/std3dvar.py index 5ee2976..bd70689 100644 --- a/src/daComposant/daAlgorithms/Atoms/std3dvar.py +++ b/src/daComposant/daAlgorithms/Atoms/std3dvar.py @@ -180,7 +180,7 @@ def std3dvar(selfA, Xb, Y, U, HO, CM, R, B, __storeState = False): full_output = True, ) else: - raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"]) + raise ValueError("Error in minimizer name: %s is unkown"%selfA._parameters["Minimizer"]) # IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps MinJ = selfA.StoredVariables["CostFunctionJ"][IndexMin] diff --git a/src/daComposant/daAlgorithms/Atoms/std4dvar.py b/src/daComposant/daAlgorithms/Atoms/std4dvar.py index 1eca826..cd582a8 100644 --- a/src/daComposant/daAlgorithms/Atoms/std4dvar.py +++ b/src/daComposant/daAlgorithms/Atoms/std4dvar.py @@ -241,7 +241,7 @@ def std4dvar(selfA, Xb, Y, U, HO, EM, CM, R, B, Q): full_output = True, ) else: - raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"]) + raise ValueError("Error in minimizer name: %s is unkown"%selfA._parameters["Minimizer"]) # IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps # diff --git a/src/daComposant/daAlgorithms/Atoms/van3dvar.py b/src/daComposant/daAlgorithms/Atoms/van3dvar.py index 0e96130..5ae73fa 100644 --- a/src/daComposant/daAlgorithms/Atoms/van3dvar.py +++ b/src/daComposant/daAlgorithms/Atoms/van3dvar.py @@ -189,7 +189,7 @@ def van3dvar(selfA, Xb, Y, U, HO, CM, R, B, __storeState = False): full_output = True, ) else: - raise ValueError("Error in Minimizer name: %s"%selfA._parameters["Minimizer"]) + raise ValueError("Error in minimizer name: %s is unkown"%selfA._parameters["Minimizer"]) # IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps MinJ = selfA.StoredVariables["CostFunctionJ"][IndexMin] diff --git a/src/daComposant/daAlgorithms/DerivativeFreeOptimization.py b/src/daComposant/daAlgorithms/DerivativeFreeOptimization.py index 299a1b4..b191c8f 100644 --- a/src/daComposant/daAlgorithms/DerivativeFreeOptimization.py +++ b/src/daComposant/daAlgorithms/DerivativeFreeOptimization.py @@ -20,9 +20,8 @@ # # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D -import logging +import numpy, logging, scipy.optimize from daCore import BasicObjects, PlatformInfo -import numpy, scipy.optimize # ============================================================================== class ElementaryAlgorithm(BasicObjects.Algorithm): @@ -74,11 +73,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): typecast = str, message = "Critère de qualité 
utilisé", listval = [ - "AugmentedWeightedLeastSquares","AWLS","DA", - "WeightedLeastSquares","WLS", - "LeastSquares","LS","L2", - "AbsoluteValue","L1", - "MaximumError","ME", + "AugmentedWeightedLeastSquares", "AWLS", "DA", + "WeightedLeastSquares", "WLS", + "LeastSquares", "LS", "L2", + "AbsoluteValue", "L1", + "MaximumError", "ME", ], ) self.defineRequiredParameter( @@ -120,7 +119,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): message = "Liste des valeurs de bornes", ) self.requireInputArguments( - mandatory= ("Xb", "Y", "HO", "R", "B" ), + mandatory= ("Xb", "Y", "HO", "R", "B"), ) self.setAttributes(tags=( "Optimization", @@ -135,23 +134,16 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): logging.warning("%s Minimization by SIMPLEX is forced because %s is unavailable (COBYLA, POWELL are also available)"%(self._name,self._parameters["Minimizer"])) self._parameters["Minimizer"] = "SIMPLEX" # - # Opérateurs - # ---------- Hm = HO["Direct"].appliedTo # - # Précalcul des inversions de B et R - # ---------------------------------- BI = B.getI() RI = R.getI() # - # Définition de la fonction-coût - # ------------------------------ def CostFunction(x, QualityMeasure="AugmentedWeightedLeastSquares"): - _X = numpy.asmatrix(numpy.ravel( x )).T - self.StoredVariables["CurrentState"].store( _X ) - _HX = Hm( _X ) - _HX = numpy.asmatrix(numpy.ravel( _HX )).T + _X = numpy.ravel( x ).reshape((-1,1)) + _HX = numpy.ravel( Hm( _X ) ).reshape((-1,1)) _Innovation = Y - _HX + self.StoredVariables["CurrentState"].store( _X ) if self._toStore("SimulatedObservationAtCurrentState") or \ self._toStore("SimulatedObservationAtCurrentOptimum"): self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX ) @@ -160,17 +152,17 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # if QualityMeasure in ["AugmentedWeightedLeastSquares","AWLS","DA"]: if BI is None or RI is None: - raise ValueError("Background and Observation error covariance matrix has to be properly defined!") - Jb = 0.5 * (_X - Xb).T * (BI * (_X - Xb)) - Jo = 0.5 * _Innovation.T * (RI * _Innovation) + raise ValueError("Background and Observation error covariance matrices has to be properly defined!") + Jb = 0.5 * (_X - Xb).T @ (BI @ (_X - Xb)) + Jo = 0.5 * _Innovation.T @ (RI @ _Innovation) elif QualityMeasure in ["WeightedLeastSquares","WLS"]: if RI is None: raise ValueError("Observation error covariance matrix has to be properly defined!") Jb = 0. - Jo = 0.5 * (_Innovation).T * RI * (_Innovation) + Jo = 0.5 * _Innovation.T @ (RI @ _Innovation) elif QualityMeasure in ["LeastSquares","LS","L2"]: Jb = 0. - Jo = 0.5 * (_Innovation).T * (_Innovation) + Jo = 0.5 * _Innovation.T @ _Innovation elif QualityMeasure in ["AbsoluteValue","L1"]: Jb = 0. Jo = numpy.sum( numpy.abs(_Innovation) ) @@ -205,8 +197,6 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJo"][IndexMin] ) return J # - # Point de démarrage de l'optimisation : Xini = Xb - # ------------------------------------ Xini = numpy.ravel(Xb) if len(Xini) < 2 and self._parameters["Minimizer"] == "NEWUOA": raise ValueError("The minimizer %s can not be used when the optimisation state dimension is 1. 
Please choose another minimizer."%self._parameters["Minimizer"]) @@ -382,7 +372,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value())) print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result())) else: - raise ValueError("Error in Minimizer name: %s"%self._parameters["Minimizer"]) + raise ValueError("Error in minimizer name: %s is unkown"%self._parameters["Minimizer"]) # IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps MinJ = self.StoredVariables["CostFunctionJ"][IndexMin] @@ -390,7 +380,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # # Obtention de l'analyse # ---------------------- - Xa = numpy.ravel( Minimum ) + Xa = Minimum # self.StoredVariables["Analysis"].store( Xa ) # @@ -404,11 +394,12 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): HXa = self.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1] else: HXa = Hm(Xa) + HXa = HXa.reshape((-1,1)) if self._toStore("Innovation") or \ self._toStore("OMB") or \ self._toStore("SimulatedObservationAtBackground"): - HXb = Hm(Xb) - Innovation = Y - HXb + HXb = Hm(Xb).reshape((-1,1)) + Innovation = Y - HXb if self._toStore("Innovation"): self.StoredVariables["Innovation"].store( Innovation ) if self._toStore("OMB"): @@ -416,13 +407,13 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): if self._toStore("BMA"): self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) ) if self._toStore("OMA"): - self.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) ) + self.StoredVariables["OMA"].store( Y - HXa ) if self._toStore("SimulatedObservationAtBackground"): self.StoredVariables["SimulatedObservationAtBackground"].store( HXb ) if self._toStore("SimulatedObservationAtOptimum"): self.StoredVariables["SimulatedObservationAtOptimum"].store( HXa ) # - self._post_run() + self._post_run(HO) return 0 # ============================================================================== diff --git a/src/daComposant/daAlgorithms/DifferentialEvolution.py b/src/daComposant/daAlgorithms/DifferentialEvolution.py index 11fdbba..0a624a4 100644 --- a/src/daComposant/daAlgorithms/DifferentialEvolution.py +++ b/src/daComposant/daAlgorithms/DifferentialEvolution.py @@ -20,7 +20,7 @@ # # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D -import logging, numpy, scipy.optimize +import numpy, logging, scipy.optimize from daCore import BasicObjects # ============================================================================== @@ -63,6 +63,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): message = "Nombre maximal d'évaluations de la fonction", minval = -1, ) + self.defineRequiredParameter( + name = "SetSeed", + typecast = numpy.random.seed, + message = "Graine fixée pour le générateur aléatoire", + ) self.defineRequiredParameter( name = "PopulationSize", default = 100, @@ -92,11 +97,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): typecast = str, message = "Critère de qualité utilisé", listval = [ - "AugmentedWeightedLeastSquares","AWLS","DA", - "WeightedLeastSquares","WLS", - "LeastSquares","LS","L2", - "AbsoluteValue","L1", - "MaximumError","ME", + "AugmentedWeightedLeastSquares", "AWLS", "DA", + "WeightedLeastSquares", "WLS", + "LeastSquares", "LS", "L2", + "AbsoluteValue", "L1", + "MaximumError", "ME", ], ) self.defineRequiredParameter( @@ -133,17 +138,12 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): "SimulatedObservationAtOptimum", ] 
) - self.defineRequiredParameter( - name = "SetSeed", - typecast = numpy.random.seed, - message = "Graine fixée pour le générateur aléatoire", - ) self.defineRequiredParameter( # Pas de type name = "Bounds", message = "Liste des valeurs de bornes", ) self.requireInputArguments( - mandatory= ("Xb", "Y", "HO", "R", "B" ), + mandatory= ("Xb", "Y", "HO", "R", "B"), ) self.setAttributes(tags=( "Optimization", @@ -160,23 +160,16 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): maxiter = min(self._parameters["MaximumNumberOfSteps"],round(self._parameters["MaximumNumberOfFunctionEvaluations"]/(popsize*len_X) - 1)) logging.debug("%s Nombre maximal de générations = %i, taille de la population à chaque génération = %i"%(self._name, maxiter, popsize*len_X)) # - # Opérateurs - # ---------- Hm = HO["Direct"].appliedTo # - # Précalcul des inversions de B et R - # ---------------------------------- BI = B.getI() RI = R.getI() # - # Définition de la fonction-coût - # ------------------------------ def CostFunction(x, QualityMeasure="AugmentedWeightedLeastSquares"): - _X = numpy.asmatrix(numpy.ravel( x )).T - self.StoredVariables["CurrentState"].store( _X ) - _HX = Hm( _X ) - _HX = numpy.asmatrix(numpy.ravel( _HX )).T + _X = numpy.ravel( x ).reshape((-1,1)) + _HX = numpy.ravel( Hm( _X ) ).reshape((-1,1)) _Innovation = Y - _HX + self.StoredVariables["CurrentState"].store( _X ) if self._toStore("SimulatedObservationAtCurrentState") or \ self._toStore("SimulatedObservationAtCurrentOptimum"): self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX ) @@ -185,17 +178,17 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # if QualityMeasure in ["AugmentedWeightedLeastSquares","AWLS","DA"]: if BI is None or RI is None: - raise ValueError("Background and Observation error covariance matrix has to be properly defined!") - Jb = 0.5 * (_X - Xb).T * (BI * (_X - Xb)) - Jo = 0.5 * _Innovation.T * (RI * _Innovation) + raise ValueError("Background and Observation error covariance matrices has to be properly defined!") + Jb = 0.5 * (_X - Xb).T @ (BI @ (_X - Xb)) + Jo = 0.5 * _Innovation.T @ (RI @ _Innovation) elif QualityMeasure in ["WeightedLeastSquares","WLS"]: if RI is None: raise ValueError("Observation error covariance matrix has to be properly defined!") Jb = 0. - Jo = 0.5 * (_Innovation).T * RI * (_Innovation) + Jo = 0.5 * _Innovation.T @ (RI @ _Innovation) elif QualityMeasure in ["LeastSquares","LS","L2"]: Jb = 0. - Jo = 0.5 * (_Innovation).T * (_Innovation) + Jo = 0.5 * _Innovation.T @ _Innovation elif QualityMeasure in ["AbsoluteValue","L1"]: Jb = 0. 
Jo = numpy.sum( numpy.abs(_Innovation) ) @@ -230,8 +223,6 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJo"][IndexMin] ) return J # - # Point de démarrage de l'optimisation : Xini = Xb - # ------------------------------------ Xini = numpy.ravel(Xb) # # Minimisation de la fonctionnelle @@ -255,7 +246,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # # Obtention de l'analyse # ---------------------- - Xa = numpy.ravel( Minimum ) + Xa = Minimum # self.StoredVariables["Analysis"].store( Xa ) # @@ -269,11 +260,12 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): HXa = self.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1] else: HXa = Hm(Xa) + HXa = HXa.reshape((-1,1)) if self._toStore("Innovation") or \ self._toStore("OMB") or \ self._toStore("SimulatedObservationAtBackground"): - HXb = Hm(Xb) - Innovation = Y - HXb + HXb = Hm(Xb).reshape((-1,1)) + Innovation = Y - HXb if self._toStore("Innovation"): self.StoredVariables["Innovation"].store( Innovation ) if self._toStore("OMB"): @@ -281,13 +273,13 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): if self._toStore("BMA"): self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) ) if self._toStore("OMA"): - self.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) ) + self.StoredVariables["OMA"].store( Y - HXa ) if self._toStore("SimulatedObservationAtBackground"): self.StoredVariables["SimulatedObservationAtBackground"].store( HXb ) if self._toStore("SimulatedObservationAtOptimum"): self.StoredVariables["SimulatedObservationAtOptimum"].store( HXa ) # - self._post_run() + self._post_run(HO) return 0 # ============================================================================== diff --git a/src/daComposant/daAlgorithms/ParticleSwarmOptimization.py b/src/daComposant/daAlgorithms/ParticleSwarmOptimization.py index 408a6d5..c7d743d 100644 --- a/src/daComposant/daAlgorithms/ParticleSwarmOptimization.py +++ b/src/daComposant/daAlgorithms/ParticleSwarmOptimization.py @@ -20,9 +20,8 @@ # # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D -import logging +import numpy, logging, copy from daCore import BasicObjects -import numpy, copy # ============================================================================== class ElementaryAlgorithm(BasicObjects.Algorithm): @@ -75,17 +74,12 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): typecast = str, message = "Critère de qualité utilisé", listval = [ - "DA", - "AugmentedWeightedLeastSquares", "AWLS", + "AugmentedWeightedLeastSquares", "AWLS", "DA", "WeightedLeastSquares", "WLS", "LeastSquares", "LS", "L2", "AbsoluteValue", "L1", "MaximumError", "ME", ], - listadv = [ - "AugmentedPonderatedLeastSquares","APLS", - "PonderatedLeastSquares","PLS", - ] ) self.defineRequiredParameter( name = "StoreInternalVariables", @@ -124,6 +118,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): self.setAttributes(tags=( "Optimization", "NonLinear", + "MetaHeuristic", "Population", )) @@ -148,32 +143,30 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): BI = B.getI() RI = R.getI() # - # Définition de la fonction-coût - # ------------------------------ def CostFunction(x, QualityMeasure="AugmentedWeightedLeastSquares"): - _X = numpy.asmatrix(numpy.ravel( x )).T - _HX = Hm( _X ) - _HX = numpy.asmatrix(numpy.ravel( _HX )).T + _X = numpy.ravel( x ).reshape((-1,1)) + _HX = numpy.ravel( Hm( _X ) ).reshape((-1,1)) + _Innovation = Y - _HX # - if 
QualityMeasure in ["AugmentedWeightedLeastSquares","AWLS","AugmentedPonderatedLeastSquares","APLS","DA"]: + if QualityMeasure in ["AugmentedWeightedLeastSquares","AWLS","DA"]: if BI is None or RI is None: - raise ValueError("Background and Observation error covariance matrix has to be properly defined!") - Jb = 0.5 * (_X - Xb).T * BI * (_X - Xb) - Jo = 0.5 * (Y - _HX).T * RI * (Y - _HX) - elif QualityMeasure in ["WeightedLeastSquares","WLS","PonderatedLeastSquares","PLS"]: + raise ValueError("Background and Observation error covariance matrices has to be properly defined!") + Jb = 0.5 * (_X - Xb).T @ (BI @ (_X - Xb)) + Jo = 0.5 * _Innovation.T @ (RI @ _Innovation) + elif QualityMeasure in ["WeightedLeastSquares","WLS"]: if RI is None: raise ValueError("Observation error covariance matrix has to be properly defined!") Jb = 0. - Jo = 0.5 * (Y - _HX).T * RI * (Y - _HX) + Jo = 0.5 * _Innovation.T @ (RI @ _Innovation) elif QualityMeasure in ["LeastSquares","LS","L2"]: Jb = 0. - Jo = 0.5 * (Y - _HX).T * (Y - _HX) + Jo = 0.5 * _Innovation.T @ _Innovation elif QualityMeasure in ["AbsoluteValue","L1"]: Jb = 0. - Jo = numpy.sum( numpy.abs(Y - _HX) ) + Jo = numpy.sum( numpy.abs(_Innovation) ) elif QualityMeasure in ["MaximumError","ME"]: Jb = 0. - Jo = numpy.max( numpy.abs(Y - _HX) ) + Jo = numpy.max( numpy.abs(_Innovation) ) # J = float( Jb ) + float( Jo ) # @@ -198,13 +191,13 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): for i in range(nbparam) : PosInsect.append(numpy.random.uniform(low=SpaceLow[i], high=SpaceUp[i], size=self._parameters["NumberOfInsects"])) VelocityInsect.append(numpy.random.uniform(low=-LimitVelocity[i], high=LimitVelocity[i], size=self._parameters["NumberOfInsects"])) - VelocityInsect = numpy.matrix(VelocityInsect) - PosInsect = numpy.matrix(PosInsect) + VelocityInsect = numpy.array(VelocityInsect) + PosInsect = numpy.array(PosInsect) # BestPosInsect = numpy.array(PosInsect) qBestPosInsect = [] - Best = copy.copy(SpaceLow) - qBest = CostFunction(Best,self._parameters["QualityCriterion"]) + _Best = copy.copy(SpaceLow) + _qualityBest = CostFunction(_Best,self._parameters["QualityCriterion"]) NumberOfFunctionEvaluations += 1 # for i in range(self._parameters["NumberOfInsects"]): @@ -212,17 +205,17 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): quality = CostFunction(insect,self._parameters["QualityCriterion"]) NumberOfFunctionEvaluations += 1 qBestPosInsect.append(quality) - if quality < qBest: - Best = copy.copy( insect ) - qBest = copy.copy( quality ) - logging.debug("%s Initialisation, Insecte = %s, Qualité = %s"%(self._name, str(Best), str(qBest))) + if quality < _qualityBest: + _Best = copy.copy( insect ) + _qualityBest = copy.copy( quality ) + logging.debug("%s Initialisation, Insecte = %s, Qualité = %s"%(self._name, str(_Best), str(_qualityBest))) # self.StoredVariables["CurrentIterationNumber"].store( len(self.StoredVariables["CostFunctionJ"]) ) if self._parameters["StoreInternalVariables"] or self._toStore("CurrentState"): - self.StoredVariables["CurrentState"].store( Best ) + self.StoredVariables["CurrentState"].store( _Best ) self.StoredVariables["CostFunctionJb"].store( 0. ) self.StoredVariables["CostFunctionJo"].store( 0. 
) - self.StoredVariables["CostFunctionJ" ].store( qBest ) + self.StoredVariables["CostFunctionJ" ].store( _qualityBest ) # # Minimisation de la fonctionnelle # -------------------------------- @@ -232,57 +225,55 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): rp = numpy.random.uniform(size=nbparam) rg = numpy.random.uniform(size=nbparam) for j in range(nbparam) : - VelocityInsect[j,i] = self._parameters["SwarmVelocity"]*VelocityInsect[j,i] + Phip*rp[j]*(BestPosInsect[j,i]-PosInsect[j,i]) + Phig*rg[j]*(Best[j]-PosInsect[j,i]) + VelocityInsect[j,i] = self._parameters["SwarmVelocity"]*VelocityInsect[j,i] + Phip*rp[j]*(BestPosInsect[j,i]-PosInsect[j,i]) + Phig*rg[j]*(_Best[j]-PosInsect[j,i]) PosInsect[j,i] = PosInsect[j,i]+VelocityInsect[j,i] quality = CostFunction(insect,self._parameters["QualityCriterion"]) NumberOfFunctionEvaluations += 1 if quality < qBestPosInsect[i]: BestPosInsect[:,i] = copy.copy( insect ) qBestPosInsect[i] = copy.copy( quality ) - if quality < qBest : - Best = copy.copy( insect ) - qBest = copy.copy( quality ) - logging.debug("%s Etape %i, Insecte = %s, Qualité = %s"%(self._name, n, str(Best), str(qBest))) + if quality < _qualityBest : + _Best = copy.copy( insect ) + _qualityBest = copy.copy( quality ) + logging.debug("%s Etape %i, Insecte = %s, Qualité = %s"%(self._name, n, str(_Best), str(_qualityBest))) # self.StoredVariables["CurrentIterationNumber"].store( len(self.StoredVariables["CostFunctionJ"]) ) if self._parameters["StoreInternalVariables"] or self._toStore("CurrentState"): - self.StoredVariables["CurrentState"].store( Best ) + self.StoredVariables["CurrentState"].store( _Best ) if self._toStore("SimulatedObservationAtCurrentState"): - _HmX = Hm( numpy.asmatrix(numpy.ravel( Best )).T ) - _HmX = numpy.asmatrix(numpy.ravel( _HmX )).T + _HmX = Hm( _Best ) self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HmX ) self.StoredVariables["CostFunctionJb"].store( 0. ) self.StoredVariables["CostFunctionJo"].store( 0. 
) - self.StoredVariables["CostFunctionJ" ].store( qBest ) + self.StoredVariables["CostFunctionJ" ].store( _qualityBest ) if NumberOfFunctionEvaluations > self._parameters["MaximumNumberOfFunctionEvaluations"]: logging.debug("%s Stopping search because the number %i of function evaluations is exceeding the maximum %i."%(self._name, NumberOfFunctionEvaluations, self._parameters["MaximumNumberOfFunctionEvaluations"])) break # # Obtention de l'analyse # ---------------------- - Xa = numpy.asmatrix(numpy.ravel( Best )).T + Xa = _Best # - self.StoredVariables["Analysis"].store( Xa.A1 ) + self.StoredVariables["Analysis"].store( Xa ) # + # Calculs et/ou stockages supplémentaires + # --------------------------------------- + if self._toStore("OMA") or \ + self._toStore("SimulatedObservationAtOptimum"): + HXa = Hm(Xa) if self._toStore("Innovation") or \ self._toStore("OMB") or \ self._toStore("SimulatedObservationAtBackground"): HXb = Hm(Xb) - d = Y - HXb - if self._toStore("OMA") or \ - self._toStore("SimulatedObservationAtOptimum"): - HXa = Hm(Xa) - # - # Calculs et/ou stockages supplémentaires - # --------------------------------------- + Innovation = Y - HXb if self._toStore("Innovation"): - self.StoredVariables["Innovation"].store( d ) + self.StoredVariables["Innovation"].store( Innovation ) + if self._toStore("OMB"): + self.StoredVariables["OMB"].store( Innovation ) if self._toStore("BMA"): self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) ) if self._toStore("OMA"): self.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) ) - if self._toStore("OMB"): - self.StoredVariables["OMB"].store( d ) if self._toStore("SimulatedObservationAtBackground"): self.StoredVariables["SimulatedObservationAtBackground"].store( HXb ) if self._toStore("SimulatedObservationAtOptimum"): diff --git a/src/daComposant/daAlgorithms/QuantileRegression.py b/src/daComposant/daAlgorithms/QuantileRegression.py index 390e48f..f806736 100644 --- a/src/daComposant/daAlgorithms/QuantileRegression.py +++ b/src/daComposant/daAlgorithms/QuantileRegression.py @@ -107,28 +107,12 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # Hm = HO["Direct"].appliedTo # - # Utilisation éventuelle d'un vecteur H(Xb) précalculé - # ---------------------------------------------------- - if HO["AppliedInX"] is not None and "HXb" in HO["AppliedInX"]: - HXb = Hm( Xb, HO["AppliedInX"]["HXb"] ) - else: - HXb = Hm( Xb ) - HXb = numpy.asmatrix(numpy.ravel( HXb )).T - if Y.size != HXb.size: - raise ValueError("The size %i of observations Y and %i of observed calculation H(X) are different, they have to be identical."%(Y.size,HXb.size)) - if max(Y.shape) != max(HXb.shape): - raise ValueError("The shapes %s of observations Y and %s of observed calculation H(X) are different, they have to be identical."%(Y.shape,HXb.shape)) - d = Y - HXb - # - # Définition de la fonction-coût - # ------------------------------ def CostFunction(x): - _X = numpy.asmatrix(numpy.ravel( x )).T + _X = numpy.asarray(x).reshape((-1,1)) if self._parameters["StoreInternalVariables"] or \ self._toStore("CurrentState"): self.StoredVariables["CurrentState"].store( _X ) - _HX = Hm( _X ) - _HX = numpy.asmatrix(numpy.ravel( _HX )).T + _HX = numpy.asarray(Hm( _X )).reshape((-1,1)) if self._toStore("SimulatedObservationAtCurrentState"): self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX ) Jb = 0. 
@@ -142,12 +126,10 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): return _HX # def GradientOfCostFunction(x): - _X = numpy.asmatrix(numpy.ravel( x )).T + _X = numpy.asarray(x).reshape((-1,1)) Hg = HO["Tangent"].asMatrix( _X ) return Hg # - # Point de démarrage de l'optimisation - # ------------------------------------ Xini = self._parameters["InitializationPoint"] # # Minimisation de la fonctionnelle @@ -163,35 +145,37 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): toler = self._parameters["CostDecrementTolerance"], y = Y, ) - nfeval = Informations[2] - rc = Informations[4] else: - raise ValueError("Error in Minimizer name: %s"%self._parameters["Minimizer"]) + raise ValueError("Error in minimizer name: %s is unkown"%self._parameters["Minimizer"]) # # Obtention de l'analyse # ---------------------- - Xa = numpy.asmatrix(numpy.ravel( Minimum )).T - # - self.StoredVariables["Analysis"].store( Xa.A1 ) + Xa = Minimum # - if self._toStore("OMA") or \ - self._toStore("SimulatedObservationAtOptimum"): - HXa = Hm( Xa ) + self.StoredVariables["Analysis"].store( Xa ) # # Calculs et/ou stockages supplémentaires # --------------------------------------- + if self._toStore("OMA") or \ + self._toStore("SimulatedObservationAtOptimum"): + HXa = Hm(Xa).reshape((-1,1)) + if self._toStore("Innovation") or \ + self._toStore("OMB") or \ + self._toStore("SimulatedObservationAtBackground"): + HXb = Hm(Xb).reshape((-1,1)) + Innovation = Y - HXb if self._toStore("Innovation"): - self.StoredVariables["Innovation"].store( numpy.ravel(d) ) + self.StoredVariables["Innovation"].store( Innovation ) + if self._toStore("OMB"): + self.StoredVariables["OMB"].store( Innovation ) if self._toStore("BMA"): self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) ) if self._toStore("OMA"): - self.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) ) - if self._toStore("OMB"): - self.StoredVariables["OMB"].store( numpy.ravel(d) ) + self.StoredVariables["OMA"].store( Y - HXa ) if self._toStore("SimulatedObservationAtBackground"): - self.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(HXb) ) + self.StoredVariables["SimulatedObservationAtBackground"].store( HXb ) if self._toStore("SimulatedObservationAtOptimum"): - self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) ) + self.StoredVariables["SimulatedObservationAtOptimum"].store( HXa ) # self._post_run(HO) return 0 diff --git a/src/daComposant/daAlgorithms/TabuSearch.py b/src/daComposant/daAlgorithms/TabuSearch.py index a33ba99..b6bf80c 100644 --- a/src/daComposant/daAlgorithms/TabuSearch.py +++ b/src/daComposant/daAlgorithms/TabuSearch.py @@ -65,22 +65,24 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): default = "AugmentedWeightedLeastSquares", typecast = str, message = "Critère de qualité utilisé", - listval = ["AugmentedWeightedLeastSquares","AWLS","DA", - "WeightedLeastSquares","WLS", - "LeastSquares","LS","L2", - "AbsoluteValue","L1", - "MaximumError","ME"], + listval = [ + "AugmentedWeightedLeastSquares", "AWLS", "DA", + "WeightedLeastSquares", "WLS", + "LeastSquares", "LS", "L2", + "AbsoluteValue", "L1", + "MaximumError", "ME", + ], ) self.defineRequiredParameter( name = "NoiseHalfRange", default = [], - typecast = numpy.matrix, + typecast = numpy.ravel, message = "Demi-amplitude des perturbations uniformes centrées d'état pour chaque composante de l'état", ) self.defineRequiredParameter( name = "StandardDeviation", default = [], - typecast = numpy.matrix, + typecast = numpy.ravel, 
message = "Ecart-type des perturbations gaussiennes d'état pour chaque composante de l'état", ) self.defineRequiredParameter( @@ -135,7 +137,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q) # if self._parameters["NoiseDistribution"] == "Uniform": - nrange = numpy.ravel(self._parameters["NoiseHalfRange"]) # Vecteur + nrange = self._parameters["NoiseHalfRange"] # Vecteur if nrange.size != Xb.size: raise ValueError("Noise generation by Uniform distribution requires range for all variable increments. The actual noise half range vector is:\n%s"%nrange) elif self._parameters["NoiseDistribution"] == "Gaussian": @@ -143,19 +145,13 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): if sigma.size != Xb.size: raise ValueError("Noise generation by Gaussian distribution requires standard deviation for all variable increments. The actual standard deviation vector is:\n%s"%sigma) # - # Opérateur d'observation - # ----------------------- Hm = HO["Direct"].appliedTo # - # Précalcul des inversions de B et R - # ---------------------------------- BI = B.getI() RI = R.getI() # - # Définition de la fonction de deplacement - # ---------------------------------------- def Tweak( x, NoiseDistribution, NoiseAddingProbability ): - _X = numpy.matrix(numpy.ravel( x )).T + _X = numpy.array( x, dtype=float, copy=True ).ravel().reshape((-1,1)) if NoiseDistribution == "Uniform": for i in range(_X.size): if NoiseAddingProbability >= numpy.random.uniform(): @@ -171,32 +167,49 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # return _X # - def StateInList( x, TL ): + def StateInList( x, _TL ): _X = numpy.ravel( x ) _xInList = False - for state in TL: + for state in _TL: if numpy.all(numpy.abs( _X - numpy.ravel(state) ) <= 1e-16*numpy.abs(_X)): _xInList = True # if _xInList: import sys ; sys.exit() return _xInList # + def CostFunction(x, QualityMeasure="AugmentedWeightedLeastSquares"): + _X = numpy.ravel( x ).reshape((-1,1)) + _HX = numpy.ravel( Hm( _X ) ).reshape((-1,1)) + _Innovation = Y - _HX + # + if QualityMeasure in ["AugmentedWeightedLeastSquares","AWLS","DA"]: + if BI is None or RI is None: + raise ValueError("Background and Observation error covariance matrices has to be properly defined!") + Jb = 0.5 * (_X - Xb).T @ (BI @ (_X - Xb)) + Jo = 0.5 * _Innovation.T @ (RI @ _Innovation) + elif QualityMeasure in ["WeightedLeastSquares","WLS"]: + if RI is None: + raise ValueError("Observation error covariance matrix has to be properly defined!") + Jb = 0. + Jo = 0.5 * _Innovation.T @ (RI @ _Innovation) + elif QualityMeasure in ["LeastSquares","LS","L2"]: + Jb = 0. + Jo = 0.5 * _Innovation.T @ _Innovation + elif QualityMeasure in ["AbsoluteValue","L1"]: + Jb = 0. + Jo = numpy.sum( numpy.abs(_Innovation) ) + elif QualityMeasure in ["MaximumError","ME"]: + Jb = 0. 
+ Jo = numpy.max( numpy.abs(_Innovation) ) + # + J = float( Jb ) + float( Jo ) + # + return J + # # Minimisation de la fonctionnelle # -------------------------------- _n = 0 _S = Xb - # _qualityS = CostFunction( _S, self._parameters["QualityCriterion"] ) - _qualityS = BasicObjects.CostFunction3D( - _S, - _Hm = Hm, - _BI = BI, - _RI = RI, - _Xb = Xb, - _Y = Y, - _SSC = self._parameters["StoreSupplementaryCalculations"], - _QM = self._parameters["QualityCriterion"], - _SSV = self.StoredVariables, - _sSc = False, - ) + _qualityS = CostFunction( _S, self._parameters["QualityCriterion"] ) _Best, _qualityBest = _S, _qualityS _TabuList = [] _TabuList.append( _S ) @@ -205,34 +218,10 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): if len(_TabuList) > self._parameters["LengthOfTabuList"]: _TabuList.pop(0) _R = Tweak( _S, self._parameters["NoiseDistribution"], self._parameters["NoiseAddingProbability"] ) - # _qualityR = CostFunction( _R, self._parameters["QualityCriterion"] ) - _qualityR = BasicObjects.CostFunction3D( - _R, - _Hm = Hm, - _BI = BI, - _RI = RI, - _Xb = Xb, - _Y = Y, - _SSC = self._parameters["StoreSupplementaryCalculations"], - _QM = self._parameters["QualityCriterion"], - _SSV = self.StoredVariables, - _sSc = False, - ) + _qualityR = CostFunction( _R, self._parameters["QualityCriterion"] ) for nbt in range(self._parameters["NumberOfElementaryPerturbations"]-1): _W = Tweak( _S, self._parameters["NoiseDistribution"], self._parameters["NoiseAddingProbability"] ) - # _qualityW = CostFunction( _W, self._parameters["QualityCriterion"] ) - _qualityW = BasicObjects.CostFunction3D( - _W, - _Hm = Hm, - _BI = BI, - _RI = RI, - _Xb = Xb, - _Y = Y, - _SSC = self._parameters["StoreSupplementaryCalculations"], - _QM = self._parameters["QualityCriterion"], - _SSV = self.StoredVariables, - _sSc = False, - ) + _qualityW = CostFunction( _W, self._parameters["QualityCriterion"] ) if (not StateInList(_W, _TabuList)) and ( (_qualityW < _qualityR) or StateInList(_R,_TabuList) ): _R, _qualityR = _W, _qualityW if (not StateInList( _R, _TabuList )) and (_qualityR < _qualityS): @@ -241,46 +230,44 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): if _qualityS < _qualityBest: _Best, _qualityBest = _S, _qualityS # + self.StoredVariables["CurrentIterationNumber"].store( len(self.StoredVariables["CostFunctionJ"]) ) if self._parameters["StoreInternalVariables"] or self._toStore("CurrentState"): self.StoredVariables["CurrentState"].store( _Best ) if self._toStore("SimulatedObservationAtCurrentState"): - _HmX = Hm( numpy.asmatrix(numpy.ravel( _Best )).T ) - _HmX = numpy.asmatrix(numpy.ravel( _HmX )).T + _HmX = Hm( _Best ) self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HmX ) - self.StoredVariables["CurrentIterationNumber"].store( len(self.StoredVariables["CostFunctionJ"]) ) self.StoredVariables["CostFunctionJb"].store( 0. ) self.StoredVariables["CostFunctionJo"].store( 0. 
) self.StoredVariables["CostFunctionJ" ].store( _qualityBest ) # # Obtention de l'analyse # ---------------------- - Xa = numpy.asmatrix(numpy.ravel( _Best )).T + Xa = _Best # - self.StoredVariables["Analysis"].store( Xa.A1 ) + self.StoredVariables["Analysis"].store( Xa ) # + # Calculs et/ou stockages supplémentaires + # --------------------------------------- + if self._toStore("OMA") or \ + self._toStore("SimulatedObservationAtOptimum"): + HXa = Hm(Xa).reshape((-1,1)) if self._toStore("Innovation") or \ self._toStore("OMB") or \ self._toStore("SimulatedObservationAtBackground"): - HXb = Hm(Xb) - d = Y - HXb - if self._toStore("OMA") or \ - self._toStore("SimulatedObservationAtOptimum"): - HXa = Hm(Xa) - # - # Calculs et/ou stockages supplémentaires - # --------------------------------------- + HXb = Hm(Xb).reshape((-1,1)) + Innovation = Y - HXb if self._toStore("Innovation"): - self.StoredVariables["Innovation"].store( numpy.ravel(d) ) + self.StoredVariables["Innovation"].store( Innovation ) + if self._toStore("OMB"): + self.StoredVariables["OMB"].store( Innovation ) if self._toStore("BMA"): self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) ) if self._toStore("OMA"): - self.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) ) - if self._toStore("OMB"): - self.StoredVariables["OMB"].store( numpy.ravel(d) ) + self.StoredVariables["OMA"].store( Y - HXa ) if self._toStore("SimulatedObservationAtBackground"): - self.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(HXb) ) + self.StoredVariables["SimulatedObservationAtBackground"].store( HXb ) if self._toStore("SimulatedObservationAtOptimum"): - self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) ) + self.StoredVariables["SimulatedObservationAtOptimum"].store( HXa ) # self._post_run(HO) return 0 diff --git a/src/daComposant/daCore/BasicObjects.py b/src/daComposant/daCore/BasicObjects.py index d4b9a27..79db6e7 100644 --- a/src/daComposant/daCore/BasicObjects.py +++ b/src/daComposant/daCore/BasicObjects.py @@ -568,15 +568,10 @@ class FullOperator(object): # if __appliedInX is not None: self.__FO["AppliedInX"] = {} - for key in list(__appliedInX.keys()): - if type( __appliedInX[key] ) is type( numpy.matrix([]) ): - # Pour le cas où l'on a une vraie matrice - self.__FO["AppliedInX"][key] = numpy.matrix( __appliedInX[key].A1, numpy.float ).T - elif type( __appliedInX[key] ) is type( numpy.array([]) ) and len(__appliedInX[key].shape) > 1: - # Pour le cas où l'on a un vecteur représenté en array avec 2 dimensions - self.__FO["AppliedInX"][key] = numpy.matrix( __appliedInX[key].reshape(len(__appliedInX[key]),), numpy.float ).T - else: - self.__FO["AppliedInX"][key] = numpy.matrix( __appliedInX[key], numpy.float ).T + for key in __appliedInX: + if isinstance(__appliedInX[key], str): + __appliedInX[key] = PlatformInfo.strvect2liststr( __appliedInX[key] ) + self.__FO["AppliedInX"][key] = numpy.ravel( __appliedInX[key] ).reshape((-1,1)) else: self.__FO["AppliedInX"] = None diff --git a/src/daComposant/daCore/PlatformInfo.py b/src/daComposant/daCore/PlatformInfo.py index 3a01d6a..e0d484d 100644 --- a/src/daComposant/daCore/PlatformInfo.py +++ b/src/daComposant/daCore/PlatformInfo.py @@ -322,7 +322,7 @@ def strmatrix2liststr( __strvect ): __strvect = __strvect.replace(","," ") # Blanc for s in ("]", ")"): __strvect = __strvect.replace(s,";") # "]" et ")" par ";" - __strvect = re.sub(';\s*;',';',__strvect) + __strvect = re.sub(r';\s*;',r';',__strvect) __strvect = 
__strvect.rstrip(";") # Après ^ et avant v __strmat = [l.split() for l in __strvect.split(";")] return __strmat -- 2.39.2
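
Reviewer note, appended after the patch and not part of the commit itself: the main algorithmic change in cekf.py and exkf.py replaces the single numpy.matrix gain formula by a size-dependent choice between the classical observation-space update (used when the number of observations does not exceed the state size) and the information-form update (used otherwise), followed by an explicit re-symmetrisation of the analysis covariance. The standalone numpy sketch below only illustrates that the two branches are algebraically equivalent; the dimensions and the generic names H, P, R, xf and d are made up here and stand in for Ht/Ha, Pn_predicted, R, Xn_predicted and _Innovation in the patch.

import numpy

rng = numpy.random.default_rng(0)
n, p = 4, 3                              # state size and observation size
H  = rng.standard_normal((p, n))         # tangent observation operator (Ht in the patch)
P  = 0.5 * numpy.eye(n)                  # forecast error covariance (Pn_predicted)
R  = 0.1 * numpy.eye(p)                  # observation error covariance
xf = rng.standard_normal((n, 1))         # forecast state (Xn_predicted)
d  = rng.standard_normal((p, 1))         # innovation (_Innovation)

# Observation-space form: K = P H^T (R + H P H^T)^-1, cheaper when p <= n
A1 = R + H @ P @ H.T
x1 = xf + P @ H.T @ numpy.linalg.solve(A1, d)
K1 = P @ H.T @ numpy.linalg.inv(A1)

# Information form: A = P^-1 + H^T R^-1 H, cheaper when p > n
RI = numpy.linalg.inv(R)
A2 = numpy.linalg.inv(P) + H.T @ RI @ H
x2 = xf + numpy.linalg.solve(A2, H.T @ RI @ d)
K2 = numpy.linalg.inv(A2) @ H.T @ RI

assert numpy.allclose(x1, x2) and numpy.allclose(K1, K2)

# Analysis covariance, re-symmetrised as the patch does for Pn
Pa = P - K1 @ H @ P
Pa = 0.5 * (Pa + Pa.T)
print(x1.ravel())
print(numpy.diag(Pa))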
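
A second, smaller note on the numpy.matrix removal pattern that recurs in DerivativeFreeOptimization.py, DifferentialEvolution.py, ParticleSwarmOptimization.py and TabuSearch.py: column vectors built with numpy.asmatrix(...).T become plain (-1, 1) ndarrays and the '*' products become '@'. The condensed sketch below is only an illustration of that pattern with hypothetical names (cost, and Hm passed as an identity lambda); it is not the ADAO CostFunction itself and omits the StoredVariables bookkeeping.

import numpy

def cost(x, xb, y, Hm, BI=None, RI=None, measure="AugmentedWeightedLeastSquares"):
    _X = numpy.ravel(x).reshape((-1, 1))            # column vector without numpy.matrix
    _HX = numpy.ravel(Hm(_X)).reshape((-1, 1))
    _Innovation = y - _HX
    if measure in ("AugmentedWeightedLeastSquares", "AWLS", "DA"):
        Jb = 0.5 * float((_X - xb).T @ (BI @ (_X - xb)))
        Jo = 0.5 * float(_Innovation.T @ (RI @ _Innovation))
    elif measure in ("WeightedLeastSquares", "WLS"):
        Jb, Jo = 0., 0.5 * float(_Innovation.T @ (RI @ _Innovation))
    elif measure in ("LeastSquares", "LS", "L2"):
        Jb, Jo = 0., 0.5 * float(_Innovation.T @ _Innovation)
    elif measure in ("AbsoluteValue", "L1"):
        Jb, Jo = 0., float(numpy.sum(numpy.abs(_Innovation)))
    else:  # ("MaximumError", "ME")
        Jb, Jo = 0., float(numpy.max(numpy.abs(_Innovation)))
    return Jb + Jo

# Tiny usage example with an identity observation operator
xb = numpy.ones((3, 1))
y = numpy.array([[1.0], [2.0], [0.5]])
BI = RI = numpy.eye(3)
print(cost([1.1, 1.9, 0.4], xb, y, lambda v: v, BI, RI))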