From: Jean-Philippe ARGAUD
Date: Fri, 14 Dec 2018 21:50:59 +0000 (+0100)
Subject: Simplifying test for variables to store (2)
X-Git-Tag: V9_3_0.1-prealpha1~25
X-Git-Url: http://git.salome-platform.org/gitweb/?a=commitdiff_plain;h=953c9745b5a2e2b75fa34fbab7cf3383693b5a0e;p=modules%2Fadao.git

Simplifying test for variables to store (2)
---
diff --git a/src/daComposant/daAlgorithms/3DVAR.py b/src/daComposant/daAlgorithms/3DVAR.py index 5d568ae..657eae2 100644 --- a/src/daComposant/daAlgorithms/3DVAR.py +++ b/src/daComposant/daAlgorithms/3DVAR.py @@ -169,16 +169,16 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): def CostFunction(x): _X = numpy.asmatrix(numpy.ravel( x )).T if self._parameters["StoreInternalVariables"] or \ - "CurrentState" in self._parameters["StoreSupplementaryCalculations"] or \ - "CurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]: + self._toStore("CurrentState") or \ + self._toStore("CurrentOptimum"): self.StoredVariables["CurrentState"].store( _X ) _HX = Hm( _X ) _HX = numpy.asmatrix(numpy.ravel( _HX )).T _Innovation = Y - _HX - if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"] or \ - "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtCurrentState") or \ + self._toStore("SimulatedObservationAtCurrentOptimum"): self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX ) - if "InnovationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("InnovationAtCurrentState"): self.StoredVariables["InnovationAtCurrentState"].store( _Innovation ) # Jb = float( 0.5 * (_X - Xb).T * BI * (_X - Xb) ) @@ -188,24 +188,24 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): self.StoredVariables["CostFunctionJb"].store( Jb ) self.StoredVariables["CostFunctionJo"].store( Jo ) self.StoredVariables["CostFunctionJ" ].store( J ) - if "IndexOfOptimum" in self._parameters["StoreSupplementaryCalculations"] or \ - "CurrentOptimum" in self._parameters["StoreSupplementaryCalculations"] or \ - "CostFunctionJAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"] or \ - "CostFunctionJbAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"] or \ - "CostFunctionJoAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"] or \ - "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("IndexOfOptimum") or \ + self._toStore("CurrentOptimum") or \ + self._toStore("CostFunctionJAtCurrentOptimum") or \ + self._toStore("CostFunctionJbAtCurrentOptimum") or \ + self._toStore("CostFunctionJoAtCurrentOptimum") or \ + self._toStore("SimulatedObservationAtCurrentOptimum"): IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps - if "IndexOfOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("IndexOfOptimum"): self.StoredVariables["IndexOfOptimum"].store( IndexMin ) - if "CurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("CurrentOptimum"): self.StoredVariables["CurrentOptimum"].store( self.StoredVariables["CurrentState"][IndexMin] ) - if "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtCurrentOptimum"): self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( 
self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] ) - if "CostFunctionJAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("CostFunctionJAtCurrentOptimum"): self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] ) - if "CostFunctionJbAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("CostFunctionJbAtCurrentOptimum"): self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJb"][IndexMin] ) - if "CostFunctionJoAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("CostFunctionJoAtCurrentOptimum"): self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJo"][IndexMin] ) return J # @@ -295,7 +295,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # # Correction pour pallier a un bug de TNC sur le retour du Minimum # ---------------------------------------------------------------- - if self._parameters["StoreInternalVariables"] or "CurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._parameters["StoreInternalVariables"] or self._toStore("CurrentState"): Minimum = self.StoredVariables["CurrentState"][IndexMin] # # Obtention de l'analyse @@ -304,21 +304,21 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # self.StoredVariables["Analysis"].store( Xa.A1 ) # - if "OMA" in self._parameters["StoreSupplementaryCalculations"] or \ - "SigmaObs2" in self._parameters["StoreSupplementaryCalculations"] or \ - "SimulationQuantiles" in self._parameters["StoreSupplementaryCalculations"] or \ - "SimulatedObservationAtOptimum" in self._parameters["StoreSupplementaryCalculations"]: - if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("OMA") or \ + self._toStore("SigmaObs2") or \ + self._toStore("SimulationQuantiles") or \ + self._toStore("SimulatedObservationAtOptimum"): + if self._toStore("SimulatedObservationAtCurrentState"): HXa = self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] - elif "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]: + elif self._toStore("SimulatedObservationAtCurrentOptimum"): HXa = self.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1] else: HXa = Hm(Xa) # # Calcul de la covariance d'analyse # --------------------------------- - if "APosterioriCovariance" in self._parameters["StoreSupplementaryCalculations"] or \ - "SimulationQuantiles" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("APosterioriCovariance") or \ + self._toStore("SimulationQuantiles"): HtM = HO["Tangent"].asMatrix(ValueForMethodForm = Xa) HtM = HtM.reshape(Y.size,Xa.size) # ADAO & check shape HaM = HO["Adjoint"].asMatrix(ValueForMethodForm = Xa) @@ -346,25 +346,25 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # # Calculs et/ou stockages supplémentaires # --------------------------------------- - if "Innovation" in self._parameters["StoreSupplementaryCalculations"] or \ - "SigmaObs2" in self._parameters["StoreSupplementaryCalculations"] or \ - "MahalanobisConsistency" in self._parameters["StoreSupplementaryCalculations"] or \ - "OMB" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("Innovation") or \ + self._toStore("SigmaObs2") or \ + self._toStore("MahalanobisConsistency") or \ + self._toStore("OMB"): d = Y - HXb - if 
"Innovation" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("Innovation"): self.StoredVariables["Innovation"].store( numpy.ravel(d) ) - if "BMA" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("BMA"): self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) ) - if "OMA" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("OMA"): self.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) ) - if "OMB" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("OMB"): self.StoredVariables["OMB"].store( numpy.ravel(d) ) - if "SigmaObs2" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SigmaObs2"): TraceR = R.trace(Y.size) self.StoredVariables["SigmaObs2"].store( float( (d.T * (numpy.asmatrix(numpy.ravel(Y)).T-numpy.asmatrix(numpy.ravel(HXa)).T)) ) / TraceR ) - if "MahalanobisConsistency" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("MahalanobisConsistency"): self.StoredVariables["MahalanobisConsistency"].store( float( 2.*MinJ/d.size ) ) - if "SimulationQuantiles" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulationQuantiles"): nech = self._parameters["NumberOfSamplesForQuantiles"] HXa = numpy.matrix(numpy.ravel( HXa )).T YfQ = None @@ -388,9 +388,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): if YQ is None: YQ = YfQ[:,indice] else: YQ = numpy.hstack((YQ,YfQ[:,indice])) self.StoredVariables["SimulationQuantiles"].store( YQ ) - if "SimulatedObservationAtBackground" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtBackground"): self.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(HXb) ) - if "SimulatedObservationAtOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtOptimum"): self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) ) # self._post_run(HO) diff --git a/src/daComposant/daAlgorithms/4DVAR.py b/src/daComposant/daAlgorithms/4DVAR.py index 4a44533..e5423b6 100644 --- a/src/daComposant/daAlgorithms/4DVAR.py +++ b/src/daComposant/daAlgorithms/4DVAR.py @@ -169,8 +169,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): def CostFunction(x): _X = numpy.asmatrix(numpy.ravel( x )).T if self._parameters["StoreInternalVariables"] or \ - "CurrentState" in self._parameters["StoreSupplementaryCalculations"] or \ - "CurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]: + self._toStore("CurrentState") or \ + self._toStore("CurrentOptimum"): self.StoredVariables["CurrentState"].store( _X ) Jb = 0.5 * (_X - Xb).T * BI * (_X - Xb) self.DirectCalculation = [None,] @@ -208,21 +208,21 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): self.StoredVariables["CostFunctionJb"].store( Jb ) self.StoredVariables["CostFunctionJo"].store( Jo ) self.StoredVariables["CostFunctionJ" ].store( J ) - if "IndexOfOptimum" in self._parameters["StoreSupplementaryCalculations"] or \ - "CurrentOptimum" in self._parameters["StoreSupplementaryCalculations"] or \ - "CostFunctionJAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"] or \ - "CostFunctionJbAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"] or \ - "CostFunctionJoAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("IndexOfOptimum") or \ + self._toStore("CurrentOptimum") or \ + 
self._toStore("CostFunctionJAtCurrentOptimum") or \ + self._toStore("CostFunctionJbAtCurrentOptimum") or \ + self._toStore("CostFunctionJoAtCurrentOptimum"): IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps - if "IndexOfOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("IndexOfOptimum"): self.StoredVariables["IndexOfOptimum"].store( IndexMin ) - if "CurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("CurrentOptimum"): self.StoredVariables["CurrentOptimum"].store( self.StoredVariables["CurrentState"][IndexMin] ) - if "CostFunctionJAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("CostFunctionJAtCurrentOptimum"): self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] ) - if "CostFunctionJbAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("CostFunctionJbAtCurrentOptimum"): self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJb"][IndexMin] ) - if "CostFunctionJoAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("CostFunctionJoAtCurrentOptimum"): self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJo"][IndexMin] ) return J # @@ -326,7 +326,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # # Correction pour pallier a un bug de TNC sur le retour du Minimum # ---------------------------------------------------------------- - if self._parameters["StoreInternalVariables"] or "CurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._parameters["StoreInternalVariables"] or self._toStore("CurrentState"): Minimum = self.StoredVariables["CurrentState"][IndexMin] # # Obtention de l'analyse @@ -337,7 +337,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # # Calculs et/ou stockages supplémentaires # --------------------------------------- - if "BMA" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("BMA"): self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) ) # self._post_run(HO) diff --git a/src/daComposant/daAlgorithms/AdjointTest.py b/src/daComposant/daAlgorithms/AdjointTest.py index c0a7c67..5acd667 100644 --- a/src/daComposant/daAlgorithms/AdjointTest.py +++ b/src/daComposant/daAlgorithms/AdjointTest.py @@ -98,9 +98,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): Y = numpy.asmatrix(numpy.ravel( Hm( X ) )).T Y = numpy.asmatrix(numpy.ravel( Y )).T NormeY = numpy.linalg.norm( Y ) - if "CurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("CurrentState"): self.StoredVariables["CurrentState"].store( numpy.ravel(X) ) - if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtCurrentState"): self.StoredVariables["SimulatedObservationAtCurrentState"].store( numpy.ravel(Y) ) # if len(self._parameters["InitialDirection"]) == 0: diff --git a/src/daComposant/daAlgorithms/Blue.py b/src/daComposant/daAlgorithms/Blue.py index ab5040c..510358d 100644 --- a/src/daComposant/daAlgorithms/Blue.py +++ b/src/daComposant/daAlgorithms/Blue.py @@ -136,18 +136,18 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # Calcul de la fonction coût # -------------------------- if self._parameters["StoreInternalVariables"] or \ - "CostFunctionJ" in 
self._parameters["StoreSupplementaryCalculations"] or \ - "OMA" in self._parameters["StoreSupplementaryCalculations"] or \ - "SigmaObs2" in self._parameters["StoreSupplementaryCalculations"] or \ - "MahalanobisConsistency" in self._parameters["StoreSupplementaryCalculations"] or \ - "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"] or \ - "SimulatedObservationAtOptimum" in self._parameters["StoreSupplementaryCalculations"] or \ - "SimulationQuantiles" in self._parameters["StoreSupplementaryCalculations"]: + self._toStore("CostFunctionJ") or \ + self._toStore("OMA") or \ + self._toStore("SigmaObs2") or \ + self._toStore("MahalanobisConsistency") or \ + self._toStore("SimulatedObservationAtCurrentState") or \ + self._toStore("SimulatedObservationAtOptimum") or \ + self._toStore("SimulationQuantiles"): HXa = Hm * Xa oma = Y - HXa if self._parameters["StoreInternalVariables"] or \ - "CostFunctionJ" in self._parameters["StoreSupplementaryCalculations"] or \ - "MahalanobisConsistency" in self._parameters["StoreSupplementaryCalculations"]: + self._toStore("CostFunctionJ") or \ + self._toStore("MahalanobisConsistency"): Jb = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) ) Jo = float( 0.5 * oma.T * RI * oma ) J = Jb + Jo @@ -157,8 +157,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # # Calcul de la covariance d'analyse # --------------------------------- - if "APosterioriCovariance" in self._parameters["StoreSupplementaryCalculations"] or \ - "SimulationQuantiles" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("APosterioriCovariance") or \ + self._toStore("SimulationQuantiles"): if (Y.size <= Xb.size): K = B * Ha * (R + Hm * B * Ha).I elif (Y.size > Xb.size): K = (BI + Ha * RI * Hm).I * Ha * RI A = B - K * Hm * B @@ -175,24 +175,24 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # # Calculs et/ou stockages supplémentaires # --------------------------------------- - if self._parameters["StoreInternalVariables"] or "CurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._parameters["StoreInternalVariables"] or self._toStore("CurrentState"): self.StoredVariables["CurrentState"].store( numpy.ravel(Xa) ) - if "Innovation" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("Innovation"): self.StoredVariables["Innovation"].store( numpy.ravel(d) ) - if "BMA" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("BMA"): self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) ) - if "OMA" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("OMA"): self.StoredVariables["OMA"].store( numpy.ravel(oma) ) - if "OMB" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("OMB"): self.StoredVariables["OMB"].store( numpy.ravel(d) ) - if "SigmaObs2" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SigmaObs2"): TraceR = R.trace(Y.size) self.StoredVariables["SigmaObs2"].store( float( (d.T * (numpy.asmatrix(numpy.ravel(oma)).T)) ) / TraceR ) - if "SigmaBck2" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SigmaBck2"): self.StoredVariables["SigmaBck2"].store( float( (d.T * Hm * (Xa - Xb))/(Hm * B * Hm.T).trace() ) ) - if "MahalanobisConsistency" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("MahalanobisConsistency"): self.StoredVariables["MahalanobisConsistency"].store( float( 2.*J/d.size ) ) - if "SimulationQuantiles" in 
self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulationQuantiles"): nech = self._parameters["NumberOfSamplesForQuantiles"] YfQ = None for i in range(nech): @@ -215,11 +215,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): if YQ is None: YQ = YfQ[:,indice] else: YQ = numpy.hstack((YQ,YfQ[:,indice])) self.StoredVariables["SimulationQuantiles"].store( YQ ) - if "SimulatedObservationAtBackground" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtBackground"): self.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(HXb) ) - if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtCurrentState"): self.StoredVariables["SimulatedObservationAtCurrentState"].store( numpy.ravel(HXa) ) - if "SimulatedObservationAtOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtOptimum"): self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) ) # self._post_run(HO) diff --git a/src/daComposant/daAlgorithms/DerivativeFreeOptimization.py b/src/daComposant/daAlgorithms/DerivativeFreeOptimization.py index f3230d6..975bfdb 100644 --- a/src/daComposant/daAlgorithms/DerivativeFreeOptimization.py +++ b/src/daComposant/daAlgorithms/DerivativeFreeOptimization.py @@ -136,10 +136,10 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): _HX = Hm( _X ) _HX = numpy.asmatrix(numpy.ravel( _HX )).T _Innovation = Y - _HX - if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"] or \ - "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtCurrentState") or \ + self._toStore("SimulatedObservationAtCurrentOptimum"): self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX ) - if "InnovationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("InnovationAtCurrentState"): self.StoredVariables["InnovationAtCurrentState"].store( _Innovation ) # if QualityMeasure in ["AugmentedWeightedLeastSquares","AWLS","DA"]: @@ -167,24 +167,24 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): self.StoredVariables["CostFunctionJb"].store( Jb ) self.StoredVariables["CostFunctionJo"].store( Jo ) self.StoredVariables["CostFunctionJ" ].store( J ) - if "IndexOfOptimum" in self._parameters["StoreSupplementaryCalculations"] or \ - "CurrentOptimum" in self._parameters["StoreSupplementaryCalculations"] or \ - "CostFunctionJAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"] or \ - "CostFunctionJbAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"] or \ - "CostFunctionJoAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"] or \ - "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("IndexOfOptimum") or \ + self._toStore("CurrentOptimum") or \ + self._toStore("CostFunctionJAtCurrentOptimum") or \ + self._toStore("CostFunctionJbAtCurrentOptimum") or \ + self._toStore("CostFunctionJoAtCurrentOptimum") or \ + self._toStore("SimulatedObservationAtCurrentOptimum"): IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps - if "IndexOfOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("IndexOfOptimum"): self.StoredVariables["IndexOfOptimum"].store( IndexMin ) 
- if "CurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("CurrentOptimum"): self.StoredVariables["CurrentOptimum"].store( self.StoredVariables["CurrentState"][IndexMin] ) - if "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtCurrentOptimum"): self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] ) - if "CostFunctionJAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("CostFunctionJAtCurrentOptimum"): self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] ) - if "CostFunctionJbAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("CostFunctionJbAtCurrentOptimum"): self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJb"][IndexMin] ) - if "CostFunctionJoAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("CostFunctionJoAtCurrentOptimum"): self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJo"][IndexMin] ) return J # @@ -379,28 +379,28 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # # Calculs et/ou stockages supplémentaires # --------------------------------------- - if "OMA" in self._parameters["StoreSupplementaryCalculations"] or \ - "SimulatedObservationAtOptimum" in self._parameters["StoreSupplementaryCalculations"]: - if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("OMA" ) or \ + self._toStore("SimulatedObservationAtOptimum"): + if self._toStore("SimulatedObservationAtCurrentState"): HXa = self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] - elif "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]: + elif self._toStore("SimulatedObservationAtCurrentOptimum"): HXa = self.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1] else: HXa = Hm(Xa) - if "Innovation" in self._parameters["StoreSupplementaryCalculations"] or \ - "OMB" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("Innovation") or \ + self._toStore("OMB"): d = Y - HXb - if "Innovation" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("Innovation"): self.StoredVariables["Innovation"].store( numpy.ravel(d) ) - if "OMB" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("OMB"): self.StoredVariables["OMB"].store( numpy.ravel(d) ) - if "BMA" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("BMA"): self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) ) - if "OMA" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("OMA"): self.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) ) - if "SimulatedObservationAtBackground" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtBackground"): self.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(Hm(Xb)) ) - if "SimulatedObservationAtOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtOptimum"): self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) ) # self._post_run() diff --git 
a/src/daComposant/daAlgorithms/DifferentialEvolution.py b/src/daComposant/daAlgorithms/DifferentialEvolution.py index 74cd606..f943dae 100644 --- a/src/daComposant/daAlgorithms/DifferentialEvolution.py +++ b/src/daComposant/daAlgorithms/DifferentialEvolution.py @@ -166,10 +166,10 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): _HX = Hm( _X ) _HX = numpy.asmatrix(numpy.ravel( _HX )).T _Innovation = Y - _HX - if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"] or \ - "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtCurrentState") or \ + self._toStore("SimulatedObservationAtCurrentOptimum"): self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX ) - if "InnovationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("InnovationAtCurrentState"): self.StoredVariables["InnovationAtCurrentState"].store( _Innovation ) # if QualityMeasure in ["AugmentedWeightedLeastSquares","AWLS","DA"]: @@ -197,24 +197,24 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): self.StoredVariables["CostFunctionJb"].store( Jb ) self.StoredVariables["CostFunctionJo"].store( Jo ) self.StoredVariables["CostFunctionJ" ].store( J ) - if "IndexOfOptimum" in self._parameters["StoreSupplementaryCalculations"] or \ - "CurrentOptimum" in self._parameters["StoreSupplementaryCalculations"] or \ - "CostFunctionJAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"] or \ - "CostFunctionJbAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"] or \ - "CostFunctionJoAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"] or \ - "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("IndexOfOptimum") or \ + self._toStore("CurrentOptimum") or \ + self._toStore("CostFunctionJAtCurrentOptimum") or \ + self._toStore("CostFunctionJbAtCurrentOptimum") or \ + self._toStore("CostFunctionJoAtCurrentOptimum") or \ + self._toStore("SimulatedObservationAtCurrentOptimum"): IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps - if "IndexOfOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("IndexOfOptimum"): self.StoredVariables["IndexOfOptimum"].store( IndexMin ) - if "CurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("CurrentOptimum"): self.StoredVariables["CurrentOptimum"].store( self.StoredVariables["CurrentState"][IndexMin] ) - if "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtCurrentOptimum"): self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] ) - if "CostFunctionJAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("CostFunctionJAtCurrentOptimum"): self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] ) - if "CostFunctionJbAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("CostFunctionJbAtCurrentOptimum"): self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJb"][IndexMin] ) - if "CostFunctionJoAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if 
self._toStore("CostFunctionJoAtCurrentOptimum"): self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJo"][IndexMin] ) return J # @@ -249,28 +249,27 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # # Calculs et/ou stockages supplémentaires # --------------------------------------- - if "OMA" in self._parameters["StoreSupplementaryCalculations"] or \ - "SimulatedObservationAtOptimum" in self._parameters["StoreSupplementaryCalculations"]: - if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("OMA") or self._toStore("SimulatedObservationAtOptimum"): + if self._toStore("SimulatedObservationAtCurrentState"): HXa = self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] - elif "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]: + elif self._toStore("SimulatedObservationAtCurrentOptimum"): HXa = self.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1] else: HXa = Hm(Xa) - if "Innovation" in self._parameters["StoreSupplementaryCalculations"] or \ - "OMB" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("Innovation") or \ + self._toStore("OMB"): d = Y - HXb - if "Innovation" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("Innovation"): self.StoredVariables["Innovation"].store( numpy.ravel(d) ) - if "OMB" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("OMB"): self.StoredVariables["OMB"].store( numpy.ravel(d) ) - if "BMA" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("BMA"): self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) ) - if "OMA" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("OMA"): self.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) ) - if "SimulatedObservationAtBackground" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtBackground"): self.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(Hm(Xb)) ) - if "SimulatedObservationAtOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtOptimum"): self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) ) # self._post_run() diff --git a/src/daComposant/daAlgorithms/EnsembleBlue.py b/src/daComposant/daAlgorithms/EnsembleBlue.py index 303b8ea..48191a9 100644 --- a/src/daComposant/daAlgorithms/EnsembleBlue.py +++ b/src/daComposant/daAlgorithms/EnsembleBlue.py @@ -96,14 +96,14 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # ----------------------------------------------- for iens in range(nb_ens): HXb = Hm * Xb[iens] - if "SimulatedObservationAtBackground" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtBackground"): self.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(HXb) ) d = EnsembleY[:,iens] - HXb - if "Innovation" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("Innovation"): self.StoredVariables["Innovation"].store( numpy.ravel(d) ) Xa = Xb[iens] + K*d self.StoredVariables["CurrentState"].store( Xa ) - if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtCurrentState"): self.StoredVariables["SimulatedObservationAtCurrentState"].store( Hm * Xa ) # # Fabrication de l'analyse @@ 
-111,7 +111,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): Members = self.StoredVariables["CurrentState"][-nb_ens:] Xa = numpy.matrix( Members ).mean(axis=0) self.StoredVariables["Analysis"].store( Xa.A1 ) - if "SimulatedObservationAtOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtOptimum"): self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel( Hm * Xa ) ) # self._post_run(HO) diff --git a/src/daComposant/daAlgorithms/EnsembleKalmanFilter.py b/src/daComposant/daAlgorithms/EnsembleKalmanFilter.py index 7d74ed6..1655796 100644 --- a/src/daComposant/daAlgorithms/EnsembleKalmanFilter.py +++ b/src/daComposant/daAlgorithms/EnsembleKalmanFilter.py @@ -106,11 +106,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # # Précalcul des inversions de B et R # ---------------------------------- - if self._parameters["StoreInternalVariables"] \ - or "CostFunctionJ" in self._parameters["StoreSupplementaryCalculations"] \ - or "CostFunctionJb" in self._parameters["StoreSupplementaryCalculations"] \ - or "CostFunctionJo" in self._parameters["StoreSupplementaryCalculations"] \ - or "APosterioriCovariance" in self._parameters["StoreSupplementaryCalculations"]: + if self._parameters["StoreInternalVariables"] or \ + self._toStore("CostFunctionJ") or \ + self._toStore("CostFunctionJb") or \ + self._toStore("CostFunctionJo") or \ + self._toStore("APosterioriCovariance"): BI = B.getI() RI = R.getI() BIdemi = B.choleskyI() @@ -129,7 +129,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): else: Qn = Q # self.StoredVariables["Analysis"].store( Xb.A1 ) - if "APosterioriCovariance" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("APosterioriCovariance"): self.StoredVariables["APosterioriCovariance"].store( Pn ) covarianceXa = Pn Xa = Xb @@ -193,29 +193,29 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): self.StoredVariables["Analysis"].store( Xa ) # del Yo, PfHT, HPfHT - if self._parameters["StoreInternalVariables"] \ - or "CostFunctionJ" in self._parameters["StoreSupplementaryCalculations"] \ - or "CostFunctionJb" in self._parameters["StoreSupplementaryCalculations"] \ - or "CostFunctionJo" in self._parameters["StoreSupplementaryCalculations"] \ - or "APosterioriCovariance" in self._parameters["StoreSupplementaryCalculations"] \ - or "Innovation" in self._parameters["StoreSupplementaryCalculations"]: + if self._parameters["StoreInternalVariables"] or \ + self._toStore("CostFunctionJ") or \ + self._toStore("CostFunctionJb") or \ + self._toStore("CostFunctionJo") or \ + self._toStore("APosterioriCovariance") or \ + self._toStore("Innovation"): d = Ynpu - numpy.asmatrix(numpy.ravel( H((Xa, Un)) )).T self.StoredVariables["Innovation"].store( d ) if self._parameters["StoreInternalVariables"] \ - or "CurrentState" in self._parameters["StoreSupplementaryCalculations"]: + or self._toStore("CurrentState"): self.StoredVariables["CurrentState"].store( Xn ) - if self._parameters["StoreInternalVariables"] \ - or "CostFunctionJ" in self._parameters["StoreSupplementaryCalculations"] \ - or "CostFunctionJb" in self._parameters["StoreSupplementaryCalculations"] \ - or "CostFunctionJo" in self._parameters["StoreSupplementaryCalculations"] \ - or "APosterioriCovariance" in self._parameters["StoreSupplementaryCalculations"]: + if self._parameters["StoreInternalVariables"] or \ + self._toStore("CostFunctionJ") or \ + self._toStore("CostFunctionJb") or \ + self._toStore("CostFunctionJo") or \ + 
self._toStore("APosterioriCovariance"): Jb = 0.5 * (Xa - Xb).T * BI * (Xa - Xb) Jo = 0.5 * d.T * RI * d J = float( Jb ) + float( Jo ) self.StoredVariables["CostFunctionJb"].store( Jb ) self.StoredVariables["CostFunctionJo"].store( Jo ) self.StoredVariables["CostFunctionJ" ].store( J ) - if "APosterioriCovariance" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("APosterioriCovariance"): Ht = HO["Tangent"].asMatrix(ValueForMethodForm = Xa) Ht = Ht.reshape(__p,__n) # ADAO & check shape Pf = 0. @@ -233,10 +233,10 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # ---------------------------------------------------------------- if self._parameters["EstimationOf"] == "Parameters": self.StoredVariables["Analysis"].store( Xa.A1 ) - if "APosterioriCovariance" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("APosterioriCovariance"): self.StoredVariables["APosterioriCovariance"].store( covarianceXa ) # - if "BMA" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("BMA"): self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) ) # self._post_run(HO) diff --git a/src/daComposant/daAlgorithms/ExtendedBlue.py b/src/daComposant/daAlgorithms/ExtendedBlue.py index 6706eeb..f609c09 100644 --- a/src/daComposant/daAlgorithms/ExtendedBlue.py +++ b/src/daComposant/daAlgorithms/ExtendedBlue.py @@ -137,18 +137,18 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # Calcul de la fonction coût # -------------------------- if self._parameters["StoreInternalVariables"] or \ - "CostFunctionJ" in self._parameters["StoreSupplementaryCalculations"] or \ - "OMA" in self._parameters["StoreSupplementaryCalculations"] or \ - "SigmaObs2" in self._parameters["StoreSupplementaryCalculations"] or \ - "MahalanobisConsistency" in self._parameters["StoreSupplementaryCalculations"] or \ - "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"] or \ - "SimulatedObservationAtOptimum" in self._parameters["StoreSupplementaryCalculations"] or \ - "SimulationQuantiles" in self._parameters["StoreSupplementaryCalculations"]: + self._toStore("CostFunctionJ") or \ + self._toStore("OMA") or \ + self._toStore("SigmaObs2") or \ + self._toStore("MahalanobisConsistency") or \ + self._toStore("SimulatedObservationAtCurrentState") or \ + self._toStore("SimulatedObservationAtOptimum") or \ + self._toStore("SimulationQuantiles"): HXa = numpy.matrix(numpy.ravel( H( Xa ) )).T oma = Y - HXa if self._parameters["StoreInternalVariables"] or \ - "CostFunctionJ" in self._parameters["StoreSupplementaryCalculations"] or \ - "MahalanobisConsistency" in self._parameters["StoreSupplementaryCalculations"]: + self._toStore("CostFunctionJ") or \ + self._toStore("MahalanobisConsistency"): Jb = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) ) Jo = float( 0.5 * oma.T * RI * oma ) J = Jb + Jo @@ -158,8 +158,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # # Calcul de la covariance d'analyse # --------------------------------- - if "APosterioriCovariance" in self._parameters["StoreSupplementaryCalculations"] or \ - "SimulationQuantiles" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("APosterioriCovariance") or \ + self._toStore("SimulationQuantiles"): if (Y.size <= Xb.size): K = B * Ha * (R + Hm * B * Ha).I elif (Y.size > Xb.size): K = (BI + Ha * RI * Hm).I * Ha * RI A = B - K * Hm * B @@ -176,24 +176,24 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # # Calculs et/ou stockages supplémentaires # 
--------------------------------------- - if self._parameters["StoreInternalVariables"] or "CurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._parameters["StoreInternalVariables"] or self._toStore("CurrentState"): self.StoredVariables["CurrentState"].store( numpy.ravel(Xa) ) - if "Innovation" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("Innovation"): self.StoredVariables["Innovation"].store( numpy.ravel(d) ) - if "BMA" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("BMA"): self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) ) - if "OMA" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("OMA"): self.StoredVariables["OMA"].store( numpy.ravel(oma) ) - if "OMB" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("OMB"): self.StoredVariables["OMB"].store( numpy.ravel(d) ) - if "SigmaObs2" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SigmaObs2"): TraceR = R.trace(Y.size) self.StoredVariables["SigmaObs2"].store( float( (d.T * (numpy.asmatrix(numpy.ravel(oma)).T)) ) / TraceR ) - if "SigmaBck2" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SigmaBck2"): self.StoredVariables["SigmaBck2"].store( float( (d.T * Hm * (Xa - Xb))/(Hm * B * Hm.T).trace() ) ) - if "MahalanobisConsistency" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("MahalanobisConsistency"): self.StoredVariables["MahalanobisConsistency"].store( float( 2.*J/d.size ) ) - if "SimulationQuantiles" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulationQuantiles"): nech = self._parameters["NumberOfSamplesForQuantiles"] HtM = HO["Tangent"].asMatrix(ValueForMethodForm = Xa) HtM = HtM.reshape(Y.size,Xa.size) # ADAO & check shape @@ -218,11 +218,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): if YQ is None: YQ = YfQ[:,indice] else: YQ = numpy.hstack((YQ,YfQ[:,indice])) self.StoredVariables["SimulationQuantiles"].store( YQ ) - if "SimulatedObservationAtBackground" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtBackground"): self.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(HXb) ) - if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtCurrentState"): self.StoredVariables["SimulatedObservationAtCurrentState"].store( numpy.ravel(HXa) ) - if "SimulatedObservationAtOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtOptimum"): self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) ) # self._post_run(HO) diff --git a/src/daComposant/daAlgorithms/FunctionTest.py b/src/daComposant/daAlgorithms/FunctionTest.py index 5ce49ca..0273906 100644 --- a/src/daComposant/daAlgorithms/FunctionTest.py +++ b/src/daComposant/daAlgorithms/FunctionTest.py @@ -111,7 +111,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # ---------- Ys = [] for i in range(self._parameters["NumberOfRepetition"]): - if "CurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("CurrentState"): self.StoredVariables["CurrentState"].store( numpy.ravel(Xn) ) print(" %s\n"%("-"*75,)) if self._parameters["NumberOfRepetition"] > 1: @@ -132,7 +132,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): msgs += (" Standard error.....: %."+str(_p)+"e\n")%numpy.std( 
Yn, dtype=mfp ) msgs += (" L2 norm of vector..: %."+str(_p)+"e\n")%numpy.linalg.norm( Yn ) print(msgs) - if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtCurrentState"): self.StoredVariables["SimulatedObservationAtCurrentState"].store( numpy.ravel(Yn) ) # Ys.append( copy.copy( numpy.ravel( diff --git a/src/daComposant/daAlgorithms/GradientTest.py b/src/daComposant/daAlgorithms/GradientTest.py index ba1198d..5de0f0d 100644 --- a/src/daComposant/daAlgorithms/GradientTest.py +++ b/src/daComposant/daAlgorithms/GradientTest.py @@ -121,9 +121,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): FX = numpy.asmatrix(numpy.ravel( Hm( X ) )).T NormeX = numpy.linalg.norm( X ) NormeFX = numpy.linalg.norm( FX ) - if "CurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("CurrentState"): self.StoredVariables["CurrentState"].store( numpy.ravel(Xn) ) - if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtCurrentState"): self.StoredVariables["SimulatedObservationAtCurrentState"].store( numpy.ravel(FX) ) # if len(self._parameters["InitialDirection"]) == 0: @@ -237,9 +237,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): FX_plus_dX = Hm( X + dX ) FX_plus_dX = numpy.asmatrix(numpy.ravel( FX_plus_dX )).T # - if "CurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("CurrentState"): self.StoredVariables["CurrentState"].store( numpy.ravel(X + dX) ) - if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtCurrentState"): self.StoredVariables["SimulatedObservationAtCurrentState"].store( numpy.ravel(FX_plus_dX) ) # NormedX = numpy.linalg.norm( dX ) diff --git a/src/daComposant/daAlgorithms/LinearLeastSquares.py b/src/daComposant/daAlgorithms/LinearLeastSquares.py index 0f47c39..3022165 100644 --- a/src/daComposant/daAlgorithms/LinearLeastSquares.py +++ b/src/daComposant/daAlgorithms/LinearLeastSquares.py @@ -64,13 +64,13 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # Calcul de la fonction coût # -------------------------- if self._parameters["StoreInternalVariables"] or \ - "CostFunctionJ" in self._parameters["StoreSupplementaryCalculations"] or \ - "OMA" in self._parameters["StoreSupplementaryCalculations"] or \ - "SimulatedObservationAtOptimum" in self._parameters["StoreSupplementaryCalculations"]: + self._toStore("CostFunctionJ") or \ + self._toStore("OMA") or \ + self._toStore("SimulatedObservationAtOptimum"): HXa = Hm * Xa oma = Y - HXa if self._parameters["StoreInternalVariables"] or \ - "CostFunctionJ" in self._parameters["StoreSupplementaryCalculations"]: + self._toStore("CostFunctionJ"): Jb = 0. 
Jo = 0.5 * oma.T * RI * oma J = float( Jb ) + float( Jo ) @@ -80,13 +80,13 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # # Calculs et/ou stockages supplémentaires # --------------------------------------- - if self._parameters["StoreInternalVariables"] or "CurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._parameters["StoreInternalVariables"] or self._toStore("CurrentState"): self.StoredVariables["CurrentState"].store( numpy.ravel(Xa) ) - if "OMA" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("OMA"): self.StoredVariables["OMA"].store( numpy.ravel(oma) ) - if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtCurrentState"): self.StoredVariables["SimulatedObservationAtCurrentState"].store( numpy.ravel(HXa) ) - if "SimulatedObservationAtOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtOptimum"): self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) ) # self._post_run(HO) diff --git a/src/daComposant/daAlgorithms/LinearityTest.py b/src/daComposant/daAlgorithms/LinearityTest.py index 5525cf7..c3b698a 100644 --- a/src/daComposant/daAlgorithms/LinearityTest.py +++ b/src/daComposant/daAlgorithms/LinearityTest.py @@ -112,9 +112,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): FX = numpy.asmatrix(numpy.ravel( Hm( Xn ) )).T NormeX = numpy.linalg.norm( Xn ) NormeFX = numpy.linalg.norm( FX ) - if "CurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("CurrentState"): self.StoredVariables["CurrentState"].store( numpy.ravel(Xn) ) - if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtCurrentState"): self.StoredVariables["SimulatedObservationAtCurrentState"].store( numpy.ravel(FX) ) # # Fabrication de la direction de l'increment dX @@ -251,14 +251,14 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): dX = amplitude * dX0 # if self._parameters["ResiduFormula"] == "CenteredDL": - if "CurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("CurrentState"): self.StoredVariables["CurrentState"].store( numpy.ravel(Xn + dX) ) self.StoredVariables["CurrentState"].store( numpy.ravel(Xn - dX) ) # FX_plus_dX = numpy.asmatrix(numpy.ravel( Hm( Xn + dX ) )).T FX_moins_dX = numpy.asmatrix(numpy.ravel( Hm( Xn - dX ) )).T # - if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtCurrentState"): self.StoredVariables["SimulatedObservationAtCurrentState"].store( numpy.ravel(FX_plus_dX) ) self.StoredVariables["SimulatedObservationAtCurrentState"].store( numpy.ravel(FX_moins_dX) ) # @@ -269,12 +269,12 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): msgs += "\n" + __marge + msg # if self._parameters["ResiduFormula"] == "Taylor": - if "CurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("CurrentState"): self.StoredVariables["CurrentState"].store( numpy.ravel(Xn + dX) ) # FX_plus_dX = numpy.asmatrix(numpy.ravel( Hm( Xn + dX ) )).T # - if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtCurrentState"): self.StoredVariables["SimulatedObservationAtCurrentState"].store( numpy.ravel(FX_plus_dX) ) # Residu = numpy.linalg.norm( FX_plus_dX - FX - 
amplitude * GradFxdX ) / NormeFX @@ -284,7 +284,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): msgs += "\n" + __marge + msg # if self._parameters["ResiduFormula"] == "NominalTaylor": - if "CurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("CurrentState"): self.StoredVariables["CurrentState"].store( numpy.ravel(Xn + dX) ) self.StoredVariables["CurrentState"].store( numpy.ravel(Xn - dX) ) self.StoredVariables["CurrentState"].store( numpy.ravel(dX) ) @@ -293,7 +293,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): FX_moins_dX = numpy.asmatrix(numpy.ravel( Hm( Xn - dX ) )).T FdX = numpy.asmatrix(numpy.ravel( Hm( dX ) )).T # - if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtCurrentState"): self.StoredVariables["SimulatedObservationAtCurrentState"].store( numpy.ravel(FX_plus_dX) ) self.StoredVariables["SimulatedObservationAtCurrentState"].store( numpy.ravel(FX_moins_dX) ) self.StoredVariables["SimulatedObservationAtCurrentState"].store( numpy.ravel(FdX) ) @@ -308,7 +308,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): msgs += "\n" + __marge + msg # if self._parameters["ResiduFormula"] == "NominalTaylorRMS": - if "CurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("CurrentState"): self.StoredVariables["CurrentState"].store( numpy.ravel(Xn + dX) ) self.StoredVariables["CurrentState"].store( numpy.ravel(Xn - dX) ) self.StoredVariables["CurrentState"].store( numpy.ravel(dX) ) @@ -317,7 +317,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): FX_moins_dX = numpy.asmatrix(numpy.ravel( Hm( Xn - dX ) )).T FdX = numpy.asmatrix(numpy.ravel( Hm( dX ) )).T # - if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtCurrentState"): self.StoredVariables["SimulatedObservationAtCurrentState"].store( numpy.ravel(FX_plus_dX) ) self.StoredVariables["SimulatedObservationAtCurrentState"].store( numpy.ravel(FX_moins_dX) ) self.StoredVariables["SimulatedObservationAtCurrentState"].store( numpy.ravel(FdX) ) diff --git a/src/daComposant/daAlgorithms/NonLinearLeastSquares.py b/src/daComposant/daAlgorithms/NonLinearLeastSquares.py index aaa8a2e..6060817 100644 --- a/src/daComposant/daAlgorithms/NonLinearLeastSquares.py +++ b/src/daComposant/daAlgorithms/NonLinearLeastSquares.py @@ -136,16 +136,16 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): def CostFunction(x): _X = numpy.asmatrix(numpy.ravel( x )).T if self._parameters["StoreInternalVariables"] or \ - "CurrentState" in self._parameters["StoreSupplementaryCalculations"] or \ - "CurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]: + self._toStore("CurrentState") or \ + self._toStore("CurrentOptimum"): self.StoredVariables["CurrentState"].store( _X ) _HX = Hm( _X ) _HX = numpy.asmatrix(numpy.ravel( _HX )).T _Innovation = Y - _HX - if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"] or \ - "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtCurrentState") or \ + self._toStore("SimulatedObservationAtCurrentOptimum"): self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX ) - if "InnovationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("InnovationAtCurrentState"): 
self.StoredVariables["InnovationAtCurrentState"].store( _Innovation ) # Jb = 0. @@ -155,24 +155,24 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): self.StoredVariables["CostFunctionJb"].store( Jb ) self.StoredVariables["CostFunctionJo"].store( Jo ) self.StoredVariables["CostFunctionJ" ].store( J ) - if "IndexOfOptimum" in self._parameters["StoreSupplementaryCalculations"] or \ - "CurrentOptimum" in self._parameters["StoreSupplementaryCalculations"] or \ - "CostFunctionJAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"] or \ - "CostFunctionJbAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"] or \ - "CostFunctionJoAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"] or \ - "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("IndexOfOptimum") or \ + self._toStore("CurrentOptimum") or \ + self._toStore("CostFunctionJAtCurrentOptimum") or \ + self._toStore("CostFunctionJbAtCurrentOptimum") or \ + self._toStore("CostFunctionJoAtCurrentOptimum") or \ + self._toStore("SimulatedObservationAtCurrentOptimum"): IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps - if "IndexOfOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("IndexOfOptimum"): self.StoredVariables["IndexOfOptimum"].store( IndexMin ) - if "CurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("CurrentOptimum"): self.StoredVariables["CurrentOptimum"].store( self.StoredVariables["CurrentState"][IndexMin] ) - if "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtCurrentOptimum"): self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] ) - if "CostFunctionJAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("CostFunctionJAtCurrentOptimum"): self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] ) - if "CostFunctionJbAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("CostFunctionJbAtCurrentOptimum"): self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJb"][IndexMin] ) - if "CostFunctionJoAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("CostFunctionJoAtCurrentOptimum"): self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJo"][IndexMin] ) return J # @@ -194,7 +194,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): Jo = float( 0.5 * _Innovation.T * RI * _Innovation ) J = Jb + Jo if self._parameters["StoreInternalVariables"] or \ - "CurrentState" in self._parameters["StoreSupplementaryCalculations"]: + self._toStore("CurrentState"): self.StoredVariables["CurrentState"].store( _X ) self.StoredVariables["CostFunctionJb"].store( Jb ) self.StoredVariables["CostFunctionJo"].store( Jo ) @@ -300,7 +300,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # # Correction pour pallier a un bug de TNC sur le retour du Minimum # ---------------------------------------------------------------- - if self._parameters["StoreInternalVariables"] or "CurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._parameters["StoreInternalVariables"] or 
self._toStore("CurrentState"): Minimum = self.StoredVariables["CurrentState"][IndexMin] # # Obtention de l'analyse @@ -309,11 +309,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # self.StoredVariables["Analysis"].store( Xa.A1 ) # - if "OMA" in self._parameters["StoreSupplementaryCalculations"] or \ - "SimulatedObservationAtOptimum" in self._parameters["StoreSupplementaryCalculations"]: - if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("OMA") or \ + self._toStore("SimulatedObservationAtOptimum"): + if self._toStore("SimulatedObservationAtCurrentState"): HXa = self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] - elif "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]: + elif self._toStore("SimulatedObservationAtCurrentOptimum"): HXa = self.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1] else: HXa = Hm(Xa) @@ -321,20 +321,19 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # # Calculs et/ou stockages supplémentaires # --------------------------------------- - if "Innovation" in self._parameters["StoreSupplementaryCalculations"] or \ - "OMB" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("Innovation") or self._toStore("OMB"): d = Y - HXb - if "Innovation" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("Innovation"): self.StoredVariables["Innovation"].store( numpy.ravel(d) ) - if "BMA" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("BMA"): self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) ) - if "OMA" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("OMA"): self.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) ) - if "OMB" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("OMB"): self.StoredVariables["OMB"].store( numpy.ravel(d) ) - if "SimulatedObservationAtBackground" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtBackground"): self.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(HXb) ) - if "SimulatedObservationAtOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtOptimum"): self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) ) # self._post_run(HO) diff --git a/src/daComposant/daAlgorithms/ParticleSwarmOptimization.py b/src/daComposant/daAlgorithms/ParticleSwarmOptimization.py index 78f2c32..9e9c4f1 100644 --- a/src/daComposant/daAlgorithms/ParticleSwarmOptimization.py +++ b/src/daComposant/daAlgorithms/ParticleSwarmOptimization.py @@ -213,7 +213,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): qBest = copy.copy( quality ) logging.debug("%s Initialisation, Insecte = %s, Qualité = %s"%(self._name, str(Best), str(qBest))) # - if self._parameters["StoreInternalVariables"] or "CurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._parameters["StoreInternalVariables"] or self._toStore("CurrentState"): self.StoredVariables["CurrentState"].store( Best ) self.StoredVariables["CostFunctionJb"].store( 0. ) self.StoredVariables["CostFunctionJo"].store( 0. 
) @@ -239,9 +239,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): qBest = copy.copy( quality ) logging.debug("%s Etape %i, Insecte = %s, Qualité = %s"%(self._name, n, str(Best), str(qBest))) # - if self._parameters["StoreInternalVariables"] or "CurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._parameters["StoreInternalVariables"] or self._toStore("CurrentState"): self.StoredVariables["CurrentState"].store( Best ) - if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtCurrentState"): _HmX = Hm( numpy.asmatrix(numpy.ravel( Best )).T ) _HmX = numpy.asmatrix(numpy.ravel( _HmX )).T self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HmX ) @@ -258,28 +258,28 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # self.StoredVariables["Analysis"].store( Xa.A1 ) # - if "Innovation" in self._parameters["StoreSupplementaryCalculations"] or \ - "OMB" in self._parameters["StoreSupplementaryCalculations"] or \ - "SimulatedObservationAtBackground" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("Innovation") or \ + self._toStore("OMB") or \ + self._toStore("SimulatedObservationAtBackground"): HXb = Hm(Xb) d = Y - HXb - if "OMA" in self._parameters["StoreSupplementaryCalculations"] or \ - "SimulatedObservationAtOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("OMA") or \ + self._toStore("SimulatedObservationAtOptimum"): HXa = Hm(Xa) # # Calculs et/ou stockages supplémentaires # --------------------------------------- - if "Innovation" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("Innovation"): self.StoredVariables["Innovation"].store( numpy.ravel(d) ) - if "BMA" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("BMA"): self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) ) - if "OMA" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("OMA"): self.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) ) - if "OMB" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("OMB"): self.StoredVariables["OMB"].store( numpy.ravel(d) ) - if "SimulatedObservationAtBackground" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtBackground"): self.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(HXb) ) - if "SimulatedObservationAtOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtOptimum"): self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) ) # self._post_run(HO) diff --git a/src/daComposant/daAlgorithms/QuantileRegression.py b/src/daComposant/daAlgorithms/QuantileRegression.py index 3cf9ab3..11cc832 100644 --- a/src/daComposant/daAlgorithms/QuantileRegression.py +++ b/src/daComposant/daAlgorithms/QuantileRegression.py @@ -102,11 +102,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # ------------------------------ def CostFunction(x): _X = numpy.asmatrix(numpy.ravel( x )).T - if self._parameters["StoreInternalVariables"] or "CurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._parameters["StoreInternalVariables"] or self._toStore("CurrentState"): self.StoredVariables["CurrentState"].store( _X ) _HX = Hm( _X ) _HX = numpy.asmatrix(numpy.ravel( _HX )).T - if "SimulatedObservationAtCurrentState" in 
self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtCurrentState"): self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX ) Jb = 0. Jo = 0. @@ -153,23 +153,23 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # self.StoredVariables["Analysis"].store( Xa.A1 ) # - if "OMA" in self._parameters["StoreSupplementaryCalculations"] or \ - "SimulatedObservationAtOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("OMA") or \ + self._toStore("SimulatedObservationAtOptimum"): HXa = Hm(Xa) # # Calculs et/ou stockages supplémentaires # --------------------------------------- - if "Innovation" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("Innovation"): self.StoredVariables["Innovation"].store( numpy.ravel(d) ) - if "BMA" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("BMA"): self.StoredVariables["BMA"].store( numpy.ravel(Xb - Xa) ) - if "OMA" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("OMA"): self.StoredVariables["OMA"].store( numpy.ravel(Y - HXa) ) - if "OMB" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("OMB"): self.StoredVariables["OMB"].store( numpy.ravel(d) ) - if "SimulatedObservationAtBackground" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtBackground"): self.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(HXb) ) - if "SimulatedObservationAtOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtOptimum"): self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) ) # self._post_run(HO) diff --git a/src/daComposant/daAlgorithms/SamplingTest.py b/src/daComposant/daAlgorithms/SamplingTest.py index 98a2d50..ab8b44c 100644 --- a/src/daComposant/daAlgorithms/SamplingTest.py +++ b/src/daComposant/daAlgorithms/SamplingTest.py @@ -154,11 +154,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): Jo = numpy.max( numpy.abs(Y - _HX) ) # J = float( Jb ) + float( Jo ) - if "CurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("CurrentState"): self.StoredVariables["CurrentState"].store( _X ) - if "InnovationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("InnovationAtCurrentState"): self.StoredVariables["InnovationAtCurrentState"].store( Y - _HX ) - if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtCurrentState"): self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX ) self.StoredVariables["CostFunctionJb"].store( Jb ) self.StoredVariables["CostFunctionJo"].store( Jo ) diff --git a/src/daComposant/daAlgorithms/TabuSearch.py b/src/daComposant/daAlgorithms/TabuSearch.py index 23a6c85..a683d5a 100644 --- a/src/daComposant/daAlgorithms/TabuSearch.py +++ b/src/daComposant/daAlgorithms/TabuSearch.py @@ -223,9 +223,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): if _qualityS < _qualityBest: _Best, _qualityBest = _S, _qualityS # - if self._parameters["StoreInternalVariables"] or "CurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._parameters["StoreInternalVariables"] or self._toStore("CurrentState"): self.StoredVariables["CurrentState"].store( _Best ) - if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if 
self._toStore("SimulatedObservationAtCurrentState"): _HmX = Hm( numpy.asmatrix(numpy.ravel( _Best )).T ) _HmX = numpy.asmatrix(numpy.ravel( _HmX )).T self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HmX ) @@ -239,28 +239,28 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # self.StoredVariables["Analysis"].store( Xa.A1 ) # - if "Innovation" in self._parameters["StoreSupplementaryCalculations"] or \ - "OMB" in self._parameters["StoreSupplementaryCalculations"] or \ - "SimulatedObservationAtBackground" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("Innovation") or \ + self._toStore("OMB") or \ + self._toStore("SimulatedObservationAtBackground"): HXb = Hm(Xb) d = Y - HXb - if "OMA" in self._parameters["StoreSupplementaryCalculations"] or \ - "SimulatedObservationAtOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("OMA") or \ + self._toStore("SimulatedObservationAtOptimum"): HXa = Hm(Xa) # # Calculs et/ou stockages supplémentaires # --------------------------------------- - if "Innovation" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("Innovation"): self.StoredVariables["Innovation"].store( numpy.ravel(d) ) - if "BMA" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("BMA"): self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) ) - if "OMA" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("OMA"): self.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) ) - if "OMB" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("OMB"): self.StoredVariables["OMB"].store( numpy.ravel(d) ) - if "SimulatedObservationAtBackground" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtBackground"): self.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(HXb) ) - if "SimulatedObservationAtOptimum" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtOptimum"): self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) ) # self._post_run(HO) diff --git a/src/daComposant/daAlgorithms/TangentTest.py b/src/daComposant/daAlgorithms/TangentTest.py index f2b1862..88389b9 100644 --- a/src/daComposant/daAlgorithms/TangentTest.py +++ b/src/daComposant/daAlgorithms/TangentTest.py @@ -105,9 +105,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): FX = numpy.asmatrix(numpy.ravel( Hm( Xn ) )).T NormeX = numpy.linalg.norm( Xn ) NormeFX = numpy.linalg.norm( FX ) - if "CurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("CurrentState"): self.StoredVariables["CurrentState"].store( numpy.ravel(Xn) ) - if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("SimulatedObservationAtCurrentState"): self.StoredVariables["SimulatedObservationAtCurrentState"].store( numpy.ravel(FX) ) # # Fabrication de la direction de l'increment dX diff --git a/src/daComposant/daAlgorithms/UnscentedKalmanFilter.py b/src/daComposant/daAlgorithms/UnscentedKalmanFilter.py index b2a8bed..50e22d9 100644 --- a/src/daComposant/daAlgorithms/UnscentedKalmanFilter.py +++ b/src/daComposant/daAlgorithms/UnscentedKalmanFilter.py @@ -154,7 +154,10 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # # Précalcul des inversions de B et R # ---------------------------------- - if self._parameters["StoreInternalVariables"]: + if 
self._parameters["StoreInternalVariables"] \ + or self._toStore("CostFunctionJ") \ + or self._toStore("CostFunctionJb") \ + or self._toStore("CostFunctionJo"): BI = B.getI() RI = R.getI() # @@ -167,7 +170,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): Pn = B # self.StoredVariables["Analysis"].store( Xn.A1 ) - if "APosterioriCovariance" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("APosterioriCovariance"): self.StoredVariables["APosterioriCovariance"].store( Pn ) covarianceXa = Pn Xa = Xn @@ -268,23 +271,27 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): Xn = numpy.min(numpy.hstack((Xn,numpy.asmatrix(self._parameters["Bounds"])[:,1])),axis=1) # self.StoredVariables["Analysis"].store( Xn.A1 ) - if "APosterioriCovariance" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("APosterioriCovariance"): self.StoredVariables["APosterioriCovariance"].store( Pn ) - if "Innovation" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("Innovation"): self.StoredVariables["Innovation"].store( numpy.ravel( d.A1 ) ) - if self._parameters["StoreInternalVariables"]: + if self._parameters["StoreInternalVariables"] \ + or self._toStore("CurrentState"): + self.StoredVariables["CurrentState"].store( Xn ) + if self._parameters["StoreInternalVariables"] \ + or self._toStore("CostFunctionJ") \ + or self._toStore("CostFunctionJb") \ + or self._toStore("CostFunctionJo"): Jb = 0.5 * (Xn - Xb).T * BI * (Xn - Xb) Jo = 0.5 * d.T * RI * d J = float( Jb ) + float( Jo ) - if self._parameters["StoreInternalVariables"] or "CurrentState" in self._parameters["StoreSupplementaryCalculations"]: - self.StoredVariables["CurrentState"].store( Xn ) self.StoredVariables["CostFunctionJb"].store( Jb ) self.StoredVariables["CostFunctionJo"].store( Jo ) self.StoredVariables["CostFunctionJ" ].store( J ) if J < previousJMinimum: previousJMinimum = J Xa = Xn - if "APosterioriCovariance" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("APosterioriCovariance"): covarianceXa = Pn else: Xa = Xn @@ -294,10 +301,10 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # ---------------------------------------------------------------- if self._parameters["EstimationOf"] == "Parameters": self.StoredVariables["Analysis"].store( Xa.A1 ) - if "APosterioriCovariance" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("APosterioriCovariance"): self.StoredVariables["APosterioriCovariance"].store( covarianceXa ) # - if "BMA" in self._parameters["StoreSupplementaryCalculations"]: + if self._toStore("BMA"): self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) ) # self._post_run(HO)