From: Jean-Philippe ARGAUD
Date: Tue, 13 Oct 2015 14:07:34 +0000 (+0200)
Subject: Documentation and source minor corrections for observers
X-Git-Tag: V7_7_0rc1~1
X-Git-Url: http://git.salome-platform.org/gitweb/?a=commitdiff_plain;h=df3ce121ee60b4b0b4f79d158133d067a0ecc445;p=modules%2Fadao.git

Documentation and source minor corrections for observers
---

diff --git a/doc/en/ref_algorithm_DerivativeFreeOptimization.rst b/doc/en/ref_algorithm_DerivativeFreeOptimization.rst
index 2152a06..86ff57d 100644
--- a/doc/en/ref_algorithm_DerivativeFreeOptimization.rst
+++ b/doc/en/ref_algorithm_DerivativeFreeOptimization.rst
@@ -167,10 +167,12 @@ The options of the algorithm are the following:
     calculations or memory consumptions. The default is a void list, none of
     these variables being calculated and stored by default. The possible
     names are in the following list: ["CurrentState", "CostFunctionJ",
-    "SimulatedObservationAtBackground", "SimulatedObservationAtCurrentState",
-    "SimulatedObservationAtOptimum"].
+    "CostFunctionJAtCurrentOptimum", "CurrentOptimum", "IndexOfOptimum",
+    "InnovationAtCurrentState", "BMA", "OMA", "OMB",
+    "SimulatedObservationAtBackground", "SimulatedObservationAtCurrentOptimum",
+    "SimulatedObservationAtCurrentState", "SimulatedObservationAtOptimum"].
 
-    Example : ``{"StoreSupplementaryCalculations":["CurrentState", "CostFunctionJ"]}``
+    Example : ``{"StoreSupplementaryCalculations":["BMA", "Innovation"]}``
 
 Information and variables available at the end of the algorithm
 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
diff --git a/doc/fr/ref_algorithm_DerivativeFreeOptimization.rst b/doc/fr/ref_algorithm_DerivativeFreeOptimization.rst
index 48e2f37..d571016 100644
--- a/doc/fr/ref_algorithm_DerivativeFreeOptimization.rst
+++ b/doc/fr/ref_algorithm_DerivativeFreeOptimization.rst
@@ -170,10 +170,12 @@ Les options de l'algorithme sont les suivantes:
     calculs ou du stockage coûteux. La valeur par défaut est une liste vide,
     aucune de ces variables n'étant calculée et stockée par défaut. Les noms
     possibles sont dans la liste suivante : ["CurrentState", "CostFunctionJ",
-    "SimulatedObservationAtBackground", "SimulatedObservationAtCurrentState",
-    "SimulatedObservationAtOptimum"].
+    "CostFunctionJAtCurrentOptimum", "CurrentOptimum", "IndexOfOptimum",
+    "InnovationAtCurrentState", "BMA", "OMA", "OMB",
+    "SimulatedObservationAtBackground", "SimulatedObservationAtCurrentOptimum",
+    "SimulatedObservationAtCurrentState", "SimulatedObservationAtOptimum"].
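The storable names added above follow the conventions used throughout this commit: "BMA" is background minus analysis, "OMB" is observation minus background (the innovation d = Y - H(Xb)), and "OMA" is observation minus analysis. A minimal sketch of what these diagnostics contain, assuming a toy linear observation operator and made-up vectors (none of these arrays comes from the ADAO sources):

    import numpy

    H  = numpy.eye(3)                  # illustrative linear observation operator
    Xb = numpy.array([1.0, 2.0, 3.0])  # background state (assumed)
    Xa = numpy.array([1.1, 1.9, 3.2])  # analysis from some algorithm (assumed)
    Y  = numpy.array([1.2, 2.1, 2.9])  # observations (assumed)

    BMA = Xb - Xa          # "BMA": Background minus Analysis
    OMB = Y - H.dot(Xb)    # "OMB": Observation minus Background, the innovation d
    OMA = Y - H.dot(Xa)    # "OMA": Observation minus Analysis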
-    Exemple : ``{"StoreSupplementaryCalculations":["CurrentState", "CostFunctionJ"]}``
+    Exemple : ``{"StoreSupplementaryCalculations":["BMA", "Innovation"]}``
 
 Informations et variables disponibles à la fin de l'algorithme
 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
diff --git a/src/daComposant/daAlgorithms/3DVAR.py b/src/daComposant/daAlgorithms/3DVAR.py
index f63f190..05cf2e9 100644
--- a/src/daComposant/daAlgorithms/3DVAR.py
+++ b/src/daComposant/daAlgorithms/3DVAR.py
@@ -134,21 +134,17 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         Hm = HO["Direct"].appliedTo
         Ha = HO["Adjoint"].appliedInXTo
         #
-        # Utilisation éventuelle d'un vecteur H(Xb) précalculé
-        # ----------------------------------------------------
+        # Utilisation éventuelle d'un vecteur H(Xb) précalculé (sans cout)
+        # ----------------------------------------------------------------
         if HO["AppliedToX"] is not None and HO["AppliedToX"].has_key("HXb"):
             HXb = HO["AppliedToX"]["HXb"]
         else:
             HXb = Hm( Xb )
         HXb = numpy.asmatrix(numpy.ravel( HXb )).T
-        #
-        # Calcul de l'innovation
-        # ----------------------
         if Y.size != HXb.size:
             raise ValueError("The size %i of observations Y and %i of observed calculation H(X) are different, they have to be identical."%(Y.size,HXb.size))
         if max(Y.shape) != max(HXb.shape):
             raise ValueError("The shapes %s of observations Y and %s of observed calculation H(X) are different, they have to be identical."%(Y.shape,HXb.shape))
-        d = Y - HXb
         #
         # Précalcul des inversions de B et R
         # ----------------------------------
@@ -170,10 +166,12 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
                 self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX )
             if "InnovationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]:
-                self.StoredVariables["InnovationAtCurrentState"].store( _HX )
+                self.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
+            #
             Jb = 0.5 * (_X - Xb).T * BI * (_X - Xb)
             Jo = 0.5 * _Innovation.T * RI * _Innovation
             J  = float( Jb ) + float( Jo )
+            #
             self.StoredVariables["CostFunctionJb"].store( Jb )
             self.StoredVariables["CostFunctionJo"].store( Jo )
             self.StoredVariables["CostFunctionJ" ].store( J )
@@ -205,10 +203,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         #
         # Point de démarrage de l'optimisation : Xini = Xb
         # ------------------------------------
-        if type(Xb) is type(numpy.matrix([])):
-            Xini = Xb.A1.tolist()
-        else:
-            Xini = list(Xb)
+        Xini = numpy.ravel(Xb)
         #
         # Minimisation de la fonctionnelle
         # --------------------------------
@@ -332,6 +327,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         #
         # Calculs et/ou stockages supplémentaires
         # ---------------------------------------
+        if "Innovation" in self._parameters["StoreSupplementaryCalculations"] or \
+           "OMB" in self._parameters["StoreSupplementaryCalculations"] or \
+           "SigmaObs2" in self._parameters["StoreSupplementaryCalculations"] or \
+           "MahalanobisConsistency" in self._parameters["StoreSupplementaryCalculations"]:
+            d = Y - HXb
         if "Innovation" in self._parameters["StoreSupplementaryCalculations"]:
             self.StoredVariables["Innovation"].store( numpy.ravel(d) )
         if "BMA" in self._parameters["StoreSupplementaryCalculations"]:
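Two things happen in the 3DVAR hunks above: the observer-visible bug is fixed ("InnovationAtCurrentState" used to store the simulated observation _HX instead of the innovation), and the innovation d = Y - HXb is now computed only when a stored quantity actually depends on it. A minimal sketch of that guard pattern, where `requested` and `storage` are illustrative stand-ins for ADAO's StoreSupplementaryCalculations list and StoredVariables registry:

    import numpy

    def store_innovation_diagnostics(Y, HXb, requested, storage):
        # Compute d = Y - H(Xb) only if one of the quantities that
        # depends on it was requested, as in the patched 3DVAR.
        needs_d = ("Innovation", "OMB", "SigmaObs2", "MahalanobisConsistency")
        if any(name in requested for name in needs_d):
            d = Y - HXb
            if "Innovation" in requested:
                storage["Innovation"] = numpy.ravel(d)
            if "OMB" in requested:
                storage["OMB"] = numpy.ravel(d)
        return storage

    diagnostics = store_innovation_diagnostics(
        numpy.array([1.2, 2.1]), numpy.array([1.0, 2.0]), ["OMB"], {})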
diff --git a/src/daComposant/daAlgorithms/Blue.py b/src/daComposant/daAlgorithms/Blue.py
index 0ad3195..0fe119a 100644
--- a/src/daComposant/daAlgorithms/Blue.py
+++ b/src/daComposant/daAlgorithms/Blue.py
@@ -76,20 +76,24 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         # ----------------------
         self.setParameters(Parameters)
         #
-        # Opérateur d'observation
-        # -----------------------
+        # Opérateurs
+        # ----------
         Hm = HO["Tangent"].asMatrix(Xb)
         Hm = Hm.reshape(Y.size,Xb.size) # ADAO & check shape
         Ha = HO["Adjoint"].asMatrix(Xb)
         Ha = Ha.reshape(Xb.size,Y.size) # ADAO & check shape
         #
-        # Utilisation éventuelle d'un vecteur H(Xb) précalculé
-        # ----------------------------------------------------
+        # Utilisation éventuelle d'un vecteur H(Xb) précalculé (sans cout)
+        # ----------------------------------------------------------------
         if HO["AppliedToX"] is not None and HO["AppliedToX"].has_key("HXb"):
             HXb = HO["AppliedToX"]["HXb"]
         else:
             HXb = Hm * Xb
         HXb = numpy.asmatrix(numpy.ravel( HXb )).T
+        if Y.size != HXb.size:
+            raise ValueError("The size %i of observations Y and %i of observed calculation H(X) are different, they have to be identical."%(Y.size,HXb.size))
+        if max(Y.shape) != max(HXb.shape):
+            raise ValueError("The shapes %s of observations Y and %s of observed calculation H(X) are different, they have to be identical."%(Y.shape,HXb.shape))
         #
         # Précalcul des inversions de B et R
         # ----------------------------------
@@ -98,10 +102,6 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         #
         # Calcul de l'innovation
         # ----------------------
-        if Y.size != HXb.size:
-            raise ValueError("The size %i of observations Y and %i of observed calculation H(X) are different, they have to be identical."%(Y.size,HXb.size))
-        if max(Y.shape) != max(HXb.shape):
-            raise ValueError("The shapes %s of observations Y and %s of observed calculation H(X) are different, they have to be identical."%(Y.shape,HXb.shape))
         d = Y - HXb
         #
         # Calcul de la matrice de gain et de l'analyse
@@ -131,9 +131,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         if self._parameters["StoreInternalVariables"] or \
            "CostFunctionJ" in self._parameters["StoreSupplementaryCalculations"] or \
            "MahalanobisConsistency" in self._parameters["StoreSupplementaryCalculations"]:
+            #
             Jb = 0.5 * (Xa - Xb).T * BI * (Xa - Xb)
             Jo = 0.5 * oma.T * RI * oma
             J  = float( Jb ) + float( Jo )
+            #
             self.StoredVariables["CostFunctionJb"].store( Jb )
             self.StoredVariables["CostFunctionJo"].store( Jo )
             self.StoredVariables["CostFunctionJ" ].store( J )
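The Blue change is a pure reordering: the size and shape consistency checks now run as soon as H(Xb) is available, before the inversions of B and R are precomputed, so an inconsistent case fails fast instead of after the expensive work. The check itself, extracted as a standalone sketch (the helper name is illustrative, the messages are taken from the source):

    import numpy

    def check_observation_consistency(Y, HXb):
        # Fail fast if observations and simulated observations do not match,
        # mirroring the checks the patch moves ahead of the B/R inversions.
        if Y.size != HXb.size:
            raise ValueError("The size %i of observations Y and %i of observed calculation H(X) are different, they have to be identical."%(Y.size,HXb.size))
        if max(Y.shape) != max(HXb.shape):
            raise ValueError("The shapes %s of observations Y and %s of observed calculation H(X) are different, they have to be identical."%(Y.shape,HXb.shape))

    check_observation_consistency(numpy.ones((3,1)), numpy.ones((3,1)))  # passes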
self._parameters["StoreInternalVariables"] # # Opérateurs # ---------- @@ -115,34 +113,53 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): self.StoredVariables["CurrentState"].store( _X ) _HX = Hm( _X ) _HX = numpy.asmatrix(numpy.ravel( _HX )).T - if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]: + _Innovation = Y - _HX + if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"] or \ + "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]: self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX ) + if "InnovationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]: + self.StoredVariables["InnovationAtCurrentState"].store( _Innovation ) # if QualityMeasure in ["AugmentedWeightedLeastSquares","AWLS","DA"]: if BI is None or RI is None: raise ValueError("Background and Observation error covariance matrix has to be properly defined!") Jb = 0.5 * (_X - Xb).T * BI * (_X - Xb) - Jo = 0.5 * (Y - _HX).T * RI * (Y - _HX) + Jo = 0.5 * (_Innovation).T * RI * (_Innovation) elif QualityMeasure in ["WeightedLeastSquares","WLS"]: if RI is None: raise ValueError("Observation error covariance matrix has to be properly defined!") Jb = 0. - Jo = 0.5 * (Y - _HX).T * RI * (Y - _HX) + Jo = 0.5 * (_Innovation).T * RI * (_Innovation) elif QualityMeasure in ["LeastSquares","LS","L2"]: Jb = 0. - Jo = 0.5 * (Y - _HX).T * (Y - _HX) + Jo = 0.5 * (_Innovation).T * (_Innovation) elif QualityMeasure in ["AbsoluteValue","L1"]: Jb = 0. - Jo = numpy.sum( numpy.abs(Y - _HX) ) + Jo = numpy.sum( numpy.abs(_Innovation) ) elif QualityMeasure in ["MaximumError","ME"]: Jb = 0. - Jo = numpy.max( numpy.abs(Y - _HX) ) + Jo = numpy.max( numpy.abs(_Innovation) ) # J = float( Jb ) + float( Jo ) # self.StoredVariables["CostFunctionJb"].store( Jb ) self.StoredVariables["CostFunctionJo"].store( Jo ) self.StoredVariables["CostFunctionJ" ].store( J ) + if "IndexOfOptimum" in self._parameters["StoreSupplementaryCalculations"] or \ + "CurrentOptimum" in self._parameters["StoreSupplementaryCalculations"] or \ + "CostFunctionJAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"] or \ + "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]: + IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps + if "IndexOfOptimum" in self._parameters["StoreSupplementaryCalculations"]: + self.StoredVariables["IndexOfOptimum"].store( IndexMin ) + if "CurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]: + self.StoredVariables["CurrentOptimum"].store( self.StoredVariables["CurrentState"][IndexMin] ) + if "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]: + self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] ) + if "CostFunctionJAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]: + self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJb"][IndexMin] ) + self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJo"][IndexMin] ) + self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] ) return J # # Point de démarrage de l'optimisation : Xini = Xb @@ -190,10 +207,27 @@ class 
@@ -190,10 +207,27 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         #
         self.StoredVariables["Analysis"].store( Xa.A1 )
         #
+        if "OMA" in self._parameters["StoreSupplementaryCalculations"] or \
+           "SimulatedObservationAtOptimum" in self._parameters["StoreSupplementaryCalculations"]:
+            if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]:
+                HXa = self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin]
+            elif "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
+                HXa = self.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
+            else:
+                HXa = Hm(Xa)
+        #
+        if "Innovation" in self._parameters["StoreSupplementaryCalculations"]:
+            self.StoredVariables["Innovation"].store( numpy.ravel(d) )
+        if "OMB" in self._parameters["StoreSupplementaryCalculations"]:
+            self.StoredVariables["OMB"].store( numpy.ravel(d) )
+        if "BMA" in self._parameters["StoreSupplementaryCalculations"]:
+            self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
+        if "OMA" in self._parameters["StoreSupplementaryCalculations"]:
+            self.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
         if "SimulatedObservationAtBackground" in self._parameters["StoreSupplementaryCalculations"]:
             self.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(Hm(Xb)) )
         if "SimulatedObservationAtOptimum" in self._parameters["StoreSupplementaryCalculations"]:
-            self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(Hm(Xa)) )
+            self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) )
         #
         self._post_run()
         return 0
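The last hunk recovers H(Xa) from the per-iterate simulations already stored during the optimization, and pays one extra evaluation of Hm only when nothing was stored. The same argmin-based reuse, reduced to a sketch with made-up histories (all values below are assumptions for illustration):

    import numpy

    # Assumed stand-ins for StoredVariables contents after a run.
    cost_J     = [3.2, 1.7, 0.9, 1.1]                                 # CostFunctionJ history
    HX_history = [numpy.array([1.0 + k, 2.0 - k]) for k in range(4)]  # per-iterate H(X)
    Y          = numpy.array([3.1, 0.8])                              # observations (assumed)

    IndexMin = numpy.argmin(cost_J)   # index of the best iterate, as in the patch
    HXa = HX_history[IndexMin]        # H(Xa) recovered without re-running Hm
    OMA = Y - HXa                     # "OMA" diagnostic at the optimum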