calculations or memory consumption. The default is an empty list, none of
these variables being calculated and stored by default. The possible names
are in the following list: ["CurrentState", "CostFunctionJ",
- "SimulatedObservationAtBackground", "SimulatedObservationAtCurrentState",
- "SimulatedObservationAtOptimum"].
+ "CostFunctionJAtCurrentOptimum", "CurrentOptimum", "IndexOfOptimum",
+ "InnovationAtCurrentState", "BMA", "OMA", "OMB",
+ "SimulatedObservationAtBackground", "SimulatedObservationAtCurrentOptimum",
+ "SimulatedObservationAtCurrentState", "SimulatedObservationAtOptimum"].
- Example : ``{"StoreSupplementaryCalculations":["CurrentState", "CostFunctionJ"]}``
+ Example : ``{"StoreSupplementaryCalculations":["BMA", "Innovation"]}``
Information and variables available at the end of the algorithm
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
calculs ou du stockage coûteux. La valeur par défaut est une liste vide,
aucune de ces variables n'étant calculée et stockée par défaut. Les noms
possibles sont dans la liste suivante : ["CurrentState", "CostFunctionJ",
- "SimulatedObservationAtBackground", "SimulatedObservationAtCurrentState",
- "SimulatedObservationAtOptimum"].
+ "CostFunctionJAtCurrentOptimum", "CurrentOptimum", "IndexOfOptimum",
+ "InnovationAtCurrentState", "BMA", "OMA", "OMB",
+ "SimulatedObservationAtBackground", "SimulatedObservationAtCurrentOptimum",
+ "SimulatedObservationAtCurrentState", "SimulatedObservationAtOptimum"].
- Exemple : ``{"StoreSupplementaryCalculations":["CurrentState", "CostFunctionJ"]}``
+ Exemple : ``{"StoreSupplementaryCalculations":["BMA", "Innovation"]}``
Informations et variables disponibles à la fin de l'algorithme
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
Hm = HO["Direct"].appliedTo
Ha = HO["Adjoint"].appliedInXTo
#
- # Utilisation éventuelle d'un vecteur H(Xb) précalculé
- # ----------------------------------------------------
+ # Utilisation éventuelle d'un vecteur H(Xb) précalculé (sans cout)
+ # ----------------------------------------------------------------
if HO["AppliedToX"] is not None and HO["AppliedToX"].has_key("HXb"):
HXb = HO["AppliedToX"]["HXb"]
else:
HXb = Hm( Xb )
HXb = numpy.asmatrix(numpy.ravel( HXb )).T
- #
- # Calcul de l'innovation
- # ----------------------
if Y.size != HXb.size:
raise ValueError("The size %i of observations Y and %i of observed calculation H(X) are different, they have to be identical."%(Y.size,HXb.size))
if max(Y.shape) != max(HXb.shape):
raise ValueError("The shapes %s of observations Y and %s of observed calculation H(X) are different, they have to be identical."%(Y.shape,HXb.shape))
- d = Y - HXb
#
# Précalcul des inversions de B et R
# ----------------------------------
"SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX )
if "InnovationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]:
- self.StoredVariables["InnovationAtCurrentState"].store( _HX )
+ self.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
+ #
Jb = 0.5 * (_X - Xb).T * BI * (_X - Xb)
Jo = 0.5 * _Innovation.T * RI * _Innovation
J = float( Jb ) + float( Jo )
+ #
self.StoredVariables["CostFunctionJb"].store( Jb )
self.StoredVariables["CostFunctionJo"].store( Jo )
self.StoredVariables["CostFunctionJ" ].store( J )
#
# Point de démarrage de l'optimisation : Xini = Xb
# ------------------------------------
- if type(Xb) is type(numpy.matrix([])):
- Xini = Xb.A1.tolist()
- else:
- Xini = list(Xb)
+ Xini = numpy.ravel(Xb)
#
# Minimisation de la fonctionnelle
# --------------------------------
#
# Calculs et/ou stockages supplémentaires
# ---------------------------------------
+ if "Innovation" in self._parameters["StoreSupplementaryCalculations"] or \
+ "OMB" in self._parameters["StoreSupplementaryCalculations"] or \
+ "SigmaObs2" in self._parameters["StoreSupplementaryCalculations"] or \
+ "MahalanobisConsistency" in self._parameters["StoreSupplementaryCalculations"]:
+ d = Y - HXb
if "Innovation" in self._parameters["StoreSupplementaryCalculations"]:
self.StoredVariables["Innovation"].store( numpy.ravel(d) )
if "BMA" in self._parameters["StoreSupplementaryCalculations"]:
# ----------------------
self.setParameters(Parameters)
#
- # Opérateur d'observation
- # -----------------------
+ # Opérateurs
+ # ----------
Hm = HO["Tangent"].asMatrix(Xb)
Hm = Hm.reshape(Y.size,Xb.size) # ADAO & check shape
Ha = HO["Adjoint"].asMatrix(Xb)
Ha = Ha.reshape(Xb.size,Y.size) # ADAO & check shape
#
- # Utilisation éventuelle d'un vecteur H(Xb) précalculé
- # ----------------------------------------------------
+ # Utilisation éventuelle d'un vecteur H(Xb) précalculé (sans cout)
+ # ----------------------------------------------------------------
if HO["AppliedToX"] is not None and HO["AppliedToX"].has_key("HXb"):
HXb = HO["AppliedToX"]["HXb"]
else:
HXb = Hm * Xb
HXb = numpy.asmatrix(numpy.ravel( HXb )).T
+ if Y.size != HXb.size:
+ raise ValueError("The size %i of observations Y and %i of observed calculation H(X) are different, they have to be identical."%(Y.size,HXb.size))
+ if max(Y.shape) != max(HXb.shape):
+ raise ValueError("The shapes %s of observations Y and %s of observed calculation H(X) are different, they have to be identical."%(Y.shape,HXb.shape))
#
# Précalcul des inversions de B et R
# ----------------------------------
#
# Calcul de l'innovation
# ----------------------
- if Y.size != HXb.size:
- raise ValueError("The size %i of observations Y and %i of observed calculation H(X) are different, they have to be identical."%(Y.size,HXb.size))
- if max(Y.shape) != max(HXb.shape):
- raise ValueError("The shapes %s of observations Y and %s of observed calculation H(X) are different, they have to be identical."%(Y.shape,HXb.shape))
d = Y - HXb
#
# Calcul de la matrice de gain et de l'analyse
if self._parameters["StoreInternalVariables"] or \
"CostFunctionJ" in self._parameters["StoreSupplementaryCalculations"] or \
"MahalanobisConsistency" in self._parameters["StoreSupplementaryCalculations"]:
+ #
Jb = 0.5 * (Xa - Xb).T * BI * (Xa - Xb)
Jo = 0.5 * oma.T * RI * oma
J = float( Jb ) + float( Jo )
+ #
self.StoredVariables["CostFunctionJb"].store( Jb )
self.StoredVariables["CostFunctionJo"].store( Jo )
self.StoredVariables["CostFunctionJ" ].store( J )
default = [],
typecast = tuple,
message = "Liste de calculs supplémentaires à stocker et/ou effectuer",
- listval = ["CurrentState", "CostFunctionJ", "SimulatedObservationAtBackground", "SimulatedObservationAtCurrentState", "SimulatedObservationAtOptimum"]
+ listval = ["CurrentState", "CostFunctionJ", "CostFunctionJAtCurrentOptimum", "CurrentOptimum", "IndexOfOptimum", "InnovationAtCurrentState", "BMA", "OMA", "OMB", "SimulatedObservationAtBackground", "SimulatedObservationAtCurrentOptimum", "SimulatedObservationAtCurrentState", "SimulatedObservationAtOptimum"]
)
def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
# Paramètres de pilotage
# ----------------------
self.setParameters(Parameters)
-# self.setParameterValue("StoreInternalVariables",True)
-# print self._parameters["StoreInternalVariables"]
#
# Opérateurs
# ----------
self.StoredVariables["CurrentState"].store( _X )
_HX = Hm( _X )
_HX = numpy.asmatrix(numpy.ravel( _HX )).T
- if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]:
+ _Innovation = Y - _HX
+ if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"] or \
+ "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX )
+ if "InnovationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]:
+ self.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
#
if QualityMeasure in ["AugmentedWeightedLeastSquares","AWLS","DA"]:
if BI is None or RI is None:
raise ValueError("Background and Observation error covariance matrix has to be properly defined!")
Jb = 0.5 * (_X - Xb).T * BI * (_X - Xb)
- Jo = 0.5 * (Y - _HX).T * RI * (Y - _HX)
+ Jo = 0.5 * (_Innovation).T * RI * (_Innovation)
elif QualityMeasure in ["WeightedLeastSquares","WLS"]:
if RI is None:
raise ValueError("Observation error covariance matrix has to be properly defined!")
Jb = 0.
- Jo = 0.5 * (Y - _HX).T * RI * (Y - _HX)
+ Jo = 0.5 * (_Innovation).T * RI * (_Innovation)
elif QualityMeasure in ["LeastSquares","LS","L2"]:
Jb = 0.
- Jo = 0.5 * (Y - _HX).T * (Y - _HX)
+ Jo = 0.5 * (_Innovation).T * (_Innovation)
elif QualityMeasure in ["AbsoluteValue","L1"]:
Jb = 0.
- Jo = numpy.sum( numpy.abs(Y - _HX) )
+ Jo = numpy.sum( numpy.abs(_Innovation) )
elif QualityMeasure in ["MaximumError","ME"]:
Jb = 0.
- Jo = numpy.max( numpy.abs(Y - _HX) )
+ Jo = numpy.max( numpy.abs(_Innovation) )
#
J = float( Jb ) + float( Jo )
#
self.StoredVariables["CostFunctionJb"].store( Jb )
self.StoredVariables["CostFunctionJo"].store( Jo )
self.StoredVariables["CostFunctionJ" ].store( J )
+ if "IndexOfOptimum" in self._parameters["StoreSupplementaryCalculations"] or \
+ "CurrentOptimum" in self._parameters["StoreSupplementaryCalculations"] or \
+ "CostFunctionJAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"] or \
+ "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
+ IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
+ if "IndexOfOptimum" in self._parameters["StoreSupplementaryCalculations"]:
+ self.StoredVariables["IndexOfOptimum"].store( IndexMin )
+ if "CurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
+ self.StoredVariables["CurrentOptimum"].store( self.StoredVariables["CurrentState"][IndexMin] )
+ if "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
+ self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
+ if "CostFunctionJAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
+ self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJb"][IndexMin] )
+ self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJo"][IndexMin] )
+ self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] )
return J
#
# Point de démarrage de l'optimisation : Xini = Xb
#
self.StoredVariables["Analysis"].store( Xa.A1 )
#
+ if "OMA" in self._parameters["StoreSupplementaryCalculations"] or \
+ "SimulatedObservationAtOptimum" in self._parameters["StoreSupplementaryCalculations"]:
+ if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]:
+ HXa = self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin]
+ elif "SimulatedObservationAtCurrentOptimum" in self._parameters["StoreSupplementaryCalculations"]:
+ HXa = self.StoredVariables["SimulatedObservationAtCurrentOptimum"][-1]
+ else:
+ HXa = Hm(Xa)
+ #
+ if "Innovation" in self._parameters["StoreSupplementaryCalculations"]:
+ self.StoredVariables["Innovation"].store( numpy.ravel(d) )
+ if "OMB" in self._parameters["StoreSupplementaryCalculations"]:
+ self.StoredVariables["OMB"].store( numpy.ravel(d) )
+ if "BMA" in self._parameters["StoreSupplementaryCalculations"]:
+ self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
+ if "OMA" in self._parameters["StoreSupplementaryCalculations"]:
+ self.StoredVariables["OMA"].store( numpy.ravel(Y) - numpy.ravel(HXa) )
if "SimulatedObservationAtBackground" in self._parameters["StoreSupplementaryCalculations"]:
self.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(Hm(Xb)) )
if "SimulatedObservationAtOptimum" in self._parameters["StoreSupplementaryCalculations"]:
- self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(Hm(Xa)) )
+ self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) )
#
self._post_run()
return 0