default = [],
typecast = tuple,
message = "Liste de calculs supplémentaires à stocker et/ou effectuer",
- listval = ["APosterioriCovariance", "BMA", "OMA", "OMB", "Innovation", "SigmaObs2"]
+ listval = ["APosterioriCovariance", "BMA", "OMA", "OMB", "Innovation", "SigmaObs2", "MahalanobisConsistency"]
)
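The new "MahalanobisConsistency" entry exposes the classical chi-square (Mahalanobis) check on the minimised cost function. As a reminder of the criterion it relies on (the standard linear-Gaussian result, assumed rather than enforced by the code), with p observations:

J(x) = \tfrac{1}{2}\,(x - x_b)^T B^{-1} (x - x_b) + \tfrac{1}{2}\,\big(y - H(x)\big)^T R^{-1} \big(y - H(x)\big),
\qquad \mathrm{MC} = \frac{2\,J(x_a)}{p} \approx 1

when B and R are consistent with the actual background and observation errors.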
def run(self, Xb=None, Y=None, H=None, M=None, R=None, B=None, Q=None, Parameters=None):
_HtEE = numpy.asmatrix(numpy.ravel( _HtEE )).T
HessienneI.append( ( BI*_ee + Ha((Xa,RI*_HtEE)) ).A1 )
HessienneI = numpy.matrix( HessienneI )
- if numpy.alltrue(numpy.isfinite( HessienneI )):
- A = HessienneI.I
- else:
- raise ValueError("The 3DVAR a posteriori covariance matrix A can not be calculated. Your problem is perhaps too non-linear?")
+ A = HessienneI.I
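# HessienneI has been filled column by column with the Hessian of J at the optimum,
# i.e. B^{-1} + H^T R^{-1} H for the linearised (tangent/adjoint) operator, and its
# inverse is taken as the a posteriori covariance A.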
if logging.getLogger().level < logging.WARNING: # La verification n'a lieu qu'en debug
try:
L = numpy.linalg.cholesky( A )
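# numpy.linalg.cholesky raises numpy.linalg.LinAlgError if A is not positive
# definite, which is what this debug-only verification relies on.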
self.StoredVariables["OMB"].store( numpy.ravel(d) )
if "SigmaObs2" in self._parameters["StoreSupplementaryCalculations"]:
self.StoredVariables["SigmaObs2"].store( float( (d.T * (Y-Hm(Xa))) / R.trace() ) )
+ if "MahalanobisConsistency" in self._parameters["StoreSupplementaryCalculations"]:
+ self.StoredVariables["MahalanobisConsistency"].store( float( 2.*MinJ/len(d) ) )
#
logging.debug("%s Taille mémoire utilisée de %.1f Mo"%(self._name, m.getUsedMemory("M")))
logging.debug("%s Terminé"%self._name)
default = [],
typecast = tuple,
message = "Liste de calculs supplémentaires à stocker et/ou effectuer",
- listval = ["APosterioriCovariance", "BMA", "OMA", "OMB", "Innovation", "SigmaBck2", "SigmaObs2"]
+ listval = ["APosterioriCovariance", "BMA", "OMA", "OMB", "Innovation", "SigmaBck2", "SigmaObs2", "MahalanobisConsistency"]
)
def run(self, Xb=None, Y=None, H=None, M=None, R=None, B=None, Q=None, Parameters=None):
self.StoredVariables["SigmaObs2"].store( float( (d.T * (Y-Hm*Xa)) / R.trace() ) )
if "SigmaBck2" in self._parameters["StoreSupplementaryCalculations"]:
self.StoredVariables["SigmaBck2"].store( float( (d.T * Hm * (Xa - Xb))/(Hm * B * Hm.T).trace() ) )
+ if "MahalanobisConsistency" in self._parameters["StoreSupplementaryCalculations"]:
+ self.StoredVariables["MahalanobisConsistency"].store( float( 2.*J/len(d) ) )
#
logging.debug("%s Taille mémoire utilisée de %.1f Mo"%(self._name, m.getUsedMemory("M")))
logging.debug("%s Terminé"%self._name)
- CurrentState : état courant lors d'itérations
- Analysis : l'analyse
- Innovation : l'innovation : d = Y - H Xb
- - SigmaObs2 : correction optimale des erreurs d'observation
- - SigmaBck2 : correction optimale des erreurs d'ébauche
+ - SigmaObs2 : indicateur de correction optimale des erreurs d'observation
+ - SigmaBck2 : indicateur de correction optimale des erreurs d'ébauche
+ - MahalanobisConsistency : indicateur de consistance des covariances
- OMA : Observation moins Analysis : Y - H Xa
- OMB : Observation moins Background : Y - H Xb
- BMA : Background moins Analysis : Xb - Xa
self.StoredVariables["Innovation"] = Persistence.OneVector(name = "Innovation")
self.StoredVariables["SigmaObs2"] = Persistence.OneScalar(name = "SigmaObs2")
self.StoredVariables["SigmaBck2"] = Persistence.OneScalar(name = "SigmaBck2")
+ self.StoredVariables["MahalanobisConsistency"] = Persistence.OneScalar(name = "MahalanobisConsistency")
self.StoredVariables["OMA"] = Persistence.OneVector(name = "OMA")
self.StoredVariables["OMB"] = Persistence.OneVector(name = "OMB")
self.StoredVariables["BMA"] = Persistence.OneVector(name = "BMA")