From 271156222dbc54d911a7017c36dcaede9555b00a Mon Sep 17 00:00:00 2001
From: Jean-Philippe ARGAUD
Date: Thu, 11 Oct 2012 18:34:28 +0200
Subject: [PATCH] Adding Mahalanobis Consistency indicator

---
 src/daComposant/daAlgorithms/3DVAR.py  | 9 ++++-----
 src/daComposant/daAlgorithms/Blue.py   | 4 +++-
 src/daComposant/daCore/BasicObjects.py | 6 ++++--
 3 files changed, 11 insertions(+), 8 deletions(-)

diff --git a/src/daComposant/daAlgorithms/3DVAR.py b/src/daComposant/daAlgorithms/3DVAR.py
index 28a8af9..18d7ef2 100644
--- a/src/daComposant/daAlgorithms/3DVAR.py
+++ b/src/daComposant/daAlgorithms/3DVAR.py
@@ -84,7 +84,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             default  = [],
             typecast = tuple,
             message  = "Liste de calculs supplémentaires à stocker et/ou effectuer",
-            listval  = ["APosterioriCovariance", "BMA", "OMA", "OMB", "Innovation", "SigmaObs2"]
+            listval  = ["APosterioriCovariance", "BMA", "OMA", "OMB", "Innovation", "SigmaObs2", "MahalanobisConsistency"]
             )
 
     def run(self, Xb=None, Y=None, H=None, M=None, R=None, B=None, Q=None, Parameters=None):
@@ -266,10 +266,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 _HtEE = numpy.asmatrix(numpy.ravel( _HtEE )).T
                 HessienneI.append( ( BI*_ee + Ha((Xa,RI*_HtEE)) ).A1 )
             HessienneI = numpy.matrix( HessienneI )
-            if numpy.alltrue(numpy.isfinite( HessienneI )):
-                A = HessienneI.I
-            else:
-                raise ValueError("The 3DVAR a posteriori covariance matrix A can not be calculated. Your problem is perhaps too non-linear?")
+            A = HessienneI.I
             if logging.getLogger().level < logging.WARNING: # La verification n'a lieu qu'en debug
                 try:
                     L = numpy.linalg.cholesky( A )
@@ -289,6 +286,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             self.StoredVariables["OMB"].store( numpy.ravel(d) )
         if "SigmaObs2" in self._parameters["StoreSupplementaryCalculations"]:
             self.StoredVariables["SigmaObs2"].store( float( (d.T * (Y-Hm(Xa))) / R.trace() ) )
+        if "MahalanobisConsistency" in self._parameters["StoreSupplementaryCalculations"]:
+            self.StoredVariables["MahalanobisConsistency"].store( float( 2.*MinJ/len(d) ) )
         #
         # logging.debug("%s Taille mémoire utilisée de %.1f Mo"%(self._name, m.getUsedMemory("M")))
         logging.debug("%s Terminé"%self._name)
diff --git a/src/daComposant/daAlgorithms/Blue.py b/src/daComposant/daAlgorithms/Blue.py
index 6fa42d3..b2a426e 100644
--- a/src/daComposant/daAlgorithms/Blue.py
+++ b/src/daComposant/daAlgorithms/Blue.py
@@ -35,7 +35,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             default  = [],
             typecast = tuple,
             message  = "Liste de calculs supplémentaires à stocker et/ou effectuer",
-            listval  = ["APosterioriCovariance", "BMA", "OMA", "OMB", "Innovation", "SigmaBck2", "SigmaObs2"]
+            listval  = ["APosterioriCovariance", "BMA", "OMA", "OMB", "Innovation", "SigmaBck2", "SigmaObs2", "MahalanobisConsistency"]
             )
 
     def run(self, Xb=None, Y=None, H=None, M=None, R=None, B=None, Q=None, Parameters=None):
@@ -130,6 +130,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             self.StoredVariables["SigmaObs2"].store( float( (d.T * (Y-Hm*Xa)) / R.trace() ) )
         if "SigmaBck2" in self._parameters["StoreSupplementaryCalculations"]:
             self.StoredVariables["SigmaBck2"].store( float( (d.T * Hm * (Xa - Xb))/(Hm * B * Hm.T).trace() ) )
+        if "MahalanobisConsistency" in self._parameters["StoreSupplementaryCalculations"]:
+            self.StoredVariables["MahalanobisConsistency"].store( float( 2.*J/len(d) ) )
         #
         # logging.debug("%s Taille mémoire utilisée de %.1f Mo"%(self._name, m.getUsedMemory("M")))
         logging.debug("%s Terminé"%self._name)
diff --git a/src/daComposant/daCore/BasicObjects.py b/src/daComposant/daCore/BasicObjects.py
index d2cd22c..b6c9e68 100644
--- a/src/daComposant/daCore/BasicObjects.py
+++ b/src/daComposant/daCore/BasicObjects.py
@@ -141,8 +141,9 @@ class Algorithm:
             - CurrentState : état courant lors d'itérations
             - Analysis : l'analyse
             - Innovation : l'innovation : d = Y - H Xb
-            - SigmaObs2 : correction optimale des erreurs d'observation
-            - SigmaBck2 : correction optimale des erreurs d'ébauche
+            - SigmaObs2 : indicateur de correction optimale des erreurs d'observation
+            - SigmaBck2 : indicateur de correction optimale des erreurs d'ébauche
+            - MahalanobisConsistency : indicateur de consistance des covariances
             - OMA : Observation moins Analysis : Y - Xa
             - OMB : Observation moins Background : Y - Xb
             - AMB : Analysis moins Background : Xa - Xb
@@ -167,6 +168,7 @@ class Algorithm:
         self.StoredVariables["Innovation"] = Persistence.OneVector(name = "Innovation")
         self.StoredVariables["SigmaObs2"] = Persistence.OneScalar(name = "SigmaObs2")
         self.StoredVariables["SigmaBck2"] = Persistence.OneScalar(name = "SigmaBck2")
+        self.StoredVariables["MahalanobisConsistency"] = Persistence.OneScalar(name = "MahalanobisConsistency")
         self.StoredVariables["OMA"] = Persistence.OneVector(name = "OMA")
         self.StoredVariables["OMB"] = Persistence.OneVector(name = "OMB")
         self.StoredVariables["BMA"] = Persistence.OneVector(name = "BMA")
-- 
2.39.2