From 78b7b5f34873270077e325a43a5d2501a190475c Mon Sep 17 00:00:00 2001
From: Jean-Philippe ARGAUD
Date: Fri, 16 Nov 2012 22:22:42 +0100
Subject: [PATCH] Correcting treatments of array/matrix sizes

---
 doc/glossary.rst                             |  6 +++---
 src/daComposant/daAlgorithms/3DVAR.py        | 20 +++++++++++++-------
 src/daComposant/daAlgorithms/Blue.py         | 14 ++++++++++----
 src/daComposant/daAlgorithms/EnsembleBlue.py |  6 +++---
 4 files changed, 29 insertions(+), 17 deletions(-)

diff --git a/doc/glossary.rst b/doc/glossary.rst
index 8a2253a..81f0654 100644
--- a/doc/glossary.rst
+++ b/doc/glossary.rst
@@ -44,16 +44,16 @@ Glossary
    SigmaBck2
       Keyword to indicate the Desroziers-Ivanov parameter measuring the
       background part consistency of the data assimilation optimal state
-      estimation.
+      estimation. It can be compared to 1.
 
    SigmaObs2
       Keyword to indicate the Desroziers-Ivanov parameter measuring the
       observation part consistency of the data assimilation optimal state
-      estimation.
+      estimation. It can be compared to 1.
 
    MahalanobisConsistency
       Keyword to indicate the Mahalanobis parameter measuring the consistency of
-      the data assimilation optimal state estimation.
+      the data assimilation optimal state estimation. It can be compared to 1.
 
    analysis
       The optimal state estimation through a data assimilation or optimization

diff --git a/src/daComposant/daAlgorithms/3DVAR.py b/src/daComposant/daAlgorithms/3DVAR.py
index e288f18..7650d80 100644
--- a/src/daComposant/daAlgorithms/3DVAR.py
+++ b/src/daComposant/daAlgorithms/3DVAR.py
@@ -258,11 +258,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         # ---------------------------------
         if "APosterioriCovariance" in self._parameters["StoreSupplementaryCalculations"]:
             HtM = H["Tangent"].asMatrix(ValueForMethodForm = Xa)
-            HtM = HtM.reshape(len(Y),len(Xa.A1)) # ADAO & check shape
+            HtM = HtM.reshape(Y.size,Xa.size) # ADAO & check shape
             HaM = H["Adjoint"].asMatrix(ValueForMethodForm = Xa)
-            HaM = HaM.reshape(len(Xa.A1),len(Y)) # ADAO & check shape
+            HaM = HaM.reshape(Xa.size,Y.size) # ADAO & check shape
             HessienneI = []
-            nb = len(Xa.A1)
+            nb = Xa.size
             for i in range(nb):
                 _ee = numpy.matrix(numpy.zeros(nb)).T
                 _ee[i] = 1.
@@ -272,12 +272,14 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             HessienneI = numpy.matrix( HessienneI )
             A = HessienneI.I
             if min(A.shape) != max(A.shape):
-                raise ValueError("The 3DVAR a posteriori covariance matrix A is of shape %s, despites it has to be a squared matrix. There is an error in the observation operator."%str(A.shape))
+                raise ValueError("The %s a posteriori covariance matrix A is of shape %s, whereas it has to be a square matrix. There is an error in the observation operator, please check it."%(self._name,str(A.shape)))
+            if (numpy.diag(A) < 0).any():
+                raise ValueError("The %s a posteriori covariance matrix A has at least one negative value on its diagonal. There is an error in the observation operator, please check it."%(self._name,))
             if logging.getLogger().level < logging.WARNING: # La verification n'a lieu qu'en debug
                 try:
                     L = numpy.linalg.cholesky( A )
                 except:
-                    raise ValueError("The 3DVAR a posteriori covariance matrix A is not symmetric positive-definite. Check your B and R a priori covariances.")
+                    raise ValueError("The %s a posteriori covariance matrix A is not symmetric positive-definite. Please check your a priori covariances and your observation operator."%(self._name,))
             self.StoredVariables["APosterioriCovariance"].store( A )
         #
         # Calculs et/ou stockages supplémentaires
@@ -291,9 +293,13 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         if "OMB" in self._parameters["StoreSupplementaryCalculations"]:
             self.StoredVariables["OMB"].store( numpy.ravel(d) )
         if "SigmaObs2" in self._parameters["StoreSupplementaryCalculations"]:
-            self.StoredVariables["SigmaObs2"].store( float( (d.T * (Y-Hm(Xa))) / R.trace() ) )
+            if R is not None:
+                TraceR = R.trace()
+            elif self._parameters["R_scalar"] is not None:
+                TraceR = float(self._parameters["R_scalar"]*Y.size)
+            self.StoredVariables["SigmaObs2"].store( float( (d.T * (numpy.asmatrix(numpy.ravel(Y)).T-numpy.asmatrix(numpy.ravel(Hm(Xa))).T)) ) / TraceR )
         if "MahalanobisConsistency" in self._parameters["StoreSupplementaryCalculations"]:
-            self.StoredVariables["MahalanobisConsistency"].store( float( 2.*MinJ/len(d) ) )
+            self.StoredVariables["MahalanobisConsistency"].store( float( 2.*MinJ/d.size ) )
         #
         logging.debug("%s Taille mémoire utilisée de %.1f Mo"%(self._name, m.getUsedMemory("M")))
         logging.debug("%s Terminé"%self._name)

diff --git a/src/daComposant/daAlgorithms/Blue.py b/src/daComposant/daAlgorithms/Blue.py
index 7c43874..79f4d65 100644
--- a/src/daComposant/daAlgorithms/Blue.py
+++ b/src/daComposant/daAlgorithms/Blue.py
@@ -110,12 +110,14 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         if "APosterioriCovariance" in self._parameters["StoreSupplementaryCalculations"]:
             A = B - K * Hm * B
             if min(A.shape) != max(A.shape):
-                raise ValueError("The 3DVAR a posteriori covariance matrix A is of shape %s, despites it has to be a squared matrix. There is an error in the observation operator."%str(A.shape))
+                raise ValueError("The %s a posteriori covariance matrix A is of shape %s, whereas it has to be a square matrix. There is an error in the observation operator, please check it."%(self._name,str(A.shape)))
+            if (numpy.diag(A) < 0).any():
+                raise ValueError("The %s a posteriori covariance matrix A has at least one negative value on its diagonal. There is an error in the observation operator, please check it."%(self._name,))
             if logging.getLogger().level < logging.WARNING: # La verification n'a lieu qu'en debug
                 try:
                     L = numpy.linalg.cholesky( A )
                 except:
-                    raise ValueError("The BLUE a posteriori covariance matrix A is not symmetric positive-definite. Check your B and R a priori covariances.")
+                    raise ValueError("The %s a posteriori covariance matrix A is not symmetric positive-definite. Please check your a priori covariances and your observation operator."%(self._name,))
             self.StoredVariables["APosterioriCovariance"].store( A )
         #
         # Calculs et/ou stockages supplémentaires
@@ -129,11 +131,15 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         if "OMB" in self._parameters["StoreSupplementaryCalculations"]:
             self.StoredVariables["OMB"].store( numpy.ravel(d) )
         if "SigmaObs2" in self._parameters["StoreSupplementaryCalculations"]:
-            self.StoredVariables["SigmaObs2"].store( float( (d.T * (Y-Hm*Xa)) / R.trace() ) )
+            if R is not None:
+                TraceR = R.trace()
+            elif self._parameters["R_scalar"] is not None:
+                TraceR = float(self._parameters["R_scalar"]*Y.size)
+            self.StoredVariables["SigmaObs2"].store( float( (d.T * (numpy.asmatrix(numpy.ravel(oma)).T)) ) / TraceR )
         if "SigmaBck2" in self._parameters["StoreSupplementaryCalculations"]:
             self.StoredVariables["SigmaBck2"].store( float( (d.T * Hm * (Xa - Xb))/(Hm * B * Hm.T).trace() ) )
         if "MahalanobisConsistency" in self._parameters["StoreSupplementaryCalculations"]:
-            self.StoredVariables["MahalanobisConsistency"].store( float( 2.*J/len(d) ) )
+            self.StoredVariables["MahalanobisConsistency"].store( float( 2.*J/d.size ) )
         #
         logging.debug("%s Taille mémoire utilisée de %.1f Mo"%(self._name, m.getUsedMemory("M")))
         logging.debug("%s Terminé"%self._name)

diff --git a/src/daComposant/daAlgorithms/EnsembleBlue.py b/src/daComposant/daAlgorithms/EnsembleBlue.py
index 5b25f69..9fc1281 100644
--- a/src/daComposant/daAlgorithms/EnsembleBlue.py
+++ b/src/daComposant/daAlgorithms/EnsembleBlue.py
@@ -69,8 +69,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         # de la diagonale de R
         # --------------------------------------------------------------------
         DiagonaleR = numpy.diag(R)
-        EnsembleY = numpy.zeros([len(Y),nb_ens])
-        for npar in range(len(DiagonaleR)) :
+        EnsembleY = numpy.zeros([Y.size,nb_ens])
+        for npar in range(DiagonaleR.size) :
             bruit = numpy.random.normal(0,DiagonaleR[npar],nb_ens)
             EnsembleY[npar,:] = Y[npar] + bruit
         EnsembleY = numpy.matrix(EnsembleY)
@@ -84,7 +84,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         # ------------------------------------------------------------------------
         if Y.size <= Xb.valueserie(0).size:
             if self._parameters["R_scalar"] is not None:
-                R = self._parameters["R_scalar"] * numpy.eye(len(Y), dtype=numpy.float)
+                R = self._parameters["R_scalar"] * numpy.eye(Y.size, dtype=numpy.float)
             K = B * Ha * (Hm * B * Ha + R).I
         else:
             K = (Ha * RI * Hm + BI).I * Ha * RI
-- 
2.30.2
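
Note, not part of the patch: a minimal standalone sketch, assuming only numpy and
using purely illustrative variable names, of why len() is replaced by .size on
matrix objects above. For a numpy.matrix, len() only reports the first dimension,
so it silently returns 1 for a row-shaped vector, while .size always returns the
total number of elements.

    import numpy

    Y_col = numpy.matrix([[1.], [2.], [3.]])   # observation vector as a column, shape (3, 1)
    Y_row = numpy.matrix([1., 2., 3.])         # same data as a row, shape (1, 3)

    assert len(Y_col) == 3 and Y_col.size == 3   # len() happens to agree here
    assert len(Y_row) == 1 and Y_row.size == 3   # len() only sees the first dimension

    # Scalar R fallback used for TraceR in the patch: when R is sigma2 times the
    # identity, its trace is simply sigma2 * Y.size.
    sigma2 = 0.5
    TraceR = float(sigma2 * Y_row.size)
    assert TraceR == numpy.trace(sigma2 * numpy.eye(Y_row.size))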