From c7132e7011656e1892230dbe6686179ef14c2757 Mon Sep 17 00:00:00 2001 From: Jean-Philippe ARGAUD Date: Thu, 29 Mar 2012 15:13:12 +0200 Subject: [PATCH] Improvements of inversions in algorithms --- src/daComposant/daAlgorithms/3DVAR.py | 10 ++--- src/daComposant/daAlgorithms/Blue.py | 45 ++++++++++++++----- .../daAlgorithms/LinearLeastSquares.py | 22 ++++++++- .../daAlgorithms/NonLinearLeastSquares.py | 10 ++--- 4 files changed, 66 insertions(+), 21 deletions(-) diff --git a/src/daComposant/daAlgorithms/3DVAR.py b/src/daComposant/daAlgorithms/3DVAR.py index 4812750..8be3e3a 100644 --- a/src/daComposant/daAlgorithms/3DVAR.py +++ b/src/daComposant/daAlgorithms/3DVAR.py @@ -73,8 +73,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): d = Y - HXb logging.debug("%s Innovation d = %s"%(self._name, d)) # - # Précalcul des inversion appellée dans les fonction-coût et gradient - # ------------------------------------------------------------------- + # Précalcul des inversions de B et R + # ---------------------------------- if B is not None: BI = B.I elif Parameters["B_scalar"] is not None: @@ -95,9 +95,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): Jb = 0.5 * (_X - Xb).T * BI * (_X - Xb) Jo = 0.5 * (Y - _HX).T * RI * (Y - _HX) J = float( Jb ) + float( Jo ) - logging.info("%s CostFunction Jb = %s"%(self._name, Jb)) - logging.info("%s CostFunction Jo = %s"%(self._name, Jo)) - logging.info("%s CostFunction J = %s"%(self._name, J)) + logging.debug("%s CostFunction Jb = %s"%(self._name, Jb)) + logging.debug("%s CostFunction Jo = %s"%(self._name, Jo)) + logging.debug("%s CostFunction J = %s"%(self._name, J)) self.StoredVariables["CurrentState"].store( _X.A1 ) self.StoredVariables["CostFunctionJb"].store( Jb ) self.StoredVariables["CostFunctionJo"].store( Jo ) diff --git a/src/daComposant/daAlgorithms/Blue.py b/src/daComposant/daAlgorithms/Blue.py index ae15c23..8bc8842 100644 --- a/src/daComposant/daAlgorithms/Blue.py +++ b/src/daComposant/daAlgorithms/Blue.py @@ -52,28 +52,53 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): HXb = Hm * Xb HXb = numpy.asmatrix(HXb).flatten().T # - # Calcul de la matrice de gain dans l'espace le plus petit - # -------------------------------------------------------- - if Y.size <= Xb.size: - logging.debug("%s Calcul de K dans l'espace des observations"%self._name) - K = B * Ht * (Hm * B * Ht + R).I - else: - logging.debug("%s Calcul de K dans l'espace d'ébauche"%self._name) - K = (Ht * R.I * Hm + B.I).I * Ht * R.I + # Précalcul des inversions de B et R + # ---------------------------------- + if B is not None: + BI = B.I + elif Parameters["B_scalar"] is not None: + BI = 1.0 / Parameters["B_scalar"] + B = Parameters["B_scalar"] + if R is not None: + RI = R.I + elif Parameters["R_scalar"] is not None: + RI = 1.0 / Parameters["R_scalar"] + R = Parameters["R_scalar"] # - # Calcul de l'innovation et de l'analyse - # -------------------------------------- + # Calcul de l'innovation + # ---------------------- if Y.size != HXb.size: raise ValueError("The size %i of observations Y and %i of observed calculation H(X) are different, they have to be identical."%(Y.size,HXb.size)) if max(Y.shape) != max(HXb.shape): raise ValueError("The shapes %s of observations Y and %s of observed calculation H(X) are different, they have to be identical."%(Y.shape,HXb.shape)) d = Y - HXb logging.debug("%s Innovation d = %s"%(self._name, d)) + # + # Calcul de la matrice de gain dans l'espace le plus petit et de l'analyse + # 
------------------------------------------------------------------------ + if Y.size <= Xb.size: + logging.debug("%s Calcul de K dans l'espace des observations"%self._name) + K = B * Ht * (Hm * B * Ht + R).I + else: + logging.debug("%s Calcul de K dans l'espace d'ébauche"%self._name) + K = (Ht * RI * Hm + BI).I * Ht * RI Xa = Xb + K*d logging.debug("%s Analyse Xa = %s"%(self._name, Xa)) # + # Calcul de la fonction coût + # -------------------------- + Jb = 0.5 * (Xa - Xb).T * BI * (Xa - Xb) + Jo = 0.5 * d.T * RI * d + J = float( Jb ) + float( Jo ) + logging.debug("%s CostFunction Jb = %s"%(self._name, Jb)) + logging.debug("%s CostFunction Jo = %s"%(self._name, Jo)) + logging.debug("%s CostFunction J = %s"%(self._name, J)) + # self.StoredVariables["Analysis"].store( Xa.A1 ) self.StoredVariables["Innovation"].store( d.A1 ) + self.StoredVariables["CostFunctionJb"].store( Jb ) + self.StoredVariables["CostFunctionJo"].store( Jo ) + self.StoredVariables["CostFunctionJ" ].store( J ) # logging.debug("%s Taille mémoire utilisée de %.1f Mo"%(self._name, m.getUsedMemory("MB"))) logging.debug("%s Terminé"%self._name) diff --git a/src/daComposant/daAlgorithms/LinearLeastSquares.py b/src/daComposant/daAlgorithms/LinearLeastSquares.py index e7cff56..ba60f1c 100644 --- a/src/daComposant/daAlgorithms/LinearLeastSquares.py +++ b/src/daComposant/daAlgorithms/LinearLeastSquares.py @@ -40,10 +40,30 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): Hm = H["Direct"].asMatrix() Ht = H["Adjoint"].asMatrix() # - K = (Ht * R.I * Hm ).I * Ht * R.I + if R is not None: + RI = R.I + elif Parameters["R_scalar"] is not None: + RI = 1.0 / Parameters["R_scalar"] + # + K = (Ht * RI * Hm ).I * Ht * RI Xa = K * Y + logging.debug("%s Analyse Xa = %s"%(self._name, Xa)) + # + # Calcul de la fonction coût + # -------------------------- + d = Y - Hm * Xa + Jb = 0. + Jo = 0.5 * d.T * RI * d + J = float( Jb ) + float( Jo ) + logging.debug("%s CostFunction Jb = %s"%(self._name, Jb)) + logging.debug("%s CostFunction Jo = %s"%(self._name, Jo)) + logging.debug("%s CostFunction J = %s"%(self._name, J)) # self.StoredVariables["Analysis"].store( Xa.A1 ) + self.StoredVariables["Innovation"].store( d.A1 ) + self.StoredVariables["CostFunctionJb"].store( Jb ) + self.StoredVariables["CostFunctionJo"].store( Jo ) + self.StoredVariables["CostFunctionJ" ].store( J ) # logging.debug("%s Taille mémoire utilisée de %.1f Mo"%(self._name, m.getUsedMemory("Mo"))) logging.debug("%s Terminé"%self._name) diff --git a/src/daComposant/daAlgorithms/NonLinearLeastSquares.py b/src/daComposant/daAlgorithms/NonLinearLeastSquares.py index 27528d4..a01ad3c 100644 --- a/src/daComposant/daAlgorithms/NonLinearLeastSquares.py +++ b/src/daComposant/daAlgorithms/NonLinearLeastSquares.py @@ -74,8 +74,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): d = Y - HXb logging.debug("%s Innovation d = %s"%(self._name, d)) # - # Précalcul des inversion appellée dans les fonction-coût et gradient - # ------------------------------------------------------------------- + # Précalcul des inversions de B et R + # ---------------------------------- # if B is not None: # BI = B.I # elif Parameters["B_scalar"] is not None: @@ -96,9 +96,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): Jb = 0. 
Jo = 0.5 * (Y - _HX).T * RI * (Y - _HX) J = float( Jb ) + float( Jo ) - logging.info("%s CostFunction Jb = %s"%(self._name, Jb)) - logging.info("%s CostFunction Jo = %s"%(self._name, Jo)) - logging.info("%s CostFunction J = %s"%(self._name, J)) + logging.debug("%s CostFunction Jb = %s"%(self._name, Jb)) + logging.debug("%s CostFunction Jo = %s"%(self._name, Jo)) + logging.debug("%s CostFunction J = %s"%(self._name, J)) self.StoredVariables["CurrentState"].store( _X.A1 ) self.StoredVariables["CostFunctionJb"].store( Jb ) self.StoredVariables["CostFunctionJo"].store( Jo ) -- 2.39.2
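
Note on the gain computation in Blue.py: the hunk keeps two algebraically equivalent expressions for the gain K and picks the one that inverts the smaller matrix, either of the size of the observation space or of the background space. The following is a minimal, self-contained NumPy sketch of that equivalence; the dimensions and the matrices Hm, Ht, B, R below are made-up assumptions for illustration, not values or classes from the ADAO code.

    import numpy

    numpy.random.seed(0)
    n, p = 5, 3                                 # state size n, observation size p
    Hm = numpy.matrix(numpy.random.rand(p, n))  # direct (tangent) observation operator
    Ht = Hm.T                                   # adjoint of a linear operator is its transpose
    B  = numpy.matrix(numpy.eye(n))             # background error covariance
    R  = numpy.matrix(0.5 * numpy.eye(p))       # observation error covariance
    BI, RI = B.I, R.I

    K_obs   = B * Ht * (Hm * B * Ht + R).I      # inverts a p x p matrix (observation space)
    K_state = (Ht * RI * Hm + BI).I * Ht * RI   # inverts an n x n matrix (background space)

    # Both expressions yield the same gain; choosing the smaller space
    # only changes the size of the matrix that has to be inverted.
    assert numpy.allclose(K_obs, K_state)

This is why the patched code tests Y.size <= Xb.size before choosing the formulation: with few observations the observation-space inversion is cheaper, otherwise the background-space one is.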
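
Note on the scalar-covariance shortcut (BI = 1.0 / Parameters["B_scalar"], RI = 1.0 / Parameters["R_scalar"]): when B or R is supplied as a scalar b, it stands for b times the identity matrix, so in the quadratic forms of the cost function its inverse reduces to a multiplication by 1/b. A small sketch under that assumption, with hypothetical values and plain NumPy rather than the ADAO parameter handling:

    import numpy

    xb = numpy.matrix([[1.0], [2.0], [3.0]])        # hypothetical background state
    x  = numpy.matrix([[1.5], [1.5], [3.5]])        # hypothetical current state
    b  = 2.0                                        # scalar background variance: B = b * I

    BI_matrix = (b * numpy.matrix(numpy.eye(3))).I  # explicit inverse of b * I
    Jb_matrix = 0.5 * float((x - xb).T * BI_matrix * (x - xb))
    Jb_scalar = 0.5 * float((x - xb).T * (1.0 / b) * (x - xb))  # scalar shortcut

    assert abs(Jb_matrix - Jb_scalar) < 1e-12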
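
Note on LinearLeastSquares.py: the analysis is now computed as Xa = (Ht * RI * Hm).I * Ht * RI * Y and the cost function is stored with Jb = 0, since no background term enters this algorithm. A stand-alone weighted least-squares sketch of that estimate follows; the sizes and data are illustrative assumptions, not the ADAO classes or test cases.

    import numpy

    numpy.random.seed(1)
    Hm = numpy.matrix(numpy.random.rand(6, 2))  # observation operator: 6 observations, 2 unknowns
    Ht = Hm.T
    RI = 4.0 * numpy.matrix(numpy.eye(6))       # R^-1 for R = 0.25 * I
    Y  = numpy.matrix(numpy.random.rand(6, 1))  # observation vector

    K  = (Ht * RI * Hm).I * Ht * RI             # pseudo-inverse weighted by R^-1
    Xa = K * Y                                  # least-squares analysis
    d  = Y - Hm * Xa                            # residual at the analysis, stored as Innovation

    Jb = 0.0                                    # no background term in this algorithm
    Jo = 0.5 * float(d.T * RI * d)              # observation misfit
    J  = Jb + Jo
    print("Xa =", Xa.A1, " J =", J)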