SALOME platform Git repositories - modules/adao.git/commitdiff
Improvement of documentation and variables output for filters
author     Jean-Philippe ARGAUD <jean-philippe.argaud@edf.fr>
           Sun, 24 Mar 2019 18:10:15 +0000 (19:10 +0100)
committer  Jean-Philippe ARGAUD <jean-philippe.argaud@edf.fr>
           Sun, 24 Mar 2019 18:10:15 +0000 (19:10 +0100)
15 files changed:
doc/en/ref_algorithm_EnsembleKalmanFilter.rst
doc/en/ref_algorithm_ExtendedKalmanFilter.rst
doc/en/ref_algorithm_KalmanFilter.rst
doc/en/snippets/InnovationAtCurrentAnalysis.rst [new file with mode: 0644]
doc/en/snippets/SimulatedObservationAtCurrentAnalysis.rst [new file with mode: 0644]
doc/fr/ref_algorithm_EnsembleKalmanFilter.rst
doc/fr/ref_algorithm_ExtendedKalmanFilter.rst
doc/fr/ref_algorithm_KalmanFilter.rst
doc/fr/snippets/InnovationAtCurrentAnalysis.rst [new file with mode: 0644]
doc/fr/snippets/SimulatedObservationAtCurrentAnalysis.rst [new file with mode: 0644]
src/daComposant/daAlgorithms/EnsembleKalmanFilter.py
src/daComposant/daAlgorithms/ExtendedKalmanFilter.py
src/daComposant/daAlgorithms/KalmanFilter.py
src/daComposant/daAlgorithms/UnscentedKalmanFilter.py
src/daComposant/daCore/BasicObjects.py

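This commit adds two storable output variables, "InnovationAtCurrentAnalysis" and
"SimulatedObservationAtCurrentAnalysis", to the Kalman filter algorithms. Below is a
minimal usage sketch, assuming the ADAO textual interface (adaoBuilder) with purely
illustrative values; it is not part of this changeset. The new variables are requested
through "StoreSupplementaryCalculations" and read back after execution, in the same way
the documentation snippets below do with ADD.get(...).

    # Minimal sketch (assumed adaoBuilder usage, illustrative values only)
    from adao import adaoBuilder
    case = adaoBuilder.New()
    case.set( 'AlgorithmParameters',
        Algorithm  = 'KalmanFilter',
        Parameters = {
            "StoreSupplementaryCalculations":[
                "InnovationAtCurrentAnalysis",
                "SimulatedObservationAtCurrentAnalysis",
                ],
            },
        )
    case.set( 'Background',          Vector = [0., 0.] )
    case.set( 'BackgroundError',     ScalarSparseMatrix = 1. )
    case.set( 'Observation',         VectorSerie = [[0.5, 1.5], [1.0, 2.0]] )
    case.set( 'ObservationError',    ScalarSparseMatrix = 0.1 )
    case.set( 'ObservationOperator', Matrix = [[1., 0.], [0., 1.]] )
    case.set( 'EvolutionModel',      Matrix = [[1., 0.], [0., 1.]] )
    case.set( 'EvolutionError',      ScalarSparseMatrix = 0.01 )
    case.execute()
    # Both variables are stored at each analysis step; [-1] is the last one
    ds  = case.get("InnovationAtCurrentAnalysis")[-1]
    hxs = case.get("SimulatedObservationAtCurrentAnalysis")[-1]
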
diff --git a/doc/en/ref_algorithm_EnsembleKalmanFilter.rst b/doc/en/ref_algorithm_EnsembleKalmanFilter.rst
index 0497cfac7e90006455604503a8ae93fb7ebe8fda..02e49c2169b9ab6ad4b5fb1a1544041f10352ef7 100644 (file)
@@ -96,8 +96,10 @@ StoreSupplementaryCalculations
   "CurrentOptimum",
   "CurrentState",
   "IndexOfOptimum",
+  "InnovationAtCurrentAnalysis",
   "InnovationAtCurrentState",
   "PredictedState",
+  "SimulatedObservationAtCurrentAnalysis",
   "SimulatedObservationAtCurrentOptimum",
   "SimulatedObservationAtCurrentState",
   ].
@@ -143,10 +145,14 @@ StoreSupplementaryCalculations
 
 .. include:: snippets/IndexOfOptimum.rst
 
+.. include:: snippets/InnovationAtCurrentAnalysis.rst
+
 .. include:: snippets/InnovationAtCurrentState.rst
 
 .. include:: snippets/PredictedState.rst
 
+.. include:: snippets/SimulatedObservationAtCurrentAnalysis.rst
+
 .. include:: snippets/SimulatedObservationAtCurrentOptimum.rst
 
 .. include:: snippets/SimulatedObservationAtCurrentState.rst
diff --git a/doc/en/ref_algorithm_ExtendedKalmanFilter.rst b/doc/en/ref_algorithm_ExtendedKalmanFilter.rst
index 2a082a7b308cbd683526dda15e4b192eb3d38220..979254dbceb2ffa4e1d60e8ea6ccf131a75f1491 100644 (file)
@@ -90,8 +90,10 @@ StoreSupplementaryCalculations
   "CurrentOptimum",
   "CurrentState",
   "IndexOfOptimum",
+  "InnovationAtCurrentAnalysis",
   "InnovationAtCurrentState",
   "PredictedState",
+  "SimulatedObservationAtCurrentAnalysis",
   "SimulatedObservationAtCurrentOptimum",
   "SimulatedObservationAtCurrentState",
   ].
@@ -137,10 +139,14 @@ StoreSupplementaryCalculations
 
 .. include:: snippets/IndexOfOptimum.rst
 
+.. include:: snippets/InnovationAtCurrentAnalysis.rst
+
 .. include:: snippets/InnovationAtCurrentState.rst
 
 .. include:: snippets/PredictedState.rst
 
+.. include:: snippets/SimulatedObservationAtCurrentAnalysis.rst
+
 .. include:: snippets/SimulatedObservationAtCurrentOptimum.rst
 
 .. include:: snippets/SimulatedObservationAtCurrentState.rst
diff --git a/doc/en/ref_algorithm_KalmanFilter.rst b/doc/en/ref_algorithm_KalmanFilter.rst
index 7c52db11245d869ce91b62dbf2ad677e294bee48..811ddffb1ff806dedebf4f2415f6d1597b4fbc76 100644 (file)
@@ -89,8 +89,10 @@ StoreSupplementaryCalculations
   "CurrentOptimum",
   "CurrentState",
   "IndexOfOptimum",
+  "InnovationAtCurrentAnalysis",
   "InnovationAtCurrentState",
   "PredictedState",
+  "SimulatedObservationAtCurrentAnalysis",
   "SimulatedObservationAtCurrentOptimum",
   "SimulatedObservationAtCurrentState",
   ].
@@ -136,10 +138,14 @@ StoreSupplementaryCalculations
 
 .. include:: snippets/IndexOfOptimum.rst
 
+.. include:: snippets/InnovationAtCurrentAnalysis.rst
+
 .. include:: snippets/InnovationAtCurrentState.rst
 
 .. include:: snippets/PredictedState.rst
 
+.. include:: snippets/SimulatedObservationAtCurrentAnalysis.rst
+
 .. include:: snippets/SimulatedObservationAtCurrentOptimum.rst
 
 .. include:: snippets/SimulatedObservationAtCurrentState.rst
diff --git a/doc/en/snippets/InnovationAtCurrentAnalysis.rst b/doc/en/snippets/InnovationAtCurrentAnalysis.rst
new file mode 100644 (file)
index 0000000..60249a5
--- /dev/null
@@ -0,0 +1,9 @@
+.. index:: single: InnovationAtCurrentAnalysis
+
+InnovationAtCurrentAnalysis
+  *List of vectors*. Each element is an innovation vector at current analysis.
+  This quantity is identical to the innovation vector at current state in the
+  case of a single-state assimilation.
+
+  Example:
+  ``ds = ADD.get("InnovationAtCurrentAnalysis")[-1]``
diff --git a/doc/en/snippets/SimulatedObservationAtCurrentAnalysis.rst b/doc/en/snippets/SimulatedObservationAtCurrentAnalysis.rst
new file mode 100644 (file)
index 0000000..3687599
--- /dev/null
@@ -0,0 +1,10 @@
+.. index:: single: SimulatedObservationAtCurrentAnalysis
+
+SimulatedObservationAtCurrentAnalysis
+  *List of vectors*. Each element is an observed vector simulated by the
+  observation operator from the current analysis, that is, in the observation
+  space. This quantity is identical to the observed vector simulated at
+  current state in the case of a single-state assimilation.
+
+  Example:
+  ``hxs = ADD.get("SimulatedObservationAtCurrentAnalysis")[-1]``
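The two new snippets (English here, French further down) describe paired quantities: by
construction in the algorithm changes below, the innovation at the current analysis is the
observation minus the observation simulated from that analysis. A tiny self-contained
illustration with hypothetical values, assuming a linear observation operator:

    # Hypothetical values; relation d = y - H(xa) as used in the algorithms below
    import numpy
    H   = numpy.array([[1., 0.], [0., 1.]])    # linear observation operator (assumed)
    xa  = numpy.array([0.8, 1.9])              # current analysis
    y   = numpy.array([1.0, 2.0])              # observation at the same step
    hxa = numpy.dot(H, xa)                     # SimulatedObservationAtCurrentAnalysis
    d   = y - hxa                              # InnovationAtCurrentAnalysis
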
diff --git a/doc/fr/ref_algorithm_EnsembleKalmanFilter.rst b/doc/fr/ref_algorithm_EnsembleKalmanFilter.rst
index 6b55b69c5ddcf3ae3664e3d0382923e25b4f07ed..1049d1fb7d1a1d666b8705444cdf75d739e71d87 100644 (file)
@@ -97,8 +97,10 @@ StoreSupplementaryCalculations
   "CurrentOptimum",
   "CurrentState",
   "IndexOfOptimum",
+  "InnovationAtCurrentAnalysis",
   "InnovationAtCurrentState",
   "PredictedState",
+  "SimulatedObservationAtCurrentAnalysis",
   "SimulatedObservationAtCurrentOptimum",
   "SimulatedObservationAtCurrentState",
   ].
@@ -144,10 +146,14 @@ StoreSupplementaryCalculations
 
 .. include:: snippets/IndexOfOptimum.rst
 
+.. include:: snippets/InnovationAtCurrentAnalysis.rst
+
 .. include:: snippets/InnovationAtCurrentState.rst
 
 .. include:: snippets/PredictedState.rst
 
+.. include:: snippets/SimulatedObservationAtCurrentAnalysis.rst
+
 .. include:: snippets/SimulatedObservationAtCurrentOptimum.rst
 
 .. include:: snippets/SimulatedObservationAtCurrentState.rst
diff --git a/doc/fr/ref_algorithm_ExtendedKalmanFilter.rst b/doc/fr/ref_algorithm_ExtendedKalmanFilter.rst
index 5a3a3babf632a89a1424488c9151a36f6e91dc97..6e808635e15f99f04b44c17078b107d56dbbf9d8 100644 (file)
@@ -91,8 +91,10 @@ StoreSupplementaryCalculations
   "CurrentOptimum",
   "CurrentState",
   "IndexOfOptimum",
+  "InnovationAtCurrentAnalysis",
   "InnovationAtCurrentState",
   "PredictedState",
+  "SimulatedObservationAtCurrentAnalysis",
   "SimulatedObservationAtCurrentOptimum",
   "SimulatedObservationAtCurrentState",
   ].
@@ -138,10 +140,14 @@ StoreSupplementaryCalculations
 
 .. include:: snippets/IndexOfOptimum.rst
 
+.. include:: snippets/InnovationAtCurrentAnalysis.rst
+
 .. include:: snippets/InnovationAtCurrentState.rst
 
 .. include:: snippets/PredictedState.rst
 
+.. include:: snippets/SimulatedObservationAtCurrentAnalysis.rst
+
 .. include:: snippets/SimulatedObservationAtCurrentOptimum.rst
 
 .. include:: snippets/SimulatedObservationAtCurrentState.rst
diff --git a/doc/fr/ref_algorithm_KalmanFilter.rst b/doc/fr/ref_algorithm_KalmanFilter.rst
index 1f92ad3ce597fd1b8baf85e44ec1a0186db01dc6..ea859975e6dde5ff61e51636a3f1dd8317071cce 100644 (file)
@@ -89,8 +89,10 @@ StoreSupplementaryCalculations
   "CurrentOptimum",
   "CurrentState",
   "IndexOfOptimum",
+  "InnovationAtCurrentAnalysis",
   "InnovationAtCurrentState",
   "PredictedState",
+  "SimulatedObservationAtCurrentAnalysis",
   "SimulatedObservationAtCurrentOptimum",
   "SimulatedObservationAtCurrentState",
   ].
@@ -136,10 +138,14 @@ StoreSupplementaryCalculations
 
 .. include:: snippets/IndexOfOptimum.rst
 
+.. include:: snippets/InnovationAtCurrentAnalysis.rst
+
 .. include:: snippets/InnovationAtCurrentState.rst
 
 .. include:: snippets/PredictedState.rst
 
+.. include:: snippets/SimulatedObservationAtCurrentAnalysis.rst
+
 .. include:: snippets/SimulatedObservationAtCurrentOptimum.rst
 
 .. include:: snippets/SimulatedObservationAtCurrentState.rst
diff --git a/doc/fr/snippets/InnovationAtCurrentAnalysis.rst b/doc/fr/snippets/InnovationAtCurrentAnalysis.rst
new file mode 100644 (file)
index 0000000..95ed07d
--- /dev/null
@@ -0,0 +1,9 @@
+.. index:: single: InnovationAtCurrentAnalysis
+
+InnovationAtCurrentAnalysis
+  *Liste de vecteurs*. Chaque élément est un vecteur d'innovation à l'état
+  analysé courant. Cette quantité est identique au vecteur d'innovation à
+  l'état courant dans le cas d'une assimilation mono-état.
+
+  Exemple :
+  ``ds = ADD.get("InnovationAtCurrentAnalysis")[-1]``
diff --git a/doc/fr/snippets/SimulatedObservationAtCurrentAnalysis.rst b/doc/fr/snippets/SimulatedObservationAtCurrentAnalysis.rst
new file mode 100644 (file)
index 0000000..5bead53
--- /dev/null
@@ -0,0 +1,11 @@
+.. index:: single: SimulatedObservationAtCurrentAnalysis
+
+SimulatedObservationAtCurrentAnalysis
+  *Liste de vecteurs*. Chaque élément est un vecteur d'observation simulé par
+  l'opérateur d'observation à partir de l'état courant, c'est-à-dire dans
+  l'espace des observations. Cette quantité est identique au vecteur
+  d'observation simulé à l'état courant dans le cas d'une assimilation
+  mono-état.
+
+  Exemple :
+  ``hxs = ADD.get("SimulatedObservationAtCurrentAnalysis")[-1]``
diff --git a/src/daComposant/daAlgorithms/EnsembleKalmanFilter.py b/src/daComposant/daAlgorithms/EnsembleKalmanFilter.py
index 4a8449347d98003b7547a00be987bb6635419091..19e808e1714e4c866275fdbcd18eab7a2f022ce3 100644 (file)
@@ -75,8 +75,10 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 "CurrentOptimum",
                 "CurrentState",
                 "IndexOfOptimum",
+                "InnovationAtCurrentAnalysis",
                 "InnovationAtCurrentState",
                 "PredictedState",
+                "SimulatedObservationAtCurrentAnalysis",
                 "SimulatedObservationAtCurrentOptimum",
                 "SimulatedObservationAtCurrentState",
                 ]
@@ -119,6 +121,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             or self._toStore("CostFunctionJ") \
             or self._toStore("CostFunctionJb") \
             or self._toStore("CostFunctionJo") \
+            or self._toStore("CurrentOptimum") \
             or self._toStore("APosterioriCovariance"):
             BI = B.getI()
             RI = R.getI()
@@ -141,7 +144,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         if self._toStore("APosterioriCovariance"):
             self.StoredVariables["APosterioriCovariance"].store( Pn )
             covarianceXa = Pn
-        Xa               = Xb
+        Xa = XaMin       = Xb
         previousJMinimum = numpy.finfo(float).max
         #
         # Predimensionnement
@@ -197,7 +200,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             #
             for i in range(__m):
                 Xn[:,i] = Xn_predicted[:,i] + K * (Yo[:,i] - HX_predicted[:,i])
-            del Yo, PfHT, HPfHT
+            del PfHT, HPfHT
             #
             Xa = Xn.mean(axis=1, dtype=mfp)
             #
@@ -206,24 +209,32 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 or self._toStore("CostFunctionJb") \
                 or self._toStore("CostFunctionJo") \
                 or self._toStore("APosterioriCovariance") \
-                or self._toStore("InnovationAtCurrentState") \
-                or self._toStore("SimulatedObservationAtCurrentState") \
+                or self._toStore("InnovationAtCurrentAnalysis") \
+                or self._toStore("SimulatedObservationAtCurrentAnalysis") \
                 or self._toStore("SimulatedObservationAtCurrentOptimum"):
-                _HX          = numpy.asmatrix(numpy.ravel( H((Xa, Un)) )).T
-                _Innovation = Ynpu - _HX
+                _HXa = numpy.asmatrix(numpy.ravel( H((Xa, Un)) )).T
+                _Innovation = Ynpu - _HXa
             #
+            # ---> avec analysis
             self.StoredVariables["Analysis"].store( Xa )
+            if self._toStore("SimulatedObservationAtCurrentAnalysis"):
+                self.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( _HXa )
+            if self._toStore("InnovationAtCurrentAnalysis"):
+                self.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
+            # ---> avec current state
             if self._parameters["StoreInternalVariables"] \
-                or self._toStore("CurrentState") \
-                or self._toStore("CurrentOptimum"):
-                self.StoredVariables["CurrentState"].store( Xa )
+                or self._toStore("CurrentState"):
+                self.StoredVariables["CurrentState"].store( Xn )
+            if self._toStore("PredictedState"):
+                self.StoredVariables["PredictedState"].store( Xn_predicted )
             if self._toStore("BMA"):
                 self.StoredVariables["BMA"].store( Xn_predicted - Xa )
             if self._toStore("InnovationAtCurrentState"):
-                self.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
+                self.StoredVariables["InnovationAtCurrentState"].store( - HX_predicted + Ynpu )
             if self._toStore("SimulatedObservationAtCurrentState") \
                 or self._toStore("SimulatedObservationAtCurrentOptimum"):
-                self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX )
+                self.StoredVariables["SimulatedObservationAtCurrentState"].store( HX_predicted )
+            # ---> autres
             if self._parameters["StoreInternalVariables"] \
                 or self._toStore("CostFunctionJ") \
                 or self._toStore("CostFunctionJb") \
@@ -247,9 +258,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 if self._toStore("IndexOfOptimum"):
                     self.StoredVariables["IndexOfOptimum"].store( IndexMin )
                 if self._toStore("CurrentOptimum"):
-                    self.StoredVariables["CurrentOptimum"].store( self.StoredVariables["CurrentState"][IndexMin] )
+                    self.StoredVariables["CurrentOptimum"].store( self.StoredVariables["Analysis"][IndexMin] )
                 if self._toStore("SimulatedObservationAtCurrentOptimum"):
-                    self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
+                    self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( self.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
                 if self._toStore("CostFunctionJbAtCurrentOptimum"):
                     self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJb"][IndexMin] )
                 if self._toStore("CostFunctionJoAtCurrentOptimum"):
@@ -265,20 +276,21 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 Pf = (1./(__m-1)) * Pf
                 Pn = (1. - K * Ht) * Pf
                 self.StoredVariables["APosterioriCovariance"].store( Pn )
-                if self._parameters["EstimationOf"] == "Parameters" \
-                    and J < previousJMinimum:
-                    previousJMinimum  = J
-                    # Inutile ici : Xa = Xa
-                    covarianceXa = Pn
+            if self._parameters["EstimationOf"] == "Parameters" \
+                and J < previousJMinimum:
+                previousJMinimum    = J
+                XaMin               = Xa
+                if self._toStore("APosterioriCovariance"):
+                    covarianceXaMin = Pn
         #
         # Stockage final supplémentaire de l'optimum en estimation de paramètres
         # ----------------------------------------------------------------------
         if self._parameters["EstimationOf"] == "Parameters":
-            self.StoredVariables["Analysis"].store( Xa.A1 )
+            self.StoredVariables["Analysis"].store( XaMin )
             if self._toStore("APosterioriCovariance"):
-                self.StoredVariables["APosterioriCovariance"].store( covarianceXa )
+                self.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
             if self._toStore("BMA"):
-                self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
+                self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
         #
         self._post_run(HO)
         return 0
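The bookkeeping change above also appears in the filters below: the per-step analysis Xa is
always stored, a separate XaMin keeps the analysis with the smallest cost J, and in parameter
estimation that minimizer is stored once more as the final "Analysis". A self-contained sketch
of this pattern with made-up data (illustrative only, not ADAO code):

    import numpy
    # Made-up per-step analyses and their cost function values J
    analyses = [numpy.array([1.0, 2.0]), numpy.array([0.9, 2.1]), numpy.array([1.1, 1.9])]
    costs    = [3.0, 1.5, 2.2]
    stored_analysis  = []                      # plays the role of StoredVariables["Analysis"]
    previousJMinimum = numpy.finfo(float).max
    XaMin = None
    for Xa, J in zip(analyses, costs):
        stored_analysis.append( Xa )           # every current analysis is stored...
        if J < previousJMinimum:               # ...and the best one so far is remembered
            previousJMinimum = J
            XaMin = Xa
    stored_analysis.append( XaMin )            # parameter estimation: store the optimum last
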
diff --git a/src/daComposant/daAlgorithms/ExtendedKalmanFilter.py b/src/daComposant/daAlgorithms/ExtendedKalmanFilter.py
index 5670a6bd74c83b49c3cace59f316199eb5e24ebe..ecd5dacb700896918e76f6e7b6b72ec0486440ed 100644 (file)
@@ -69,8 +69,10 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 "CurrentOptimum",
                 "CurrentState",
                 "IndexOfOptimum",
+                "InnovationAtCurrentAnalysis",
                 "InnovationAtCurrentState",
                 "PredictedState",
+                "SimulatedObservationAtCurrentAnalysis",
                 "SimulatedObservationAtCurrentOptimum",
                 "SimulatedObservationAtCurrentState",
                 ]
@@ -129,7 +131,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         if self._toStore("APosterioriCovariance"):
             self.StoredVariables["APosterioriCovariance"].store( Pn.asfullmatrix(Xn.size) )
             covarianceXa = Pn
-        Xa               = Xn
+        Xa = XaMin       = Xn
         previousJMinimum = numpy.finfo(float).max
         #
         for step in range(duration-1):
@@ -188,38 +190,40 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             Xn = Xn_predicted + Pn_predicted * Ha * _u
             Kn = Pn_predicted * Ha * (R + numpy.dot(Ht, Pn_predicted * Ha)).I
             Pn = Pn_predicted - Kn * Ht * Pn_predicted
+            Xa, _HXa = Xn, _HX # Pointeurs
             #
-            self.StoredVariables["Analysis"].store( Xn )
+            # ---> avec analysis
+            self.StoredVariables["Analysis"].store( Xa )
+            if self._toStore("SimulatedObservationAtCurrentAnalysis"):
+                self.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( _HXa )
+            if self._toStore("InnovationAtCurrentAnalysis"):
+                self.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
+            # ---> avec current state
             if self._parameters["StoreInternalVariables"] \
-                or self._toStore("CurrentState") \
-                or self._toStore("CurrentOptimum"):
+                or self._toStore("CurrentState"):
                 self.StoredVariables["CurrentState"].store( Xn )
             if self._toStore("PredictedState"):
                 self.StoredVariables["PredictedState"].store( Xn_predicted )
             if self._toStore("BMA"):
-                self.StoredVariables["BMA"].store( Xn_predicted - Xn )
+                self.StoredVariables["BMA"].store( Xn_predicted - Xa )
             if self._toStore("InnovationAtCurrentState"):
                 self.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
             if self._toStore("SimulatedObservationAtCurrentState") \
                 or self._toStore("SimulatedObservationAtCurrentOptimum"):
                 self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX )
+            # ---> autres
             if self._parameters["StoreInternalVariables"] \
                 or self._toStore("CostFunctionJ") \
                 or self._toStore("CostFunctionJb") \
                 or self._toStore("CostFunctionJo") \
                 or self._toStore("CurrentOptimum") \
                 or self._toStore("APosterioriCovariance"):
-                Jb  = float( 0.5 * (Xn - Xb).T * BI * (Xn - Xb) )
+                Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
                 Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
                 J   = Jb + Jo
                 self.StoredVariables["CostFunctionJb"].store( Jb )
                 self.StoredVariables["CostFunctionJo"].store( Jo )
                 self.StoredVariables["CostFunctionJ" ].store( J )
-                if self._parameters["EstimationOf"] == "Parameters" \
-                    and J < previousJMinimum:
-                    previousJMinimum  = J
-                    Xa                = Xn
-                    if self._toStore("APosterioriCovariance"): covarianceXa = Pn
                 #
                 if self._toStore("IndexOfOptimum") \
                     or self._toStore("CurrentOptimum") \
@@ -231,9 +235,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 if self._toStore("IndexOfOptimum"):
                     self.StoredVariables["IndexOfOptimum"].store( IndexMin )
                 if self._toStore("CurrentOptimum"):
-                    self.StoredVariables["CurrentOptimum"].store( self.StoredVariables["CurrentState"][IndexMin] )
+                    self.StoredVariables["CurrentOptimum"].store( self.StoredVariables["Analysis"][IndexMin] )
                 if self._toStore("SimulatedObservationAtCurrentOptimum"):
-                    self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
+                    self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( self.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
                 if self._toStore("CostFunctionJbAtCurrentOptimum"):
                     self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJb"][IndexMin] )
                 if self._toStore("CostFunctionJoAtCurrentOptimum"):
@@ -242,15 +246,21 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                     self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] )
             if self._toStore("APosterioriCovariance"):
                 self.StoredVariables["APosterioriCovariance"].store( Pn )
+            if self._parameters["EstimationOf"] == "Parameters" \
+                and J < previousJMinimum:
+                previousJMinimum    = J
+                XaMin               = Xa
+                if self._toStore("APosterioriCovariance"):
+                    covarianceXaMin = Pn
         #
         # Stockage final supplémentaire de l'optimum en estimation de paramètres
         # ----------------------------------------------------------------------
         if self._parameters["EstimationOf"] == "Parameters":
-            self.StoredVariables["Analysis"].store( Xa.A1 )
+            self.StoredVariables["Analysis"].store( XaMin )
             if self._toStore("APosterioriCovariance"):
-                self.StoredVariables["APosterioriCovariance"].store( covarianceXa )
+                self.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
             if self._toStore("BMA"):
-                self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
+                self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
         #
         self._post_run(HO)
         return 0
diff --git a/src/daComposant/daAlgorithms/KalmanFilter.py b/src/daComposant/daAlgorithms/KalmanFilter.py
index e8115b4e6ca4b934d112eed07e7beb62262865ac..bde6935e4db019f7a0576f7d3945ba4f8e56dffb 100644 (file)
@@ -62,8 +62,10 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 "CurrentOptimum",
                 "CurrentState",
                 "IndexOfOptimum",
+                "InnovationAtCurrentAnalysis",
                 "InnovationAtCurrentState",
                 "PredictedState",
+                "SimulatedObservationAtCurrentAnalysis",
                 "SimulatedObservationAtCurrentOptimum",
                 "SimulatedObservationAtCurrentState",
                 ]
@@ -164,38 +166,40 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             Xn = Xn_predicted + Pn_predicted * Ha * _u
             Kn = Pn_predicted * Ha * (R + numpy.dot(Ht, Pn_predicted * Ha)).I
             Pn = Pn_predicted - Kn * Ht * Pn_predicted
+            Xa, _HXa = Xn, _HX # Pointeurs
             #
-            self.StoredVariables["Analysis"].store( Xn )
+            # ---> avec analysis
+            self.StoredVariables["Analysis"].store( Xa )
+            if self._toStore("SimulatedObservationAtCurrentAnalysis"):
+                self.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( _HXa )
+            if self._toStore("InnovationAtCurrentAnalysis"):
+                self.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
+            # ---> avec current state
             if self._parameters["StoreInternalVariables"] \
-                or self._toStore("CurrentState") \
-                or self._toStore("CurrentOptimum"):
+                or self._toStore("CurrentState"):
                 self.StoredVariables["CurrentState"].store( Xn )
             if self._toStore("PredictedState"):
                 self.StoredVariables["PredictedState"].store( Xn_predicted )
             if self._toStore("BMA"):
-                self.StoredVariables["BMA"].store( Xn_predicted - Xn )
+                self.StoredVariables["BMA"].store( Xn_predicted - Xa )
             if self._toStore("InnovationAtCurrentState"):
                 self.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
             if self._toStore("SimulatedObservationAtCurrentState") \
                 or self._toStore("SimulatedObservationAtCurrentOptimum"):
                 self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX )
+            # ---> autres
             if self._parameters["StoreInternalVariables"] \
                 or self._toStore("CostFunctionJ") \
                 or self._toStore("CostFunctionJb") \
                 or self._toStore("CostFunctionJo") \
                 or self._toStore("CurrentOptimum") \
                 or self._toStore("APosterioriCovariance"):
-                Jb  = float( 0.5 * (Xn - Xb).T * BI * (Xn - Xb) )
+                Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
                 Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
                 J   = Jb + Jo
                 self.StoredVariables["CostFunctionJb"].store( Jb )
                 self.StoredVariables["CostFunctionJo"].store( Jo )
                 self.StoredVariables["CostFunctionJ" ].store( J )
-                if self._parameters["EstimationOf"] == "Parameters" \
-                    and J < previousJMinimum:
-                    previousJMinimum  = J
-                    Xa                = Xn
-                    if self._toStore("APosterioriCovariance"): covarianceXa = Pn
                 #
                 if self._toStore("IndexOfOptimum") \
                     or self._toStore("CurrentOptimum") \
@@ -207,9 +211,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 if self._toStore("IndexOfOptimum"):
                     self.StoredVariables["IndexOfOptimum"].store( IndexMin )
                 if self._toStore("CurrentOptimum"):
-                    self.StoredVariables["CurrentOptimum"].store( self.StoredVariables["CurrentState"][IndexMin] )
+                    self.StoredVariables["CurrentOptimum"].store( self.StoredVariables["Analysis"][IndexMin] )
                 if self._toStore("SimulatedObservationAtCurrentOptimum"):
-                    self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
+                    self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( self.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
                 if self._toStore("CostFunctionJbAtCurrentOptimum"):
                     self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJb"][IndexMin] )
                 if self._toStore("CostFunctionJoAtCurrentOptimum"):
@@ -218,15 +222,21 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                     self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] )
             if self._toStore("APosterioriCovariance"):
                 self.StoredVariables["APosterioriCovariance"].store( Pn )
+            if self._parameters["EstimationOf"] == "Parameters" \
+                and J < previousJMinimum:
+                previousJMinimum    = J
+                XaMin               = Xa
+                if self._toStore("APosterioriCovariance"):
+                    covarianceXaMin = Pn
         #
         # Stockage final supplémentaire de l'optimum en estimation de paramètres
         # ----------------------------------------------------------------------
         if self._parameters["EstimationOf"] == "Parameters":
-            self.StoredVariables["Analysis"].store( Xa.A1 )
+            self.StoredVariables["Analysis"].store( XaMin )
             if self._toStore("APosterioriCovariance"):
-                self.StoredVariables["APosterioriCovariance"].store( covarianceXa )
+                self.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
             if self._toStore("BMA"):
-                self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
+                self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
         #
         self._post_run(HO)
         return 0
diff --git a/src/daComposant/daAlgorithms/UnscentedKalmanFilter.py b/src/daComposant/daAlgorithms/UnscentedKalmanFilter.py
index bb3da550645dd38e7d6b383fc0cc6e0b84826c4d..f4fc2fb9ddab104d992a277603a99281f5cedccf 100644 (file)
@@ -172,7 +172,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         if self._toStore("APosterioriCovariance"):
             self.StoredVariables["APosterioriCovariance"].store( Pn )
             covarianceXa = Pn
-        Xa               = Xn
+        Xa = XaMin       = Xb
         previousJMinimum = numpy.finfo(float).max
         #
         for step in range(duration-1):
@@ -268,12 +268,15 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             if self._parameters["Bounds"] is not None and self._parameters["ConstrainedBy"] == "EstimateProjection":
                 Xn = numpy.max(numpy.hstack((Xn,numpy.asmatrix(self._parameters["Bounds"])[:,0])),axis=1)
                 Xn = numpy.min(numpy.hstack((Xn,numpy.asmatrix(self._parameters["Bounds"])[:,1])),axis=1)
+            Xa = Xn # Pointeurs
             #
-            self.StoredVariables["Analysis"].store( Xn.A1 )
+            # ---> avec analysis
+            self.StoredVariables["Analysis"].store( Xa )
             if self._toStore("APosterioriCovariance"):
                 self.StoredVariables["APosterioriCovariance"].store( Pn )
+            # ---> avec current state
             if self._toStore("InnovationAtCurrentState"):
-                self.StoredVariables["InnovationAtCurrentState"].store( numpy.ravel( d.A1 ) )
+                self.StoredVariables["InnovationAtCurrentState"].store( d )
             if self._parameters["StoreInternalVariables"] \
                 or self._toStore("CurrentState"):
                 self.StoredVariables["CurrentState"].store( Xn )
@@ -281,28 +284,27 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 or self._toStore("CostFunctionJ") \
                 or self._toStore("CostFunctionJb") \
                 or self._toStore("CostFunctionJo"):
-                Jb  = 0.5 * (Xn - Xb).T * BI * (Xn - Xb)
-                Jo  = 0.5 * d.T * RI * d
-                J   = float( Jb ) + float( Jo )
+                Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
+                Jo  = float( 0.5 * d.T * RI * d )
+                J   = Jb + Jo
                 self.StoredVariables["CostFunctionJb"].store( Jb )
                 self.StoredVariables["CostFunctionJo"].store( Jo )
                 self.StoredVariables["CostFunctionJ" ].store( J )
-                if J < previousJMinimum:
-                    previousJMinimum  = J
-                    Xa                = Xn
-                    if self._toStore("APosterioriCovariance"):
-                        covarianceXa  = Pn
-            else:
-                Xa = Xn
+            if self._parameters["EstimationOf"] == "Parameters" \
+                and J < previousJMinimum:
+                previousJMinimum    = J
+                XaMin               = Xa
+                if self._toStore("APosterioriCovariance"):
+                    covarianceXaMin = Pn
         #
-        # Stockage supplementaire de l'optimum en estimation de parametres
-        # ----------------------------------------------------------------
+        # Stockage final supplémentaire de l'optimum en estimation de paramètres
+        # ----------------------------------------------------------------------
         if self._parameters["EstimationOf"] == "Parameters":
-            self.StoredVariables["Analysis"].store( Xa.A1 )
+            self.StoredVariables["Analysis"].store( XaMin )
             if self._toStore("APosterioriCovariance"):
-                self.StoredVariables["APosterioriCovariance"].store( covarianceXa )
+                self.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
             if self._toStore("BMA"):
-                self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
+                self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
         #
         self._post_run(HO)
         return 0
diff --git a/src/daComposant/daCore/BasicObjects.py b/src/daComposant/daCore/BasicObjects.py
index 0099c210e28a5bd89066e45189ccf1be99aef06f..3237c13657e145921da5ac7c3c49b6f1371da301 100644 (file)
@@ -628,6 +628,7 @@ class Algorithm(object):
         self.StoredVariables["GradientOfCostFunctionJo"]             = Persistence.OneVector(name = "GradientOfCostFunctionJo")
         self.StoredVariables["IndexOfOptimum"]                       = Persistence.OneIndex(name = "IndexOfOptimum")
         self.StoredVariables["Innovation"]                           = Persistence.OneVector(name = "Innovation")
+        self.StoredVariables["InnovationAtCurrentAnalysis"]          = Persistence.OneVector(name = "InnovationAtCurrentAnalysis")
         self.StoredVariables["InnovationAtCurrentState"]             = Persistence.OneVector(name = "InnovationAtCurrentState")
         self.StoredVariables["JacobianMatrixAtBackground"]           = Persistence.OneMatrix(name = "JacobianMatrixAtBackground")
         self.StoredVariables["JacobianMatrixAtCurrentState"]         = Persistence.OneMatrix(name = "JacobianMatrixAtCurrentState")
@@ -641,6 +642,7 @@ class Algorithm(object):
         self.StoredVariables["SigmaBck2"]                            = Persistence.OneScalar(name = "SigmaBck2")
         self.StoredVariables["SigmaObs2"]                            = Persistence.OneScalar(name = "SigmaObs2")
         self.StoredVariables["SimulatedObservationAtBackground"]     = Persistence.OneVector(name = "SimulatedObservationAtBackground")
+        self.StoredVariables["SimulatedObservationAtCurrentAnalysis"]= Persistence.OneVector(name = "SimulatedObservationAtCurrentAnalysis")
         self.StoredVariables["SimulatedObservationAtCurrentOptimum"] = Persistence.OneVector(name = "SimulatedObservationAtCurrentOptimum")
         self.StoredVariables["SimulatedObservationAtCurrentState"]   = Persistence.OneVector(name = "SimulatedObservationAtCurrentState")
         self.StoredVariables["SimulatedObservationAtOptimum"]        = Persistence.OneVector(name = "SimulatedObservationAtOptimum")