SALOME platform Git repositories - modules/adao.git/commitdiff
Minor internal modifications and performance improvements
author Jean-Philippe ARGAUD <jean-philippe.argaud@edf.fr>
Tue, 20 Jul 2021 17:42:23 +0000 (19:42 +0200)
committer Jean-Philippe ARGAUD <jean-philippe.argaud@edf.fr>
Tue, 20 Jul 2021 17:42:23 +0000 (19:42 +0200)
doc/en/ref_algorithm_ExtendedKalmanFilter.rst
doc/en/ref_algorithm_UnscentedKalmanFilter.rst
doc/en/snippets/Variant_EKF.rst [new file with mode: 0644]
doc/fr/ref_algorithm_ExtendedKalmanFilter.rst
doc/fr/ref_algorithm_UnscentedKalmanFilter.rst
doc/fr/snippets/Variant_EKF.rst [new file with mode: 0644]
src/daComposant/daAlgorithms/ExtendedKalmanFilter.py
src/daComposant/daAlgorithms/UnscentedKalmanFilter.py
src/daComposant/daCore/NumericObjects.py

diff --git a/doc/en/ref_algorithm_ExtendedKalmanFilter.rst b/doc/en/ref_algorithm_ExtendedKalmanFilter.rst
index c4429c5fb7e8de95d06793a299cbfbfd85b13558..cc45fe98c9193279e8d0a284e4bd824a3ce32729 100644
@@ -77,7 +77,7 @@ the operators with the help of the :ref:`section_ref_algorithm_LinearityTest`.
 .. ------------------------------------ ..
 .. include:: snippets/Header2Algo03AdOp.rst
 
-.. include:: snippets/BoundsWithExtremes.rst
+.. include:: snippets/BoundsWithNone.rst
 
 .. include:: snippets/ConstrainedBy.rst
 
@@ -120,6 +120,8 @@ StoreSupplementaryCalculations
   Example :
   ``{"StoreSupplementaryCalculations":["BMA", "CurrentState"]}``
 
+.. include:: snippets/Variant_EKF.rst
+
 .. ------------------------------------ ..
 .. include:: snippets/Header2Algo04.rst
 
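The example above shows the literal dictionary form of the parameter. For illustration, the same setting might be passed through the ADAO textual interface; the following is a minimal sketch, assuming the standard adaoBuilder entry point, with operators and data omitted:

    # A minimal sketch, assuming the standard "adaoBuilder" textual
    # interface of ADAO; operators and data definitions are omitted.
    from adao import adaoBuilder
    case = adaoBuilder.New()
    case.set( 'AlgorithmParameters',
        Algorithm  = 'ExtendedKalmanFilter',
        Parameters = {
            "StoreSupplementaryCalculations" : ["BMA", "CurrentState"],
            },
        )
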
diff --git a/doc/en/ref_algorithm_UnscentedKalmanFilter.rst b/doc/en/ref_algorithm_UnscentedKalmanFilter.rst
index 519b49905a7151e0f4e1a3838c721b0348e21223..eeef062c8578b1695892e53d164892d04813d3c2 100644
@@ -72,7 +72,7 @@ with the help of the :ref:`section_ref_algorithm_LinearityTest`.
 .. ------------------------------------ ..
 .. include:: snippets/Header2Algo03AdOp.rst
 
-.. include:: snippets/BoundsWithExtremes.rst
+.. include:: snippets/BoundsWithNone.rst
 
 .. include:: snippets/ConstrainedBy.rst
 
diff --git a/doc/en/snippets/Variant_EKF.rst b/doc/en/snippets/Variant_EKF.rst
new file mode 100644
index 0000000..4cf6547
--- /dev/null
@@ -0,0 +1,14 @@
+.. index::
+    single: Variant
+    pair: Variant ; EKF
+    pair: Variant ; CEKF
+
+Variant
+  *Predefined name*. This key allows choosing one of the possible variants of
+  the main algorithm. The default variant is the constrained version "CEKF" of
+  the original algorithm "EKF", and the possible choices are
+  "EKF" (Extended Kalman Filter),
+  "CEKF" (Constrained Extended Kalman Filter).
+
+  Example :
+  ``{"Variant":"CEKF"}``
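With this snippet, the "Variant" key accepts "EKF" or "CEKF", and omitting it now selects the constrained "CEKF" variant. Continuing the adaoBuilder sketch above, the unconstrained variant would be requested explicitly:

    # Hypothetical explicit choice of the unconstrained variant; leaving
    # "Variant" out selects "CEKF" by default after this commit.
    case.set( 'AlgorithmParameters',
        Algorithm  = 'ExtendedKalmanFilter',
        Parameters = { "Variant" : "EKF" },
        )
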
diff --git a/doc/fr/ref_algorithm_ExtendedKalmanFilter.rst b/doc/fr/ref_algorithm_ExtendedKalmanFilter.rst
index bde01ed7d7f7c141a1ca2a653a88b54d850026bd..58ab95444493537fb32c7dc24c5ae8810ef9d9fa 100644
@@ -78,7 +78,7 @@ l':ref:`section_ref_algorithm_LinearityTest`.
 .. ------------------------------------ ..
 .. include:: snippets/Header2Algo03AdOp.rst
 
-.. include:: snippets/BoundsWithExtremes.rst
+.. include:: snippets/BoundsWithNone.rst
 
 .. include:: snippets/ConstrainedBy.rst
 
@@ -122,6 +122,8 @@ StoreSupplementaryCalculations
   Exemple :
   ``{"StoreSupplementaryCalculations":["BMA", "CurrentState"]}``
 
+.. include:: snippets/Variant_EKF.rst
+
 .. ------------------------------------ ..
 .. include:: snippets/Header2Algo04.rst
 
diff --git a/doc/fr/ref_algorithm_UnscentedKalmanFilter.rst b/doc/fr/ref_algorithm_UnscentedKalmanFilter.rst
index 0e525519f16251d9b9b670cfa7cc5522263d1437..e873868d8a3ce1083b3393beea410e2334b05b0f 100644
@@ -73,7 +73,7 @@ opérateurs à l'aide de l':ref:`section_ref_algorithm_LinearityTest`.
 .. ------------------------------------ ..
 .. include:: snippets/Header2Algo03AdOp.rst
 
-.. include:: snippets/BoundsWithExtremes.rst
+.. include:: snippets/BoundsWithNone.rst
 
 .. include:: snippets/ConstrainedBy.rst
 
diff --git a/doc/fr/snippets/Variant_EKF.rst b/doc/fr/snippets/Variant_EKF.rst
new file mode 100644
index 0000000..5c97ed8
--- /dev/null
@@ -0,0 +1,14 @@
+.. index::
+    single: Variant
+    pair: Variant ; EKF
+    pair: Variant ; CEKF
+
+Variant
+  *Nom prédéfini*. Cette clé permet de choisir l'une des variantes possibles
+  pour l'algorithme principal. La variante par défaut est la version contrainte
+  "CEKF" de l'algorithme original "EKF", et les choix possibles sont
+  "EKF" (Extended Kalman Filter),
+  "CEKF" (Constrained Extended Kalman Filter).
+
+  Exemple :
+  ``{"Variant":"CEKF"}``
diff --git a/src/daComposant/daAlgorithms/ExtendedKalmanFilter.py b/src/daComposant/daAlgorithms/ExtendedKalmanFilter.py
index 95cd3e3dfcf9db5cb510e18cd3649572542b43ca..a34a997a3db2c200f873a445aa55bc2536c93f0f 100644
@@ -29,13 +29,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         BasicObjects.Algorithm.__init__(self, "EXTENDEDKALMANFILTER")
         self.defineRequiredParameter(
             name     = "Variant",
-            default  = "EKF",
+            default  = "CEKF",
             typecast = str,
             message  = "Variant ou formulation de la méthode",
             listval  = [
                 "EKF",
-                ],
-            listadv  = [
                 "CEKF",
                 ],
             )
@@ -109,8 +107,19 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
         #
         #--------------------------
-        NumericObjects.exkf(self, Xb, Y, U, HO, EM, CM, R, B, Q)
+        # Standard EKF
+        #--------------------------
+        if   self._parameters["Variant"] == "EKF":
+            NumericObjects.exkf(self, Xb, Y, U, HO, EM, CM, R, B, Q)
+        #
+        #--------------------------
+        # Default CEKF
+        elif self._parameters["Variant"] == "CEKF":
+            NumericObjects.cekf(self, Xb, Y, U, HO, EM, CM, R, B, Q)
+        #
         #--------------------------
+        else:
+            raise ValueError("Error in Variant name: %s"%self._parameters["Variant"])
         #
         self._post_run(HO)
         return 0
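The run() method now dispatches on the "Variant" value with an if/elif chain and raises a ValueError for unknown names. As a design alternative (a sketch only, not the committed code), the same dispatch could use a mapping, which keeps the chain flat if further variants are added later:

    # Sketch of an equivalent mapping-based dispatch:
    variants = { "EKF": NumericObjects.exkf, "CEKF": NumericObjects.cekf }
    try:
        variants[ self._parameters["Variant"] ](self, Xb, Y, U, HO, EM, CM, R, B, Q)
    except KeyError:
        raise ValueError("Error in Variant name: %s"%self._parameters["Variant"])
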
diff --git a/src/daComposant/daAlgorithms/UnscentedKalmanFilter.py b/src/daComposant/daAlgorithms/UnscentedKalmanFilter.py
index 6c497f8647bd065c20d6f8c6b841be8acb6c19d1..a8904ba0261fde19c9a311c2d4fcd7ba76ed4721 100644
@@ -101,11 +101,22 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 "APosterioriVariances",
                 "BMA",
                 "CostFunctionJ",
+                "CostFunctionJAtCurrentOptimum",
                 "CostFunctionJb",
+                "CostFunctionJbAtCurrentOptimum",
                 "CostFunctionJo",
+                "CostFunctionJoAtCurrentOptimum",
                 "CurrentIterationNumber",
+                "CurrentOptimum",
                 "CurrentState",
+                "ForecastCovariance",
+                "ForecastState",
+                "IndexOfOptimum",
+                "InnovationAtCurrentAnalysis",
                 "InnovationAtCurrentState",
+                "SimulatedObservationAtCurrentAnalysis",
+                "SimulatedObservationAtCurrentOptimum",
+                "SimulatedObservationAtCurrentState",
                 ]
             )
         self.defineRequiredParameter( # Pas de type
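This hunk extends the quantities that UnscentedKalmanFilter can store on demand. As a sketch, again assuming the adaoBuilder interface and a fully configured case, two of the new quantities could be requested and read back after execution:

    # Request two newly storable quantities, then retrieve one of them
    # (assumes a fully configured `case` as in the sketches above).
    case.set( 'AlgorithmParameters',
        Algorithm  = 'UnscentedKalmanFilter',
        Parameters = { "StoreSupplementaryCalculations" : [
            "ForecastState", "SimulatedObservationAtCurrentOptimum" ] },
        )
    case.execute()
    forecasts = case.get("ForecastState")
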
diff --git a/src/daComposant/daCore/NumericObjects.py b/src/daComposant/daCore/NumericObjects.py
index 2813a3045014c287272412c31798b2ab666545e4..2c862af4cb7ca74edb102471fc1fbd1cd362d0dd 100644
@@ -710,12 +710,12 @@ def QuantilesEstimations(selfA, A, Xa, HXa = None, Hm = None, HtM = None):
 
 # ==============================================================================
 def ForceNumericBounds( __Bounds ):
-    "Force les bornes à être des valeurs numériques, sauf si None"
+    "Force les bornes à être des valeurs numériques, sauf si globalement None"
     # Conserve une valeur par défaut à None s'il n'y a pas de bornes
     if __Bounds is None: return None
     # Converti toutes les bornes individuelles None à +/- l'infini
     __Bounds = numpy.asarray( __Bounds, dtype=float )
-    if len(__Bounds.shape) != 2 or min(__Bounds.shape) <= 0:
+    if len(__Bounds.shape) != 2 or min(__Bounds.shape) <= 0 or __Bounds.shape[1] != 2:
         raise ValueError("Incorrectly defined bounds data")
     __Bounds[numpy.isnan(__Bounds)[:,0],0] = -sys.float_info.max
     __Bounds[numpy.isnan(__Bounds)[:,1],1] =  sys.float_info.max
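The strengthened test now also rejects bounds arrays whose second dimension is not 2. The surrounding conversion logic can be illustrated on plain NumPy values (a behavioural sketch, not the ADAO source):

    import numpy, sys
    # Individual None bounds become nan under dtype=float, then are
    # replaced by +/- sys.float_info.max, as in ForceNumericBounds.
    b = numpy.asarray([[0., None], [None, 10.]], dtype=float)
    b[numpy.isnan(b)[:,0], 0] = -sys.float_info.max
    b[numpy.isnan(b)[:,1], 1] =  sys.float_info.max
    assert len(b.shape) == 2 and b.shape[1] == 2  # otherwise: ValueError
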
@@ -728,6 +728,210 @@ def RecentredBounds( __Bounds, __Center):
     # Recentre les valeurs numériques de bornes
     return ForceNumericBounds( __Bounds ) - numpy.ravel( __Center ).reshape((-1,1))
 
+# ==============================================================================
+def cekf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
+    """
+    Contrained Extended Kalman Filter
+    """
+    if selfA._parameters["EstimationOf"] == "Parameters":
+        selfA._parameters["StoreInternalVariables"] = True
+    selfA._parameters["Bounds"] = ForceNumericBounds( selfA._parameters["Bounds"] )
+    #
+    # Opérateurs
+    H = HO["Direct"].appliedControledFormTo
+    #
+    if selfA._parameters["EstimationOf"] == "State":
+        M = EM["Direct"].appliedControledFormTo
+    #
+    if CM is not None and "Tangent" in CM and U is not None:
+        Cm = CM["Tangent"].asMatrix(Xb)
+    else:
+        Cm = None
+    #
+    # Durée d'observation et tailles
+    if hasattr(Y,"stepnumber"):
+        duration = Y.stepnumber()
+        __p = numpy.cumprod(Y.shape())[-1]
+    else:
+        duration = 2
+        __p = numpy.array(Y).size
+    #
+    # Précalcul des inversions de B et R
+    if selfA._parameters["StoreInternalVariables"] \
+        or selfA._toStore("CostFunctionJ") \
+        or selfA._toStore("CostFunctionJb") \
+        or selfA._toStore("CostFunctionJo") \
+        or selfA._toStore("CurrentOptimum") \
+        or selfA._toStore("APosterioriCovariance"):
+        BI = B.getI()
+        RI = R.getI()
+    #
+    __n = Xb.size
+    #
+    if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
+        Xn = Xb
+        Pn = B
+        selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
+        selfA.StoredVariables["Analysis"].store( Xb )
+        if selfA._toStore("APosterioriCovariance"):
+            if hasattr(B,"asfullmatrix"):
+                selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(__n) )
+            else:
+                selfA.StoredVariables["APosterioriCovariance"].store( B )
+        selfA._setInternalState("seed", numpy.random.get_state())
+    elif selfA._parameters["nextStep"]:
+        Xn = selfA._getInternalState("Xn")
+        Pn = selfA._getInternalState("Pn")
+    #
+    if selfA._parameters["EstimationOf"] == "Parameters":
+        XaMin            = Xn
+        previousJMinimum = numpy.finfo(float).max
+    #
+    for step in range(duration-1):
+        if hasattr(Y,"store"):
+            Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
+        else:
+            Ynpu = numpy.ravel( Y ).reshape((__p,1))
+        #
+        Ht = HO["Tangent"].asMatrix(ValueForMethodForm = Xn)
+        Ht = Ht.reshape(Ynpu.size,Xn.size) # ADAO & check shape
+        Ha = HO["Adjoint"].asMatrix(ValueForMethodForm = Xn)
+        Ha = Ha.reshape(Xn.size,Ynpu.size) # ADAO & check shape
+        #
+        if selfA._parameters["EstimationOf"] == "State":
+            Mt = EM["Tangent"].asMatrix(ValueForMethodForm = Xn)
+            Mt = Mt.reshape(Xn.size,Xn.size) # ADAO & check shape
+            Ma = EM["Adjoint"].asMatrix(ValueForMethodForm = Xn)
+            Ma = Ma.reshape(Xn.size,Xn.size) # ADAO & check shape
+        #
+        if U is not None:
+            if hasattr(U,"store") and len(U)>1:
+                Un = numpy.ravel( U[step] ).reshape((-1,1))
+            elif hasattr(U,"store") and len(U)==1:
+                Un = numpy.ravel( U[0] ).reshape((-1,1))
+            else:
+                Un = numpy.ravel( U ).reshape((-1,1))
+        else:
+            Un = None
+        #
+        if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
+            Xn = numpy.max(numpy.hstack((Xn,numpy.asmatrix(selfA._parameters["Bounds"])[:,0])),axis=1)
+            Xn = numpy.min(numpy.hstack((Xn,numpy.asmatrix(selfA._parameters["Bounds"])[:,1])),axis=1)
+        #
+        if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast
+            Xn_predicted = numpy.ravel( M( (Xn, Un) ) ).reshape((__n,1))
+            if Cm is not None and Un is not None: # Attention : si Cm est aussi dans M, doublon !
+                Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
+                Xn_predicted = Xn_predicted + Cm * Un
+            Pn_predicted = Q + Mt * Pn * Ma
+        elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast
+            # --- > Par principe, M = Id, Q = 0
+            Xn_predicted = Xn
+            Pn_predicted = Pn
+        #
+        if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
+            Xn_predicted = numpy.max(numpy.hstack((Xn_predicted,numpy.asmatrix(selfA._parameters["Bounds"])[:,0])),axis=1)
+            Xn_predicted = numpy.min(numpy.hstack((Xn_predicted,numpy.asmatrix(selfA._parameters["Bounds"])[:,1])),axis=1)
+        #
+        if selfA._parameters["EstimationOf"] == "State":
+            HX_predicted = numpy.ravel( H( (Xn_predicted, None) ) ).reshape((__p,1))
+            _Innovation  = Ynpu - HX_predicted
+        elif selfA._parameters["EstimationOf"] == "Parameters":
+            HX_predicted = numpy.ravel( H( (Xn_predicted, Un) ) ).reshape((__p,1))
+            _Innovation  = Ynpu - HX_predicted
+            if Cm is not None and Un is not None: # Attention : si Cm est aussi dans H, doublon !
+                _Innovation = _Innovation - Cm * Un
+        #
+        Kn = Pn_predicted * Ha * numpy.linalg.inv(R + numpy.dot(Ht, Pn_predicted * Ha))
+        Xn = Xn_predicted + Kn * _Innovation
+        Pn = Pn_predicted - Kn * Ht * Pn_predicted
+        #
+        if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
+            Xn = numpy.max(numpy.hstack((Xn,numpy.asmatrix(selfA._parameters["Bounds"])[:,0])),axis=1)
+            Xn = numpy.min(numpy.hstack((Xn,numpy.asmatrix(selfA._parameters["Bounds"])[:,1])),axis=1)
+        #
+        Xa = Xn # Pointeurs
+        #--------------------------
+        selfA._setInternalState("Xn", Xn)
+        selfA._setInternalState("Pn", Pn)
+        #--------------------------
+        #
+        selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
+        # ---> avec analysis
+        selfA.StoredVariables["Analysis"].store( Xa )
+        if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
+            selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( H((Xa, Un)) )
+        if selfA._toStore("InnovationAtCurrentAnalysis"):
+            selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
+        # ---> avec current state
+        if selfA._parameters["StoreInternalVariables"] \
+            or selfA._toStore("CurrentState"):
+            selfA.StoredVariables["CurrentState"].store( Xn )
+        if selfA._toStore("ForecastState"):
+            selfA.StoredVariables["ForecastState"].store( Xn_predicted )
+        if selfA._toStore("ForecastCovariance"):
+            selfA.StoredVariables["ForecastCovariance"].store( Pn_predicted )
+        if selfA._toStore("BMA"):
+            selfA.StoredVariables["BMA"].store( Xn_predicted - Xa )
+        if selfA._toStore("InnovationAtCurrentState"):
+            selfA.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
+        if selfA._toStore("SimulatedObservationAtCurrentState") \
+            or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
+            selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HX_predicted )
+        # ---> autres
+        if selfA._parameters["StoreInternalVariables"] \
+            or selfA._toStore("CostFunctionJ") \
+            or selfA._toStore("CostFunctionJb") \
+            or selfA._toStore("CostFunctionJo") \
+            or selfA._toStore("CurrentOptimum") \
+            or selfA._toStore("APosterioriCovariance"):
+            Jb  = float( 0.5 * (Xa - Xb).T @ (BI @ (Xa - Xb)) )
+            Jo  = float( 0.5 * _Innovation.T @ (RI @ _Innovation) )
+            J   = Jb + Jo
+            selfA.StoredVariables["CostFunctionJb"].store( Jb )
+            selfA.StoredVariables["CostFunctionJo"].store( Jo )
+            selfA.StoredVariables["CostFunctionJ" ].store( J )
+            #
+            if selfA._toStore("IndexOfOptimum") \
+                or selfA._toStore("CurrentOptimum") \
+                or selfA._toStore("CostFunctionJAtCurrentOptimum") \
+                or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
+                or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
+                or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
+                IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
+            if selfA._toStore("IndexOfOptimum"):
+                selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
+            if selfA._toStore("CurrentOptimum"):
+                selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
+            if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
+                selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
+            if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
+                selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
+            if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
+                selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
+            if selfA._toStore("CostFunctionJAtCurrentOptimum"):
+                selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
+        if selfA._toStore("APosterioriCovariance"):
+            selfA.StoredVariables["APosterioriCovariance"].store( Pn )
+        if selfA._parameters["EstimationOf"] == "Parameters" \
+            and J < previousJMinimum:
+            previousJMinimum    = J
+            XaMin               = Xa
+            if selfA._toStore("APosterioriCovariance"):
+                covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
+    #
+    # Stockage final supplémentaire de l'optimum en estimation de paramètres
+    # ----------------------------------------------------------------------
+    if selfA._parameters["EstimationOf"] == "Parameters":
+        selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
+        selfA.StoredVariables["Analysis"].store( XaMin )
+        if selfA._toStore("APosterioriCovariance"):
+            selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
+        if selfA._toStore("BMA"):
+            selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
+    #
+    return 0
+
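Within cekf, the "EstimateProjection" constraint projects the state onto the bounds three times with the max/min-hstack idiom. On plain (n, 1) arrays with fully numeric bounds, as guaranteed by ForceNumericBounds, the same projection can be written with a single clip (an equivalent sketch):

    import numpy
    # Bounds is an (n, 2) array of [lower, upper]; Xn is an (n, 1) state.
    Xn = numpy.clip(Xn, Bounds[:, 0:1], Bounds[:, 1:2])
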
 # ==============================================================================
 def enks(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="EnKS16-KalmanFilterFormula"):
     """
@@ -1231,7 +1435,6 @@ def exkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
     """
     if selfA._parameters["EstimationOf"] == "Parameters":
         selfA._parameters["StoreInternalVariables"] = True
-    selfA._parameters["Bounds"] = ForceNumericBounds( selfA._parameters["Bounds"] )
     #
     # Opérateurs
     H = HO["Direct"].appliedControledFormTo
@@ -1302,20 +1505,16 @@ def exkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
         #
         if U is not None:
             if hasattr(U,"store") and len(U)>1:
-                Un = numpy.asmatrix(numpy.ravel( U[step] )).T
+                Un = numpy.ravel( U[step] ).reshape((-1,1))
             elif hasattr(U,"store") and len(U)==1:
-                Un = numpy.asmatrix(numpy.ravel( U[0] )).T
+                Un = numpy.ravel( U[0] ).reshape((-1,1))
             else:
-                Un = numpy.asmatrix(numpy.ravel( U )).T
+                Un = numpy.ravel( U ).reshape((-1,1))
         else:
             Un = None
         #
-        if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
-            Xn = numpy.max(numpy.hstack((Xn,numpy.asmatrix(selfA._parameters["Bounds"])[:,0])),axis=1)
-            Xn = numpy.min(numpy.hstack((Xn,numpy.asmatrix(selfA._parameters["Bounds"])[:,1])),axis=1)
-        #
         if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast
-            Xn_predicted = numpy.asmatrix(numpy.ravel( M( (Xn, Un) ) )).T
+            Xn_predicted = numpy.ravel( M( (Xn, Un) ) ).reshape((__n,1))
             if Cm is not None and Un is not None: # Attention : si Cm est aussi dans M, doublon !
                 Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
                 Xn_predicted = Xn_predicted + Cm * Un
@@ -1325,15 +1524,11 @@ def exkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
             Xn_predicted = Xn
             Pn_predicted = Pn
         #
-        if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
-            Xn_predicted = numpy.max(numpy.hstack((Xn_predicted,numpy.asmatrix(selfA._parameters["Bounds"])[:,0])),axis=1)
-            Xn_predicted = numpy.min(numpy.hstack((Xn_predicted,numpy.asmatrix(selfA._parameters["Bounds"])[:,1])),axis=1)
-        #
         if selfA._parameters["EstimationOf"] == "State":
-            HX_predicted = numpy.asmatrix(numpy.ravel( H( (Xn_predicted, None) ) )).T
+            HX_predicted = numpy.ravel( H( (Xn_predicted, None) ) ).reshape((__p,1))
             _Innovation  = Ynpu - HX_predicted
         elif selfA._parameters["EstimationOf"] == "Parameters":
-            HX_predicted = numpy.asmatrix(numpy.ravel( H( (Xn_predicted, Un) ) )).T
+            HX_predicted = numpy.ravel( H( (Xn_predicted, Un) ) ).reshape((__p,1))
             _Innovation  = Ynpu - HX_predicted
             if Cm is not None and Un is not None: # Attention : si Cm est aussi dans H, doublon !
                 _Innovation = _Innovation - Cm * Un
@@ -1342,10 +1537,6 @@ def exkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
         Xn = Xn_predicted + Kn * _Innovation
         Pn = Pn_predicted - Kn * Ht * Pn_predicted
         #
-        if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
-            Xn = numpy.max(numpy.hstack((Xn,numpy.asmatrix(selfA._parameters["Bounds"])[:,0])),axis=1)
-            Xn = numpy.min(numpy.hstack((Xn,numpy.asmatrix(selfA._parameters["Bounds"])[:,1])),axis=1)
-        #
         Xa = Xn # Pointeurs
         #--------------------------
         selfA._setInternalState("Xn", Xn)
@@ -1381,8 +1572,8 @@ def exkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
             or selfA._toStore("CostFunctionJo") \
             or selfA._toStore("CurrentOptimum") \
             or selfA._toStore("APosterioriCovariance"):
-            Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
-            Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
+            Jb  = float( 0.5 * (Xa - Xb).T @ (BI @ (Xa - Xb)) )
+            Jo  = float( 0.5 * _Innovation.T @ (RI @ _Innovation) )
             J   = Jb + Jo
             selfA.StoredVariables["CostFunctionJb"].store( Jb )
             selfA.StoredVariables["CostFunctionJo"].store( Jo )
@@ -2790,216 +2981,33 @@ def senkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="KalmanFilterFormula16"
     return 0
 
 # ==============================================================================
-def stdkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
+def std3dvar(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
     """
-    Standard Kalman Filter
+    3DVAR
     """
-    if selfA._parameters["EstimationOf"] == "Parameters":
-        selfA._parameters["StoreInternalVariables"] = True
     #
-    # Opérateurs
-    # ----------
-    Ht = HO["Tangent"].asMatrix(Xb)
-    Ha = HO["Adjoint"].asMatrix(Xb)
+    # Initialisations
+    # ---------------
     #
-    if selfA._parameters["EstimationOf"] == "State":
-        Mt = EM["Tangent"].asMatrix(Xb)
-        Ma = EM["Adjoint"].asMatrix(Xb)
+    # Opérateurs
+    Hm = HO["Direct"].appliedTo
+    Ha = HO["Adjoint"].appliedInXTo
     #
-    if CM is not None and "Tangent" in CM and U is not None:
-        Cm = CM["Tangent"].asMatrix(Xb)
+    # Utilisation éventuelle d'un vecteur H(Xb) précalculé
+    if HO["AppliedInX"] is not None and "HXb" in HO["AppliedInX"]:
+        HXb = Hm( Xb, HO["AppliedInX"]["HXb"] )
     else:
-        Cm = None
+        HXb = Hm( Xb )
+    HXb = numpy.asmatrix(numpy.ravel( HXb )).T
+    if Y.size != HXb.size:
+        raise ValueError("The size %i of observations Y and %i of observed calculation H(X) are different, they have to be identical."%(Y.size,HXb.size))
+    if max(Y.shape) != max(HXb.shape):
+        raise ValueError("The shapes %s of observations Y and %s of observed calculation H(X) are different, they have to be identical."%(Y.shape,HXb.shape))
     #
-    # Durée d'observation et tailles
-    if hasattr(Y,"stepnumber"):
-        duration = Y.stepnumber()
-        __p = numpy.cumprod(Y.shape())[-1]
-    else:
-        duration = 2
-        __p = numpy.array(Y).size
-    #
-    # Précalcul des inversions de B et R
-    if selfA._parameters["StoreInternalVariables"] \
-        or selfA._toStore("CostFunctionJ") \
-        or selfA._toStore("CostFunctionJb") \
-        or selfA._toStore("CostFunctionJo") \
-        or selfA._toStore("CurrentOptimum") \
-        or selfA._toStore("APosterioriCovariance"):
-        BI = B.getI()
-        RI = R.getI()
-    #
-    __n = Xb.size
-    #
-    if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
-        Xn = Xb
-        Pn = B
-        selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
-        selfA.StoredVariables["Analysis"].store( Xb )
-        if selfA._toStore("APosterioriCovariance"):
-            if hasattr(B,"asfullmatrix"):
-                selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(__n) )
-            else:
-                selfA.StoredVariables["APosterioriCovariance"].store( B )
-        selfA._setInternalState("seed", numpy.random.get_state())
-    elif selfA._parameters["nextStep"]:
-        Xn = selfA._getInternalState("Xn")
-        Pn = selfA._getInternalState("Pn")
-    #
-    if selfA._parameters["EstimationOf"] == "Parameters":
-        XaMin            = Xn
-        previousJMinimum = numpy.finfo(float).max
-    #
-    for step in range(duration-1):
-        if hasattr(Y,"store"):
-            Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
-        else:
-            Ynpu = numpy.ravel( Y ).reshape((__p,1))
-        #
-        if U is not None:
-            if hasattr(U,"store") and len(U)>1:
-                Un = numpy.asmatrix(numpy.ravel( U[step] )).T
-            elif hasattr(U,"store") and len(U)==1:
-                Un = numpy.asmatrix(numpy.ravel( U[0] )).T
-            else:
-                Un = numpy.asmatrix(numpy.ravel( U )).T
-        else:
-            Un = None
-        #
-        if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast
-            Xn_predicted = Mt * Xn
-            if Cm is not None and Un is not None: # Attention : si Cm est aussi dans M, doublon !
-                Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
-                Xn_predicted = Xn_predicted + Cm * Un
-            Pn_predicted = Q + Mt * Pn * Ma
-        elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast
-            # --- > Par principe, M = Id, Q = 0
-            Xn_predicted = Xn
-            Pn_predicted = Pn
-        #
-        if selfA._parameters["EstimationOf"] == "State":
-            HX_predicted = Ht * Xn_predicted
-            _Innovation  = Ynpu - HX_predicted
-        elif selfA._parameters["EstimationOf"] == "Parameters":
-            HX_predicted = Ht * Xn_predicted
-            _Innovation  = Ynpu - HX_predicted
-            if Cm is not None and Un is not None: # Attention : si Cm est aussi dans H, doublon !
-                _Innovation = _Innovation - Cm * Un
-        #
-        Kn = Pn_predicted * Ha * numpy.linalg.inv(R + numpy.dot(Ht, Pn_predicted * Ha))
-        Xn = Xn_predicted + Kn * _Innovation
-        Pn = Pn_predicted - Kn * Ht * Pn_predicted
-        #
-        Xa = Xn # Pointeurs
-        #--------------------------
-        selfA._setInternalState("Xn", Xn)
-        selfA._setInternalState("Pn", Pn)
-        #--------------------------
-        #
-        selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
-        # ---> avec analysis
-        selfA.StoredVariables["Analysis"].store( Xa )
-        if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
-            selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( Ht * Xa )
-        if selfA._toStore("InnovationAtCurrentAnalysis"):
-            selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
-        # ---> avec current state
-        if selfA._parameters["StoreInternalVariables"] \
-            or selfA._toStore("CurrentState"):
-            selfA.StoredVariables["CurrentState"].store( Xn )
-        if selfA._toStore("ForecastState"):
-            selfA.StoredVariables["ForecastState"].store( Xn_predicted )
-        if selfA._toStore("ForecastCovariance"):
-            selfA.StoredVariables["ForecastCovariance"].store( Pn_predicted )
-        if selfA._toStore("BMA"):
-            selfA.StoredVariables["BMA"].store( Xn_predicted - Xa )
-        if selfA._toStore("InnovationAtCurrentState"):
-            selfA.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
-        if selfA._toStore("SimulatedObservationAtCurrentState") \
-            or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
-            selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HX_predicted )
-        # ---> autres
-        if selfA._parameters["StoreInternalVariables"] \
-            or selfA._toStore("CostFunctionJ") \
-            or selfA._toStore("CostFunctionJb") \
-            or selfA._toStore("CostFunctionJo") \
-            or selfA._toStore("CurrentOptimum") \
-            or selfA._toStore("APosterioriCovariance"):
-            Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
-            Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
-            J   = Jb + Jo
-            selfA.StoredVariables["CostFunctionJb"].store( Jb )
-            selfA.StoredVariables["CostFunctionJo"].store( Jo )
-            selfA.StoredVariables["CostFunctionJ" ].store( J )
-            #
-            if selfA._toStore("IndexOfOptimum") \
-                or selfA._toStore("CurrentOptimum") \
-                or selfA._toStore("CostFunctionJAtCurrentOptimum") \
-                or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
-                or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
-                or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
-                IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
-            if selfA._toStore("IndexOfOptimum"):
-                selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
-            if selfA._toStore("CurrentOptimum"):
-                selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
-            if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
-                selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
-            if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
-                selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
-            if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
-                selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
-            if selfA._toStore("CostFunctionJAtCurrentOptimum"):
-                selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
-        if selfA._toStore("APosterioriCovariance"):
-            selfA.StoredVariables["APosterioriCovariance"].store( Pn )
-        if selfA._parameters["EstimationOf"] == "Parameters" \
-            and J < previousJMinimum:
-            previousJMinimum    = J
-            XaMin               = Xa
-            if selfA._toStore("APosterioriCovariance"):
-                covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
-    #
-    # Stockage final supplémentaire de l'optimum en estimation de paramètres
-    # ----------------------------------------------------------------------
-    if selfA._parameters["EstimationOf"] == "Parameters":
-        selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
-        selfA.StoredVariables["Analysis"].store( XaMin )
-        if selfA._toStore("APosterioriCovariance"):
-            selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
-        if selfA._toStore("BMA"):
-            selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
-    #
-    return 0
-
-# ==============================================================================
-def std3dvar(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
-    """
-    3DVAR
-    """
-    #
-    # Initialisations
-    # ---------------
-    #
-    # Opérateurs
-    Hm = HO["Direct"].appliedTo
-    Ha = HO["Adjoint"].appliedInXTo
-    #
-    # Utilisation éventuelle d'un vecteur H(Xb) précalculé
-    if HO["AppliedInX"] is not None and "HXb" in HO["AppliedInX"]:
-        HXb = Hm( Xb, HO["AppliedInX"]["HXb"] )
-    else:
-        HXb = Hm( Xb )
-    HXb = numpy.asmatrix(numpy.ravel( HXb )).T
-    if Y.size != HXb.size:
-        raise ValueError("The size %i of observations Y and %i of observed calculation H(X) are different, they have to be identical."%(Y.size,HXb.size))
-    if max(Y.shape) != max(HXb.shape):
-        raise ValueError("The shapes %s of observations Y and %s of observed calculation H(X) are different, they have to be identical."%(Y.shape,HXb.shape))
-    #
-    if selfA._toStore("JacobianMatrixAtBackground"):
-        HtMb = HO["Tangent"].asMatrix(ValueForMethodForm = Xb)
-        HtMb = HtMb.reshape(Y.size,Xb.size) # ADAO & check shape
-        selfA.StoredVariables["JacobianMatrixAtBackground"].store( HtMb )
+    if selfA._toStore("JacobianMatrixAtBackground"):
+        HtMb = HO["Tangent"].asMatrix(ValueForMethodForm = Xb)
+        HtMb = HtMb.reshape(Y.size,Xb.size) # ADAO & check shape
+        selfA.StoredVariables["JacobianMatrixAtBackground"].store( HtMb )
     #
     # Précalcul des inversions de B et R
     BI = B.getI()
@@ -3465,6 +3473,189 @@ def std4dvar(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
     #
     return 0
 
+# ==============================================================================
+def stdkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
+    """
+    Standard Kalman Filter
+    """
+    if selfA._parameters["EstimationOf"] == "Parameters":
+        selfA._parameters["StoreInternalVariables"] = True
+    #
+    # Opérateurs
+    # ----------
+    Ht = HO["Tangent"].asMatrix(Xb)
+    Ha = HO["Adjoint"].asMatrix(Xb)
+    #
+    if selfA._parameters["EstimationOf"] == "State":
+        Mt = EM["Tangent"].asMatrix(Xb)
+        Ma = EM["Adjoint"].asMatrix(Xb)
+    #
+    if CM is not None and "Tangent" in CM and U is not None:
+        Cm = CM["Tangent"].asMatrix(Xb)
+    else:
+        Cm = None
+    #
+    # Durée d'observation et tailles
+    if hasattr(Y,"stepnumber"):
+        duration = Y.stepnumber()
+        __p = numpy.cumprod(Y.shape())[-1]
+    else:
+        duration = 2
+        __p = numpy.array(Y).size
+    #
+    # Précalcul des inversions de B et R
+    if selfA._parameters["StoreInternalVariables"] \
+        or selfA._toStore("CostFunctionJ") \
+        or selfA._toStore("CostFunctionJb") \
+        or selfA._toStore("CostFunctionJo") \
+        or selfA._toStore("CurrentOptimum") \
+        or selfA._toStore("APosterioriCovariance"):
+        BI = B.getI()
+        RI = R.getI()
+    #
+    __n = Xb.size
+    #
+    if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
+        Xn = Xb
+        Pn = B
+        selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
+        selfA.StoredVariables["Analysis"].store( Xb )
+        if selfA._toStore("APosterioriCovariance"):
+            if hasattr(B,"asfullmatrix"):
+                selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(__n) )
+            else:
+                selfA.StoredVariables["APosterioriCovariance"].store( B )
+        selfA._setInternalState("seed", numpy.random.get_state())
+    elif selfA._parameters["nextStep"]:
+        Xn = selfA._getInternalState("Xn")
+        Pn = selfA._getInternalState("Pn")
+    #
+    if selfA._parameters["EstimationOf"] == "Parameters":
+        XaMin            = Xn
+        previousJMinimum = numpy.finfo(float).max
+    #
+    for step in range(duration-1):
+        if hasattr(Y,"store"):
+            Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
+        else:
+            Ynpu = numpy.ravel( Y ).reshape((__p,1))
+        #
+        if U is not None:
+            if hasattr(U,"store") and len(U)>1:
+                Un = numpy.asmatrix(numpy.ravel( U[step] )).T
+            elif hasattr(U,"store") and len(U)==1:
+                Un = numpy.asmatrix(numpy.ravel( U[0] )).T
+            else:
+                Un = numpy.asmatrix(numpy.ravel( U )).T
+        else:
+            Un = None
+        #
+        if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast
+            Xn_predicted = Mt * Xn
+            if Cm is not None and Un is not None: # Attention : si Cm est aussi dans M, doublon !
+                Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
+                Xn_predicted = Xn_predicted + Cm * Un
+            Pn_predicted = Q + Mt * Pn * Ma
+        elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast
+            # --- > Par principe, M = Id, Q = 0
+            Xn_predicted = Xn
+            Pn_predicted = Pn
+        #
+        if selfA._parameters["EstimationOf"] == "State":
+            HX_predicted = Ht * Xn_predicted
+            _Innovation  = Ynpu - HX_predicted
+        elif selfA._parameters["EstimationOf"] == "Parameters":
+            HX_predicted = Ht * Xn_predicted
+            _Innovation  = Ynpu - HX_predicted
+            if Cm is not None and Un is not None: # Attention : si Cm est aussi dans H, doublon !
+                _Innovation = _Innovation - Cm * Un
+        #
+        Kn = Pn_predicted * Ha * numpy.linalg.inv(R + numpy.dot(Ht, Pn_predicted * Ha))
+        Xn = Xn_predicted + Kn * _Innovation
+        Pn = Pn_predicted - Kn * Ht * Pn_predicted
+        #
+        Xa = Xn # Pointeurs
+        #--------------------------
+        selfA._setInternalState("Xn", Xn)
+        selfA._setInternalState("Pn", Pn)
+        #--------------------------
+        #
+        selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
+        # ---> avec analysis
+        selfA.StoredVariables["Analysis"].store( Xa )
+        if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
+            selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( Ht * Xa )
+        if selfA._toStore("InnovationAtCurrentAnalysis"):
+            selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
+        # ---> avec current state
+        if selfA._parameters["StoreInternalVariables"] \
+            or selfA._toStore("CurrentState"):
+            selfA.StoredVariables["CurrentState"].store( Xn )
+        if selfA._toStore("ForecastState"):
+            selfA.StoredVariables["ForecastState"].store( Xn_predicted )
+        if selfA._toStore("ForecastCovariance"):
+            selfA.StoredVariables["ForecastCovariance"].store( Pn_predicted )
+        if selfA._toStore("BMA"):
+            selfA.StoredVariables["BMA"].store( Xn_predicted - Xa )
+        if selfA._toStore("InnovationAtCurrentState"):
+            selfA.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
+        if selfA._toStore("SimulatedObservationAtCurrentState") \
+            or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
+            selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HX_predicted )
+        # ---> autres
+        if selfA._parameters["StoreInternalVariables"] \
+            or selfA._toStore("CostFunctionJ") \
+            or selfA._toStore("CostFunctionJb") \
+            or selfA._toStore("CostFunctionJo") \
+            or selfA._toStore("CurrentOptimum") \
+            or selfA._toStore("APosterioriCovariance"):
+            Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
+            Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
+            J   = Jb + Jo
+            selfA.StoredVariables["CostFunctionJb"].store( Jb )
+            selfA.StoredVariables["CostFunctionJo"].store( Jo )
+            selfA.StoredVariables["CostFunctionJ" ].store( J )
+            #
+            if selfA._toStore("IndexOfOptimum") \
+                or selfA._toStore("CurrentOptimum") \
+                or selfA._toStore("CostFunctionJAtCurrentOptimum") \
+                or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
+                or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
+                or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
+                IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
+            if selfA._toStore("IndexOfOptimum"):
+                selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
+            if selfA._toStore("CurrentOptimum"):
+                selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
+            if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
+                selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
+            if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
+                selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
+            if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
+                selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
+            if selfA._toStore("CostFunctionJAtCurrentOptimum"):
+                selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
+        if selfA._toStore("APosterioriCovariance"):
+            selfA.StoredVariables["APosterioriCovariance"].store( Pn )
+        if selfA._parameters["EstimationOf"] == "Parameters" \
+            and J < previousJMinimum:
+            previousJMinimum    = J
+            XaMin               = Xa
+            if selfA._toStore("APosterioriCovariance"):
+                covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
+    #
+    # Stockage final supplémentaire de l'optimum en estimation de paramètres
+    # ----------------------------------------------------------------------
+    if selfA._parameters["EstimationOf"] == "Parameters":
+        selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
+        selfA.StoredVariables["Analysis"].store( XaMin )
+        if selfA._toStore("APosterioriCovariance"):
+            selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
+        if selfA._toStore("BMA"):
+            selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
+    #
+    return 0
+
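The relocated stdkf applies the textbook linear Kalman update used throughout this file: the gain K = P H^T (R + H P H^T)^-1, the analysis state Xa = Xf + K (Y - H Xf), and the analysis covariance Pa = P - K H P. A self-contained single-step sketch on plain NumPy arrays (not ADAO covariance objects):

    import numpy

    def kalman_update(xf, Pf, y, H, R):
        # One linear Kalman analysis step: gain, state, covariance.
        S  = R + H @ Pf @ H.T                 # innovation covariance
        K  = Pf @ H.T @ numpy.linalg.inv(S)   # Kalman gain
        xa = xf + K @ (y - H @ xf)            # analysis state
        Pa = Pf - K @ H @ Pf                  # analysis covariance
        return xa, Pa
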
 # ==============================================================================
 def uckf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
     """
@@ -3520,7 +3711,9 @@ def uckf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
     if selfA._parameters["StoreInternalVariables"] \
         or selfA._toStore("CostFunctionJ") \
         or selfA._toStore("CostFunctionJb") \
-        or selfA._toStore("CostFunctionJo"):
+        or selfA._toStore("CostFunctionJo") \
+        or selfA._toStore("CurrentOptimum") \
+        or selfA._toStore("APosterioriCovariance"):
         BI = B.getI()
         RI = R.getI()
     #
@@ -3552,11 +3745,11 @@ def uckf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
         #
         if U is not None:
             if hasattr(U,"store") and len(U)>1:
-                Un = numpy.asmatrix(numpy.ravel( U[step] )).T
+                Un = numpy.ravel( U[step] ).reshape((-1,1))
             elif hasattr(U,"store") and len(U)==1:
-                Un = numpy.asmatrix(numpy.ravel( U[0] )).T
+                Un = numpy.ravel( U[0] ).reshape((-1,1))
             else:
-                Un = numpy.asmatrix(numpy.ravel( U )).T
+                Un = numpy.ravel( U ).reshape((-1,1))
         else:
             Un = None
         #
@@ -3647,22 +3840,58 @@ def uckf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
         selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
         # ---> avec analysis
         selfA.StoredVariables["Analysis"].store( Xa )
+        if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
+            selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( Hm((Xa, Un)) )
+        if selfA._toStore("InnovationAtCurrentAnalysis"):
+            selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
         # ---> avec current state
         if selfA._parameters["StoreInternalVariables"] \
             or selfA._toStore("CurrentState"):
             selfA.StoredVariables["CurrentState"].store( Xn )
+        if selfA._toStore("ForecastState"):
+            selfA.StoredVariables["ForecastState"].store( Xncm )
+        if selfA._toStore("ForecastCovariance"):
+            selfA.StoredVariables["ForecastCovariance"].store( Pnm )
+        if selfA._toStore("BMA"):
+            selfA.StoredVariables["BMA"].store( Xncm - Xa )
         if selfA._toStore("InnovationAtCurrentState"):
             selfA.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
+        if selfA._toStore("SimulatedObservationAtCurrentState") \
+            or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
+            selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( Yncm )
+        # ---> autres
         if selfA._parameters["StoreInternalVariables"] \
             or selfA._toStore("CostFunctionJ") \
             or selfA._toStore("CostFunctionJb") \
-            or selfA._toStore("CostFunctionJo"):
+            or selfA._toStore("CostFunctionJo") \
+            or selfA._toStore("CurrentOptimum") \
+            or selfA._toStore("APosterioriCovariance"):
             Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
             Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
             J   = Jb + Jo
             selfA.StoredVariables["CostFunctionJb"].store( Jb )
             selfA.StoredVariables["CostFunctionJo"].store( Jo )
             selfA.StoredVariables["CostFunctionJ" ].store( J )
+            #
+            if selfA._toStore("IndexOfOptimum") \
+                or selfA._toStore("CurrentOptimum") \
+                or selfA._toStore("CostFunctionJAtCurrentOptimum") \
+                or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
+                or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
+                or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
+                IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
+            if selfA._toStore("IndexOfOptimum"):
+                selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
+            if selfA._toStore("CurrentOptimum"):
+                selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
+            if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
+                selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
+            if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
+                selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
+            if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
+                selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
+            if selfA._toStore("CostFunctionJAtCurrentOptimum"):
+                selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
         if selfA._toStore("APosterioriCovariance"):
             selfA.StoredVariables["APosterioriCovariance"].store( Pn )
         if selfA._parameters["EstimationOf"] == "Parameters" \