Improvement of algorithm argument validation and tests
[modules/adao.git] / src / daComposant / daAlgorithms / UnscentedKalmanFilter.py
index 43c873069d4a1c1c165af2932bee03a23a11f993..496f5d32399ba54e2ab11932c8c4ba51ffe53b66 100644 (file)
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
 #
-# Copyright (C) 2008-2019 EDF R&D
+# Copyright (C) 2008-2020 EDF R&D
 #
 # This library is free software; you can redistribute it and/or
 # modify it under the terms of the GNU Lesser General Public
@@ -104,9 +104,16 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             mandatory= ("Xb", "Y", "HO", "R", "B" ),
             optional = ("U", "EM", "CM", "Q"),
             )
+        self.setAttributes(tags=(
+            "DataAssimilation",
+            "NonLinear",
+            "Filter",
+            "Ensemble",
+            "Dynamic",
+            ))
 
     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run(Parameters, Xb, Y, R, B, Q)
+        self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
         #
         if self._parameters["EstimationOf"] == "Parameters":
             self._parameters["StoreInternalVariables"] = True
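Note on the hunk above: it registers classification tags on the algorithm instance and widens the _pre_run call so that every operator and covariance argument (U, HO, EM, CM in addition to Xb, Y, R, B, Q) reaches the common pre-run validation. The snippet below is a minimal, hypothetical sketch of that pattern in plain Python; the MiniAlgorithm class and its validation logic are illustrative assumptions, not the actual BasicObjects.Algorithm implementation.

# Minimal, hypothetical sketch -- not the ADAO BasicObjects.Algorithm class.
class MiniAlgorithm:
    def __init__(self, name):
        self._name = name
        self._tags = ()
    def setAttributes(self, tags=()):
        # Record classification tags (e.g. "Filter", "Ensemble") on the instance.
        self._tags = tuple(tags)
    def _pre_run(self, Parameters, Xb, Y, U, HO, EM, CM, R, B, Q):
        # Receiving every argument in one place lets the mandatory inputs
        # (here Xb, Y, HO, R, B) be checked before the run starts.
        for label, value in (("Xb", Xb), ("Y", Y), ("HO", HO), ("R", R), ("B", B)):
            if value is None:
                raise ValueError("Mandatory argument %s is missing" % label)

algo = MiniAlgorithm("UNSCENTEDKALMANFILTER")
algo.setAttributes(tags=("DataAssimilation", "NonLinear", "Filter", "Ensemble", "Dynamic"))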
@@ -164,15 +171,19 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         #
         # Initialisation
         # --------------
+        __n = Xb.size
         Xn = Xb
-        if hasattr(B,"asfullmatrix"): Pn = B.asfullmatrix(Xn.size)
+        if hasattr(B,"asfullmatrix"): Pn = B.asfullmatrix(__n)
         else:                         Pn = B
         #
-        self.StoredVariables["Analysis"].store( Xn.A1 )
-        if self._toStore("APosterioriCovariance"):
-            self.StoredVariables["APosterioriCovariance"].store( Pn )
-            covarianceXa = Pn
-        Xa = XaMin       = Xb
+        if len(self.StoredVariables["Analysis"])==0 or not self._parameters["nextStep"]:
+            self.StoredVariables["Analysis"].store( numpy.ravel(Xb) )
+            if self._toStore("APosterioriCovariance"):
+                self.StoredVariables["APosterioriCovariance"].store( Pn )
+                covarianceXa = Pn
+        #
+        Xa               = Xb
+        XaMin            = Xb
         previousJMinimum = numpy.finfo(float).max
         #
         for step in range(duration-1):
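Note on the initialisation hunk above: the background Xb is now stored as the first analysis only when the Analysis series is still empty or when the run is not a continuation ("nextStep"), which avoids duplicating the initial state on step-by-step restarts; the covariance is expanded to a full matrix of size __n when B supports asfullmatrix. Below is a standalone, hypothetical illustration of that conditional first store; stored_analysis and the initialize function are simplified stand-ins, not the ADAO StoredVariables API.

# Standalone, hypothetical illustration; stored_analysis stands in for
# self.StoredVariables["Analysis"] and is not the ADAO storage API.
import numpy

stored_analysis = []

def initialize(Xb, B, nextStep=False):
    __n = Xb.size
    # Expand a scalar background covariance to a full (n, n) matrix, in the
    # spirit of the B.asfullmatrix(__n) branch above.
    Pn = B * numpy.eye(__n) if numpy.ndim(B) == 0 else numpy.asarray(B)
    # Store the background as the first analysis only once: when the series
    # is still empty, or when the run does not continue a previous step.
    if len(stored_analysis) == 0 or not nextStep:
        stored_analysis.append(numpy.ravel(Xb))
    return numpy.ravel(Xb), Pn

Xa, Pn = initialize(numpy.array([1.0, 2.0]), 0.5)
Xa, Pn = initialize(numpy.array([1.0, 2.0]), 0.5, nextStep=True)  # no duplicate store
print(len(stored_analysis))  # 1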