Salome HOME
Minor improvements and fixes for internal variables
[modules/adao.git] / src / daComposant / daAlgorithms / EnsembleKalmanFilter.py
index 8563d693ab0fd62a5ee0906c504fffa7d2c3d2d4..5eb7d5fd007669d436027fbd050b36c455a17c2e 100644 (file)
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
 #
-# Copyright (C) 2008-2019 EDF R&D
+# Copyright (C) 2008-2021 EDF R&D
 #
 # This library is free software; you can redistribute it and/or
 # modify it under the terms of the GNU Lesser General Public
 # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
 
 import logging
-from daCore import BasicObjects, PlatformInfo
-import numpy, math
-mfp = PlatformInfo.PlatformInfo().MaximumPrecision()
+from daCore import BasicObjects, NumericObjects
+import numpy
 
 # ==============================================================================
 class ElementaryAlgorithm(BasicObjects.Algorithm):
     def __init__(self):
         BasicObjects.Algorithm.__init__(self, "ENSEMBLEKALMANFILTER")
+        self.defineRequiredParameter(
+            name     = "Variant",
+            default  = "EnKF",
+            typecast = str,
+            message  = "Variant ou formulation de la méthode",
+            listval  = [
+                "EnKF",
+                "ETKF",
+                "ETKF-N",
+                "MLEF",
+                "IEnKF",
+                ],
+            listadv  = [
+                "StochasticEnKF",
+                "EnKF-05",
+                "EnKF-16",
+                "ETKF-KFF",
+                "ETKF-VAR",
+                "ETKF-N-11",
+                "ETKF-N-15",
+                "ETKF-N-16",
+                "MLEF-T",
+                "MLEF-B",
+                "IEnKF-T",
+                "IEnKF-B",
+                "IEKF",
+                ],
+            )
         self.defineRequiredParameter(
             name     = "NumberOfMembers",
             default  = 100,
             typecast = int,
             message  = "Nombre de membres dans l'ensemble",
-            minval   = -1,
+            minval   = 2,
             )
         self.defineRequiredParameter(
             name     = "EstimationOf",
@@ -43,6 +70,53 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             message  = "Estimation d'etat ou de parametres",
             listval  = ["State", "Parameters"],
             )
+        self.defineRequiredParameter(
+            name     = "InflationType",
+            default  = "MultiplicativeOnAnalysisCovariance",
+            typecast = str,
+            message  = "Méthode d'inflation d'ensemble",
+            listval  = [
+                "MultiplicativeOnAnalysisCovariance",
+                "MultiplicativeOnBackgroundCovariance",
+                "MultiplicativeOnAnalysisAnomalies",
+                "MultiplicativeOnBackgroundAnomalies",
+                "AdditiveOnAnalysisCovariance",
+                "AdditiveOnBackgroundCovariance",
+                "HybridOnBackgroundCovariance",
+                ],
+            )
+        self.defineRequiredParameter(
+            name     = "InflationFactor",
+            default  = 1.,
+            typecast = float,
+            message  = "Facteur d'inflation",
+            minval   = 0.,
+            )
+        self.defineRequiredParameter(
+            name     = "LocalizationType",
+            default  = "SchurLocalization",
+            typecast = str,
+            message  = "Méthode de localisation d'ensemble",
+            listval  = [
+                "SchurLocalization",
+                ],
+            listadv  = [
+                "CovarianceLocalization",
+                "DomainLocalization",
+                "GaspariCohnLocalization",
+                ],
+            )
+        self.defineRequiredParameter(
+            name     = "LocalizationFactor",
+            default  = 1.,
+            typecast = float,
+            message  = "Facteur de localisation",
+            minval   = 0.,
+            )
+        self.defineRequiredParameter( # Pas de type
+            name     = "LocalizationMatrix",
+            message  = "Matrice de localisation ou de distances",
+            )
         self.defineRequiredParameter(
             name     = "SetSeed",
             typecast = numpy.random.seed,
@@ -60,188 +134,95 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             typecast = tuple,
             message  = "Liste de calculs supplémentaires à stocker et/ou effectuer",
             listval  = [
+                "Analysis",
                 "APosterioriCorrelations",
                 "APosterioriCovariance",
                 "APosterioriStandardDeviations",
                 "APosterioriVariances",
                 "BMA",
                 "CostFunctionJ",
+                "CostFunctionJAtCurrentOptimum",
                 "CostFunctionJb",
+                "CostFunctionJbAtCurrentOptimum",
                 "CostFunctionJo",
+                "CostFunctionJoAtCurrentOptimum",
+                "CurrentIterationNumber",
+                "CurrentOptimum",
                 "CurrentState",
-                "Innovation",
+                "ForecastState",
+                "IndexOfOptimum",
+                "InnovationAtCurrentAnalysis",
+                "InnovationAtCurrentState",
+                "SimulatedObservationAtCurrentAnalysis",
+                "SimulatedObservationAtCurrentOptimum",
+                "SimulatedObservationAtCurrentState",
                 ]
             )
         self.requireInputArguments(
             mandatory= ("Xb", "Y", "HO", "R", "B"),
             optional = ("U", "EM", "CM", "Q"),
             )
+        self.setAttributes(tags=(
+            "DataAssimilation",
+            "NonLinear",
+            "Filter",
+            "Ensemble",
+            "Dynamic",
+            ))
 
     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run(Parameters, Xb, Y, R, B, Q)
+        self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
         #
-        if self._parameters["EstimationOf"] == "Parameters":
-            self._parameters["StoreInternalVariables"] = True
+        #--------------------------
+        # Default EnKF = EnKF-16 = StochasticEnKF
+        if   self._parameters["Variant"] == "EnKF-05":
+            NumericObjects.senkf(self, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="KalmanFilterFormula05")
         #
-        # Opérateurs
-        # ----------
-        H = HO["Direct"].appliedControledFormTo
+        elif self._parameters["Variant"] in ["EnKF-16", "StochasticEnKF", "EnKF"]:
+            NumericObjects.senkf(self, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="KalmanFilterFormula16")
         #
-        if self._parameters["EstimationOf"] == "State":
-            M = EM["Direct"].appliedControledFormTo
+        #--------------------------
+        # Default ETKF = ETKF-KFF
+        elif self._parameters["Variant"] in ["ETKF-KFF", "ETKF"]:
+            NumericObjects.etkf(self, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="KalmanFilterFormula")
         #
-        if CM is not None and "Tangent" in CM and U is not None:
-            Cm = CM["Tangent"].asMatrix(Xb)
-        else:
-            Cm = None
+        elif self._parameters["Variant"] == "ETKF-VAR":
+            NumericObjects.etkf(self, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="Variational")
         #
-        # Nombre de pas identique au nombre de pas d'observations
-        # -------------------------------------------------------
-        if hasattr(Y,"stepnumber"):
-            duration = Y.stepnumber()
-            __p = numpy.cumprod(Y.shape())[-1]
-        else:
-            duration = 2
-            __p = numpy.array(Y).size
+        #--------------------------
+        # Default ETKF-N = ETKF-N-16
+        elif self._parameters["Variant"] == "ETKF-N-11":
+            NumericObjects.etkf(self, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="FiniteSize11")
         #
-        # Précalcul des inversions de B et R
-        # ----------------------------------
-        if self._parameters["StoreInternalVariables"] or \
-            self._toStore("CostFunctionJ") or \
-            self._toStore("CostFunctionJb") or \
-            self._toStore("CostFunctionJo") or \
-            self._toStore("APosterioriCovariance"):
-            BI = B.getI()
-            RI = R.getI()
-        BIdemi = B.choleskyI()
-        RIdemi = R.choleskyI()
+        elif self._parameters["Variant"] == "ETKF-N-15":
+            NumericObjects.etkf(self, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="FiniteSize15")
         #
-        # Initialisation
-        # --------------
-        __n = Xb.size
-        __m = self._parameters["NumberOfMembers"]
-        Xn = numpy.asmatrix(numpy.dot( Xb.reshape(__n,1), numpy.ones((1,__m)) ))
-        if hasattr(B,"asfullmatrix"): Pn = B.asfullmatrix(__n)
-        else:                         Pn = B
-        if hasattr(R,"asfullmatrix"): Rn = R.asfullmatrix(__p)
-        else:                         Rn = R
-        if hasattr(Q,"asfullmatrix"): Qn = Q.asfullmatrix(__n)
-        else:                         Qn = Q
+        elif self._parameters["Variant"] in ["ETKF-N-16", "ETKF-N"]:
+            NumericObjects.etkf(self, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="FiniteSize16")
         #
-        self.StoredVariables["Analysis"].store( Xb.A1 )
-        if self._toStore("APosterioriCovariance"):
-            self.StoredVariables["APosterioriCovariance"].store( Pn )
-            covarianceXa = Pn
-        Xa               = Xb
-        previousJMinimum = numpy.finfo(float).max
+        #--------------------------
+        # Default MLEF = MLEF-T
+        elif self._parameters["Variant"] in ["MLEF-T", "MLEF"]:
+            NumericObjects.mlef(self, Xb, Y, U, HO, EM, CM, R, B, Q, BnotT=False)
         #
-        # Predimensionnement
-        Xn_predicted = numpy.asmatrix(numpy.zeros((__n,__m)))
-        HX_predicted = numpy.asmatrix(numpy.zeros((__p,__m)))
+        elif self._parameters["Variant"] == "MLEF-B":
+            NumericObjects.mlef(self, Xb, Y, U, HO, EM, CM, R, B, Q, BnotT=True)
         #
-        for step in range(duration-1):
-            if hasattr(Y,"store"):
-                Ynpu = numpy.asmatrix(numpy.ravel( Y[step+1] )).T
-            else:
-                Ynpu = numpy.asmatrix(numpy.ravel( Y )).T
-            #
-            if U is not None:
-                if hasattr(U,"store") and len(U)>1:
-                    Un = numpy.asmatrix(numpy.ravel( U[step] )).T
-                elif hasattr(U,"store") and len(U)==1:
-                    Un = numpy.asmatrix(numpy.ravel( U[0] )).T
-                else:
-                    Un = numpy.asmatrix(numpy.ravel( U )).T
-            else:
-                Un = None
-            #
-            if self._parameters["EstimationOf"] == "State":
-                for i in range(__m):
-                    qi = numpy.asmatrix(numpy.random.multivariate_normal(numpy.zeros(__n), Qn)).T
-                    Xn_predicted[:,i] = numpy.asmatrix(numpy.ravel( M((Xn[:,i], Un)) )).T + qi
-                    HX_predicted[:,i] = numpy.asmatrix(numpy.ravel( H((Xn_predicted[:,i], Un)) )).T
-                if Cm is not None and Un is not None: # Attention : si Cm est aussi dans M, doublon !
-                    Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
-                    Xn_predicted = Xn_predicted + Cm * Un
-            elif self._parameters["EstimationOf"] == "Parameters":
-                # --- > Par principe, M = Id, Q = 0
-                Xn_predicted = Xn
-            #
-            Xfm = numpy.asmatrix(numpy.ravel(Xn_predicted.mean(axis=1, dtype=mfp))).T
-            Hfm = numpy.asmatrix(numpy.ravel(HX_predicted.mean(axis=1, dtype=mfp))).T
-            Af  = Xn_predicted - Xfm
-            Hf  = HX_predicted - Hfm
-            #
-            PfHT, HPfHT = 0., 0.
-            for i in range(__m):
-                PfHT  += Af[:,i] * Hf[:,i].T
-                HPfHT += Hf[:,i] * Hf[:,i].T
-            PfHT  = (1./(__m-1)) * PfHT
-            HPfHT = (1./(__m-1)) * HPfHT
-            #
-            K = PfHT * ( R + HPfHT ).I
-            #
-            Yo = numpy.asmatrix(numpy.zeros((__p,__m)))
-            for i in range(__m):
-                ri = numpy.asmatrix(numpy.random.multivariate_normal(numpy.zeros(__p), Rn)).T
-                Yo[:,i] = Ynpu + ri
-            #
-            for i in range(__m):
-                Xn[:,i] = Xn_predicted[:,i] + K * (Yo[:,i] - HX_predicted[:,i])
-            #
-            Xa = Xn.mean(axis=1, dtype=mfp)
-            self.StoredVariables["Analysis"].store( Xa )
-            #
-            del Yo, PfHT, HPfHT
-            if self._parameters["StoreInternalVariables"] or \
-                self._toStore("CostFunctionJ") or \
-                self._toStore("CostFunctionJb") or \
-                self._toStore("CostFunctionJo") or \
-                self._toStore("APosterioriCovariance") or \
-                self._toStore("Innovation"):
-                d = Ynpu - numpy.asmatrix(numpy.ravel( H((Xa, Un)) )).T
-                self.StoredVariables["Innovation"].store( d )
-            if self._parameters["StoreInternalVariables"] \
-                or self._toStore("CurrentState"):
-                self.StoredVariables["CurrentState"].store( Xn )
-            if self._parameters["StoreInternalVariables"] or \
-                self._toStore("CostFunctionJ") or \
-                self._toStore("CostFunctionJb") or \
-                self._toStore("CostFunctionJo") or \
-                self._toStore("APosterioriCovariance"):
-                Jb  = 0.5 * (Xa - Xb).T * BI * (Xa - Xb)
-                Jo  = 0.5 * d.T * RI * d
-                J   = float( Jb ) + float( Jo )
-                self.StoredVariables["CostFunctionJb"].store( Jb )
-                self.StoredVariables["CostFunctionJo"].store( Jo )
-                self.StoredVariables["CostFunctionJ" ].store( J )
-            if self._toStore("APosterioriCovariance"):
-                Ht = HO["Tangent"].asMatrix(ValueForMethodForm = Xa)
-                Ht = Ht.reshape(__p,__n) # ADAO & check shape
-                Pf = 0.
-                for i in range(__m):
-                    Pf += Af[:,i] * Af[:,i].T
-                Pf = (1./(__m-1)) * Pf
-                Pn = (1. - K * Ht) * Pf
-                self.StoredVariables["APosterioriCovariance"].store( Pn )
-                if J < previousJMinimum:
-                    previousJMinimum  = J
-                    Xa                = Xn
-                    covarianceXa      = Pn
+        #--------------------------
+        # Default IEnKF = IEnKF-T
+        elif self._parameters["Variant"] in ["IEnKF-T", "IEnKF"]:
+            NumericObjects.ienkf(self, Xb, Y, U, HO, EM, CM, R, B, Q, BnotT=False)
         #
-        # Stockage supplementaire de l'optimum en estimation de parametres
-        # ----------------------------------------------------------------
-        if self._parameters["EstimationOf"] == "Parameters":
-            self.StoredVariables["Analysis"].store( Xa.A1 )
-            if self._toStore("APosterioriCovariance"):
-                self.StoredVariables["APosterioriCovariance"].store( covarianceXa )
+        elif self._parameters["Variant"] in ["IEnKF-B", "IEKF"]:
+            NumericObjects.ienkf(self, Xb, Y, U, HO, EM, CM, R, B, Q, BnotT=True)
         #
-        if self._toStore("BMA"):
-            self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
+        #--------------------------
+        else:
+            raise ValueError("Error in Variant name: %s"%self._parameters["Variant"])
         #
         self._post_run(HO)
         return 0
 
 # ==============================================================================
 if __name__ == "__main__":
-    print('\n AUTODIAGNOSTIC \n')
+    print('\n AUTODIAGNOSTIC\n')