SALOME platform Git repositories - modules/adao.git/commitdiff
Minor improvements and type fixes for internal variables
author Jean-Philippe ARGAUD <jean-philippe.argaud@edf.fr>
Mon, 8 Feb 2021 17:16:05 +0000 (18:16 +0100)
committer Jean-Philippe ARGAUD <jean-philippe.argaud@edf.fr>
Mon, 8 Feb 2021 17:37:34 +0000 (18:37 +0100)
src/daComposant/daCore/NumericObjects.py
test/test6901/Verification_des_Assimilation_Algorithms.py
test/test6903/Verification_des_mono_et_multi_fonctions_E.py

index 8554305b2df7e40d5418767c508dc1e182d4934a..762e047e64e70031bfea3e6b71b16902256d28f8 100644 (file)
@@ -748,14 +748,14 @@ def senkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="KalmanFilterFormula"):
         #
         # Mean of forecast and observation of forecast
         Xfm  = Xn_predicted.mean(axis=1, dtype=mfp).astype('float').reshape((__n,-1))
-        Hfm  = HX_predicted.mean(axis=1, dtype=mfp).astype('float').reshape((__p,1))
+        Hfm  = HX_predicted.mean(axis=1, dtype=mfp).astype('float').reshape((__p,-1))
         #
         #--------------------------
         if VariantM == "KalmanFilterFormula05":
             PfHT, HPfHT = 0., 0.
             for i in range(__m):
-                Exfi = Xn_predicted[:,i] - Xfm
-                Eyfi = HX_predicted[:,i] - Hfm
+                Exfi = Xn_predicted[:,i].reshape((__n,-1)) - Xfm
+                Eyfi = HX_predicted[:,i].reshape((__p,-1)) - Hfm
                 PfHT  += Exfi * Eyfi.T
                 HPfHT += Eyfi * Eyfi.T
             PfHT  = (1./(__m-1)) * PfHT
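
Note on the reshape changes in this hunk: with NumPy broadcasting, subtracting a (n,1) column mean from a 1-D ensemble slice of shape (n,) silently yields an (n,n) array rather than a column anomaly. A minimal standalone sketch (toy arrays, not the ADAO variables):

    import numpy
    A = numpy.arange(6.).reshape((3, 2))         # toy 3x2 ensemble (__n=3, __m=2)
    m = A.mean(axis=1).reshape((3, -1))          # ensemble mean as a (3,1) column
    print((A[:, 0] - m).shape)                   # (3, 3): unwanted broadcast
    print((A[:, 0].reshape((3, -1)) - m).shape)  # (3, 1): the intended anomaly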
@@ -765,7 +765,7 @@ def senkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="KalmanFilterFormula"):
             #
             for i in range(__m):
                 ri = numpy.random.multivariate_normal(numpy.zeros(__p), Rn)
-                Xn[:,i] = Xn_predicted[:,i] + numpy.ravel(Kn @ (numpy.ravel(Ynpu) + ri - HX_predicted[:,i])).T
+                Xn[:,i] = numpy.ravel(Xn_predicted[:,i]) + Kn @ (numpy.ravel(Ynpu) + ri - HX_predicted[:,i])
         #--------------------------
         elif VariantM == "KalmanFilterFormula16":
             EpY   = EnsembleOfCenteredPerturbations(Ynpu, Rn, __m)
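
Note on the numpy.ravel added to the analysis update in both variants: when Xn is a plain 2-D ndarray, the column slice Xn[:,i] expects a 1-D value, so a (n,1) right-hand side must be flattened first. A toy illustration (hypothetical arrays, not the ADAO state):

    import numpy
    Xn  = numpy.zeros((3, 2))        # plain 2-D ndarray
    col = numpy.ones((3, 1))         # a (3,1) column result
    Xn[:, 0] = numpy.ravel(col)      # OK: 1-D shape (3,) fits the slice
    # Xn[:, 0] = col                 # ValueError: cannot broadcast (3,1) into (3,)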
@@ -774,10 +774,10 @@ def senkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="KalmanFilterFormula"):
             EaX   = EnsembleOfAnomalies( Xn_predicted ) / numpy.sqrt(__m-1)
             EaY = (HX_predicted - Hfm - EpY + EpYm) / numpy.sqrt(__m-1)
             #
-            Kn = EaX @ EaX.T @ numpy.linalg.inv( EaY @ EaY.T)
+            Kn = EaX @ EaY.T @ numpy.linalg.inv( EaY @ EaY.T)
             #
             for i in range(__m):
-                Xn[:,i] = Xn_predicted[:,i] + Kn @ (numpy.ravel(EpY[:,i]) - HX_predicted[:,i])
+                Xn[:,i] = numpy.ravel(Xn_predicted[:,i]) + Kn @ (numpy.ravel(EpY[:,i]) - HX_predicted[:,i])
         #--------------------------
         else:
             raise ValueError("VariantM has to be chosen in the authorized methods list.")
@@ -993,7 +993,7 @@ def etkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="KalmanFilterFormula"):
         #
         # Anomalies
         EaX   = EnsembleOfAnomalies( Xn_predicted )
-        EaHX  = numpy.matrix(HX_predicted - Hfm)
+        EaHX  = numpy.array(HX_predicted - Hfm)
         #
         #--------------------------
         if VariantM == "KalmanFilterFormula":
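
The numpy.matrix to numpy.array change above explains the operator rewrites in the following hunks: for numpy.matrix, * is the matrix product, while for a plain ndarray * is elementwise and @ is the matrix product. Hence EaHX.T * RI * _A becomes EaHX.T @ (RI * _A), where the parenthesized RI * _A presumably still relies on the semantics of RI's own type. A minimal illustration:

    import numpy
    M = numpy.matrix([[1., 2.], [3., 4.]])
    A = numpy.array([[1., 2.], [3., 4.]])
    print(M * M)   # matrix product (numpy.matrix semantics)
    print(A * A)   # elementwise product (ndarray semantics)
    print(A @ A)   # matrix product: the ndarray replacement for M * M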
@@ -1012,13 +1012,13 @@ def etkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="KalmanFilterFormula"):
             HXfm = H((Xfm[:,None], Un)) # Possibly Hfm
             def CostFunction(w):
                 _A  = Ynpu - HXfm.reshape((__p,-1)) - (EaHX @ w).reshape((__p,-1))
-                _Jo = 0.5 * _A.T * RI * _A
+                _Jo = 0.5 * _A.T @ (RI * _A)
                 _Jb = 0.5 * (__m-1) * w.T @ w
                 _J  = _Jo + _Jb
                 return float(_J)
             def GradientOfCostFunction(w):
                 _A  = Ynpu - HXfm.reshape((__p,-1)) - (EaHX @ w).reshape((__p,-1))
-                _GradJo = - EaHX.T * RI * _A
+                _GradJo = - EaHX.T @ (RI * _A)
                 _GradJb = (__m-1) * w.reshape((__m,1))
                 _GradJ  = _GradJo + _GradJb
                 return numpy.ravel(_GradJ)
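
For this variational variant, the observation term is Jo(w) = 0.5 (d - EaHX w)^T R^{-1} (d - EaHX w) with d the innovation, so its gradient is -EaHX^T R^{-1} (d - EaHX w), plus the (m-1) w background term, as coded above. A hedged standalone check with toy data (diagonal R^{-1}, random anomalies, not the ADAO objects):

    import numpy
    rng  = numpy.random.default_rng(1)
    __p, __m = 5, 4
    EaHX = rng.standard_normal((__p, __m))         # toy observation anomalies
    RI   = numpy.diag(rng.uniform(0.5, 2.0, __p))  # toy R^{-1}, diagonal here
    d    = rng.standard_normal(__p)                # toy innovation Ynpu - HXfm
    def J(w):                                      # Jo + Jb of this variant
        r = d - EaHX @ w
        return 0.5 * (r @ RI @ r) + 0.5 * (__m - 1) * (w @ w)
    def gradJ(w):                                  # as in GradientOfCostFunction
        r = d - EaHX @ w
        return -EaHX.T @ RI @ r + (__m - 1) * w
    w0, eps = rng.standard_normal(__m), 1.e-6
    fd = [(J(w0 + eps * e) - J(w0 - eps * e)) / (2 * eps) for e in numpy.eye(__m)]
    print(numpy.allclose(fd, gradJ(w0), atol=1.e-5))  # True: gradient matches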
@@ -1030,7 +1030,7 @@ def etkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="KalmanFilterFormula"):
                 disp        = False,
                 )
             #
-            Hto = EaHX.T * RI * EaHX
+            Hto = EaHX.T @ (RI * EaHX)
             Htb = (__m-1) * numpy.eye(__m)
             Hta = Hto + Htb
             #
@@ -1043,13 +1043,13 @@ def etkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="KalmanFilterFormula"):
             HXfm = H((Xfm[:,None], Un)) # Possibly Hfm
             def CostFunction(w):
                 _A  = Ynpu - HXfm.reshape((__p,-1)) - (EaHX @ w).reshape((__p,-1))
-                _Jo = 0.5 * _A.T * RI * _A
+                _Jo = 0.5 * _A.T @ (RI * _A)
                 _Jb = 0.5 * __m * math.log(1 + 1/__m + w.T @ w)
                 _J  = _Jo + _Jb
                 return float(_J)
             def GradientOfCostFunction(w):
                 _A  = Ynpu - HXfm.reshape((__p,-1)) - (EaHX @ w).reshape((__p,-1))
-                _GradJo = - EaHX.T * RI * _A
+                _GradJo = - EaHX.T @ (RI * _A)
                 _GradJb = __m * w.reshape((__m,1)) / (1 + 1/__m + w.T @ w)
                 _GradJ  = _GradJo + _GradJb
                 return numpy.ravel(_GradJ)
@@ -1061,7 +1061,7 @@ def etkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="KalmanFilterFormula"):
                 disp        = False,
                 )
             #
-            Hto = EaHX.T * RI * EaHX
+            Hto = EaHX.T @ (RI * EaHX)
             Htb = __m * \
                 ( (1 + 1/__m + vw.T @ vw) * numpy.eye(__m) - 2 * vw @ vw.T ) \
                 / (1 + 1/__m + vw.T @ vw)**2
@@ -1082,7 +1082,7 @@ def etkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="KalmanFilterFormula"):
                 return float(_J)
             def GradientOfCostFunction(w):
                 _A  = Ynpu - HXfm.reshape((__p,-1)) - (EaHX @ w).reshape((__p,-1))
-                _GradJo = - EaHX.T * RI * _A
+                _GradJo = - EaHX.T @ (RI * _A)
                 _GradJb = (__m+1) * w.reshape((__m,1)) / (1 + 1/__m + w.T @ w)
                 _GradJ  = _GradJo + _GradJb
                 return numpy.ravel(_GradJ)
@@ -1094,7 +1094,7 @@ def etkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="KalmanFilterFormula"):
                 disp        = False,
                 )
             #
-            Hto = EaHX.T * RI * EaHX
+            Hto = EaHX.T @ (RI * EaHX)
             Htb = (__m+1) * \
                 ( (1 + 1/__m + vw.T @ vw) * numpy.eye(__m) - 2 * vw @ vw.T ) \
                 / (1 + 1/__m + vw.T @ vw)**2
@@ -1109,13 +1109,13 @@ def etkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="KalmanFilterFormula"):
             HXfm = H((Xfm[:,None], Un)) # Possibly Hfm
             def CostFunction(w):
                 _A  = Ynpu - HXfm.reshape((__p,-1)) - (EaHX @ w).reshape((__p,-1))
-                _Jo = 0.5 * _A.T * RI * _A
+                _Jo = 0.5 * _A.T @ (RI * _A)
                 _Jb = 0.5 * (__m+1) * math.log(1 + 1/__m + w.T @ w / (__m-1))
                 _J  = _Jo + _Jb
                 return float(_J)
             def GradientOfCostFunction(w):
                 _A  = Ynpu - HXfm.reshape((__p,-1)) - (EaHX @ w).reshape((__p,-1))
-                _GradJo = - EaHX.T * RI * _A
+                _GradJo = - EaHX.T @ (RI * _A)
                 _GradJb = ((__m+1) / (__m-1)) * w.reshape((__m,1)) / (1 + 1/__m + w.T @ w / (__m-1))
                 _GradJ  = _GradJo + _GradJb
                 return numpy.ravel(_GradJ)
@@ -1127,7 +1127,7 @@ def etkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="KalmanFilterFormula"):
                 disp        = False,
                 )
             #
-            Hto = EaHX.T * RI * EaHX
+            Hto = EaHX.T @ (RI * EaHX)
             Htb = ((__m+1) / (__m-1)) * \
                 ( (1 + 1/__m + vw.T @ vw / (__m-1)) * numpy.eye(__m) - 2 * vw @ vw.T / (__m-1) ) \
                 / (1 + 1/__m + vw.T @ vw / (__m-1))**2
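
Each variant then assembles the approximate Hessian of the cost in w: Hto = EaHX^T R^{-1} EaHX is the common Gauss-Newton observation part, and Htb is the variant-specific background part ((m-1) I for the first variant, the rational expressions above for the others). A toy sketch of the assembly with plain arrays and a diagonal R^{-1}:

    import numpy
    rng  = numpy.random.default_rng(2)
    __p, __m = 5, 4
    EaHX = rng.standard_normal((__p, __m))
    RI   = numpy.diag(rng.uniform(0.5, 2.0, __p))
    Hto  = EaHX.T @ RI @ EaHX           # Gauss-Newton part, d2Jo/dw2
    Htb  = (__m - 1) * numpy.eye(__m)   # background part of the first variant
    Hta  = Hto + Htb
    print(numpy.allclose(Hta, Hta.T))   # True: symmetric, as a Hessian must be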
index 61d5de3be9c0d6f89f0fbe1c17a56d40d88e9206..f83cc40c50592178b326800e2b5d3d61ed945a46 100644 (file)
@@ -108,7 +108,7 @@ class Test_Adao(unittest.TestCase):
         verify_similarity_of_algo_results(("3DVAR", "Blue", "ExtendedBlue", "4DVAR", "DerivativeFreeOptimization"), Xa, 5.e-5)
         verify_similarity_of_algo_results(("LinearLeastSquares", "NonLinearLeastSquares"), Xa, 5.e-7)
         verify_similarity_of_algo_results(("KalmanFilter", "ExtendedKalmanFilter", "UnscentedKalmanFilter"), Xa, 1.e-14)
-        #~ verify_similarity_of_algo_results(("KalmanFilter", "EnsembleKalmanFilter"), Xa, 5.e-2)
+        verify_similarity_of_algo_results(("KalmanFilter", "EnsembleKalmanFilter"), Xa, 7.e-2)
         print("  Les resultats obtenus sont corrects.")
         print("")
         #
@@ -182,7 +182,7 @@ class Test_Adao(unittest.TestCase):
         print(msg+"\n"+"="*len(msg))
         verify_similarity_of_algo_results(("3DVAR", "Blue", "ExtendedBlue", "4DVAR", "DerivativeFreeOptimization"), Xa, 5.e-5)
         verify_similarity_of_algo_results(("KalmanFilter", "ExtendedKalmanFilter", "UnscentedKalmanFilter"), Xa, 1.e14)
-        #~ verify_similarity_of_algo_results(("KalmanFilter", "EnsembleKalmanFilter"), Xa, 5.e-2)
+        verify_similarity_of_algo_results(("KalmanFilter", "EnsembleKalmanFilter"), Xa, 7.e-2)
         print("  Les resultats obtenus sont corrects.")
         print("")
         #
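
The checks re-enabled in the two hunks above compare the KalmanFilter and EnsembleKalmanFilter analyses under a loosened 7.e-2 tolerance, consistent with the sampling spread of a stochastic ensemble method. The helper itself is not shown in this diff; the following is only a hypothetical sketch of such a relative-difference test, not the actual verify_similarity_of_algo_results:

    import numpy
    def similarity_ok(xa1, xa2, tolerance):
        # Hypothetical semantics: relative L2 gap between two analyses
        # (assumed for illustration, not the real ADAO helper).
        gap = numpy.linalg.norm(numpy.ravel(xa1) - numpy.ravel(xa2))
        ref = max(numpy.linalg.norm(numpy.ravel(xa1)), 1.e-30)
        return gap / ref <= tolerance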
index e06e68f9a0faa3a8d076d0cf87b38be1ebb9f74e..30da9d236702101e7c27256d190f80d29512e004 100644 (file)
@@ -76,8 +76,7 @@ class Test_Adao(unittest.TestCase):
         print("\n        "+self.test1.__doc__.strip()+"\n")
         Xa = {}
         #
-        #~ for algo in ("ExtendedKalmanFilter", "KalmanFilter", "EnsembleKalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
-        for algo in ("ExtendedKalmanFilter", "KalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
+        for algo in ("ExtendedKalmanFilter", "KalmanFilter", "EnsembleKalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
             print("")
             msg = "Algorithme en test en MonoFonction : %s"%algo
             print(msg+"\n"+"-"*len(msg))
@@ -96,8 +95,7 @@ class Test_Adao(unittest.TestCase):
             Xa["Mono/"+algo] = adaopy.get("Analysis")[-1]
             del adaopy
         #
-        #~ for algo in ("ExtendedKalmanFilter", "KalmanFilter", "EnsembleKalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
-        for algo in ("ExtendedKalmanFilter", "KalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
+        for algo in ("ExtendedKalmanFilter", "KalmanFilter", "EnsembleKalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
             print("")
             msg = "Algorithme en test en MultiFonction : %s"%algo
             print(msg+"\n"+"-"*len(msg))
@@ -119,8 +117,7 @@ class Test_Adao(unittest.TestCase):
         print("")
         msg = "Tests des ecarts attendus :"
         print(msg+"\n"+"="*len(msg))
-        #~ for algo in ("ExtendedKalmanFilter", "KalmanFilter", "EnsembleKalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
-        for algo in ("ExtendedKalmanFilter", "KalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
+        for algo in ("ExtendedKalmanFilter", "KalmanFilter", "EnsembleKalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
             verify_similarity_of_algo_results(("Multi/"+algo, "Mono/"+algo), Xa, 1.e-20)
         print("  Les resultats obtenus sont corrects.")
         print("")