From: Jean-Philippe ARGAUD
Date: Mon, 8 Feb 2021 17:16:05 +0000 (+0100)
Subject: Minor improvements and type fixes for internal variables
X-Git-Tag: V9_7_0b1~33
X-Git-Url: http://git.salome-platform.org/gitweb/?a=commitdiff_plain;h=14a6c3a5d211e4319a57a97637abf5e2f426016a;p=modules%2Fadao.git

Minor improvements and type fixes for internal variables
---

diff --git a/src/daComposant/daCore/NumericObjects.py b/src/daComposant/daCore/NumericObjects.py
index 8554305..762e047 100644
--- a/src/daComposant/daCore/NumericObjects.py
+++ b/src/daComposant/daCore/NumericObjects.py
@@ -748,14 +748,14 @@ def senkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="KalmanFilterFormula"):
         #
         # Mean of forecast and observation of forecast
         Xfm = Xn_predicted.mean(axis=1, dtype=mfp).astype('float').reshape((__n,-1))
-        Hfm = HX_predicted.mean(axis=1, dtype=mfp).astype('float').reshape((__p,1))
+        Hfm = HX_predicted.mean(axis=1, dtype=mfp).astype('float').reshape((__p,-1))
         #
         #--------------------------
         if VariantM == "KalmanFilterFormula05":
             PfHT, HPfHT = 0., 0.
             for i in range(__m):
-                Exfi = Xn_predicted[:,i] - Xfm
-                Eyfi = HX_predicted[:,i] - Hfm
+                Exfi = Xn_predicted[:,i].reshape((__n,-1)) - Xfm
+                Eyfi = HX_predicted[:,i].reshape((__p,-1)) - Hfm
                 PfHT  += Exfi * Eyfi.T
                 HPfHT += Eyfi * Eyfi.T
             PfHT = (1./(__m-1)) * PfHT
@@ -765,7 +765,7 @@ def senkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="KalmanFilterFormula"):
             #
             for i in range(__m):
                 ri = numpy.random.multivariate_normal(numpy.zeros(__p), Rn)
-                Xn[:,i] = Xn_predicted[:,i] + numpy.ravel(Kn @ (numpy.ravel(Ynpu) + ri - HX_predicted[:,i])).T
+                Xn[:,i] = numpy.ravel(Xn_predicted[:,i]) + Kn @ (numpy.ravel(Ynpu) + ri - HX_predicted[:,i])
         #--------------------------
         elif VariantM == "KalmanFilterFormula16":
             EpY = EnsembleOfCenteredPerturbations(Ynpu, Rn, __m)
@@ -774,10 +774,10 @@ def senkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="KalmanFilterFormula"):
             EaX = EnsembleOfAnomalies( Xn_predicted ) / numpy.sqrt(__m-1)
             EaY = (HX_predicted - Hfm - EpY + EpYm) / numpy.sqrt(__m-1)
             #
-            Kn = EaX @ EaX.T @ numpy.linalg.inv( EaY @ EaY.T)
+            Kn = EaX @ EaY.T @ numpy.linalg.inv( EaY @ EaY.T)
             #
             for i in range(__m):
-                Xn[:,i] = Xn_predicted[:,i] + Kn @ (numpy.ravel(EpY[:,i]) - HX_predicted[:,i])
+                Xn[:,i] = numpy.ravel(Xn_predicted[:,i]) + Kn @ (numpy.ravel(EpY[:,i]) - HX_predicted[:,i])
         #--------------------------
         else:
             raise ValueError("VariantM has to be chosen in the authorized methods list.")
@@ -993,7 +993,7 @@ def etkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="KalmanFilterFormula"):
         #
         # Anomalies
         EaX = EnsembleOfAnomalies( Xn_predicted )
-        EaHX = numpy.matrix(HX_predicted - Hfm)
+        EaHX = numpy.array(HX_predicted - Hfm)
         #
         #--------------------------
         if VariantM == "KalmanFilterFormula":
@@ -1012,13 +1012,13 @@ def etkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="KalmanFilterFormula"):
             HXfm = H((Xfm[:,None], Un)) # Eventuellement Hfm
             def CostFunction(w):
                 _A  = Ynpu - HXfm.reshape((__p,-1)) - (EaHX @ w).reshape((__p,-1))
-                _Jo = 0.5 * _A.T * RI * _A
+                _Jo = 0.5 * _A.T @ (RI * _A)
                 _Jb = 0.5 * (__m-1) * w.T @ w
                 _J  = _Jo + _Jb
                 return float(_J)
             def GradientOfCostFunction(w):
                 _A = Ynpu - HXfm.reshape((__p,-1)) - (EaHX @ w).reshape((__p,-1))
-                _GardJo = - EaHX.T * RI * _A
+                _GardJo = - EaHX.T @ (RI * _A)
                 _GradJb = (__m-1) * w.reshape((__m,1))
                 _GradJ  = _GardJo + _GradJb
                 return numpy.ravel(_GradJ)
@@ -1030,7 +1030,7 @@ def etkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="KalmanFilterFormula"):
                 disp        = False,
                 )
             #
-            Hto = EaHX.T * RI * EaHX
+            Hto = EaHX.T @ (RI * EaHX)
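The two genuine bug fixes above are in the "KalmanFilterFormula16" branch of senkf: the gain now uses the state/observation cross covariance EaX @ EaY.T instead of the state covariance EaX @ EaX.T, and the per-member update works on flat 1D vectors via numpy.ravel. For illustration only, a minimal standalone sketch of the corrected gain with made-up sizes and random ensembles (only the EaX, EaY, Kn names follow the code above):

    import numpy

    numpy.random.seed(1234)
    __n, __p, __m = 4, 2, 100                  # state size, obs size, members (made up)
    E  = numpy.random.randn(__n, __m)          # forecast ensemble
    HE = E[:__p,:] + 0.1*numpy.random.randn(__p, __m)  # observed ensemble, toy operator

    # Normalized anomalies, as in senkf
    EaX = (E  - E.mean(axis=1, keepdims=True)) / numpy.sqrt(__m-1)
    EaY = (HE - HE.mean(axis=1, keepdims=True)) / numpy.sqrt(__m-1)

    # The old line used EaX @ EaX.T, a (__n,__n) state covariance: the wrong factor,
    # which only conforms dimensionally when __n == __p.
    Kn = EaX @ EaY.T @ numpy.linalg.inv( EaY @ EaY.T )  # K = Cov(X,HX) Cov(HX,HX)^-1
    print(Kn.shape)                            # (__n, __p): maps innovations to states

The etkf changes are type fixes rather than formula changes: once EaHX is a numpy.array instead of a numpy.matrix, chained * products are no longer matrix products, so each quadratic form is rewritten with the explicit @ operator, as in EaHX.T @ (RI * _A).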
            Htb = (__m-1) * numpy.eye(__m)
            Hta = Hto + Htb
            #
@@ -1043,13 +1043,13 @@ def etkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="KalmanFilterFormula"):
             HXfm = H((Xfm[:,None], Un)) # Eventuellement Hfm
             def CostFunction(w):
                 _A  = Ynpu - HXfm.reshape((__p,-1)) - (EaHX @ w).reshape((__p,-1))
-                _Jo = 0.5 * _A.T * RI * _A
+                _Jo = 0.5 * _A.T @ (RI * _A)
                 _Jb = 0.5 * __m * math.log(1 + 1/__m + w.T @ w)
                 _J  = _Jo + _Jb
                 return float(_J)
             def GradientOfCostFunction(w):
                 _A = Ynpu - HXfm.reshape((__p,-1)) - (EaHX @ w).reshape((__p,-1))
-                _GardJo = - EaHX.T * RI * _A
+                _GardJo = - EaHX.T @ (RI * _A)
                 _GradJb = __m * w.reshape((__m,1)) / (1 + 1/__m + w.T @ w)
                 _GradJ  = _GardJo + _GradJb
                 return numpy.ravel(_GradJ)
@@ -1061,7 +1061,7 @@ def etkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="KalmanFilterFormula"):
                 disp        = False,
                 )
             #
-            Hto = EaHX.T * RI * EaHX
+            Hto = EaHX.T @ (RI * EaHX)
             Htb = __m * \
                 ( (1 + 1/__m + vw.T @ vw) * numpy.eye(__m) - 2 * vw @ vw.T ) \
                 / (1 + 1/__m + vw.T @ vw)**2
@@ -1082,7 +1082,7 @@ def etkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="KalmanFilterFormula"):
                 return float(_J)
             def GradientOfCostFunction(w):
                 _A = Ynpu - HXfm.reshape((__p,-1)) - (EaHX @ w).reshape((__p,-1))
-                _GardJo = - EaHX.T * RI * _A
+                _GardJo = - EaHX.T @ (RI * _A)
                 _GradJb = (__m+1) * w.reshape((__m,1)) / (1 + 1/__m + w.T @ w)
                 _GradJ  = _GardJo + _GradJb
                 return numpy.ravel(_GradJ)
@@ -1094,7 +1094,7 @@ def etkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="KalmanFilterFormula"):
                 disp        = False,
                 )
             #
-            Hto = EaHX.T * RI * EaHX
+            Hto = EaHX.T @ (RI * EaHX)
             Htb = (__m+1) * \
                 ( (1 + 1/__m + vw.T @ vw) * numpy.eye(__m) - 2 * vw @ vw.T ) \
                 / (1 + 1/__m + vw.T @ vw)**2
@@ -1109,13 +1109,13 @@ def etkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="KalmanFilterFormula"):
             HXfm = H((Xfm[:,None], Un)) # Eventuellement Hfm
             def CostFunction(w):
                 _A  = Ynpu - HXfm.reshape((__p,-1)) - (EaHX @ w).reshape((__p,-1))
-                _Jo = 0.5 * _A.T * RI * _A
+                _Jo = 0.5 * _A.T @ (RI * _A)
                 _Jb = 0.5 * (__m+1) * math.log(1 + 1/__m + w.T @ w / (__m-1))
                 _J  = _Jo + _Jb
                 return float(_J)
             def GradientOfCostFunction(w):
                 _A = Ynpu - HXfm.reshape((__p,-1)) - (EaHX @ w).reshape((__p,-1))
-                _GardJo = - EaHX.T * RI * _A
+                _GardJo = - EaHX.T @ (RI * _A)
                 _GradJb = ((__m+1) / (__m-1)) * w.reshape((__m,1)) / (1 + 1/__m + w.T @ w / (__m-1))
                 _GradJ  = _GardJo + _GradJb
                 return numpy.ravel(_GradJ)
@@ -1127,7 +1127,7 @@ def etkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="KalmanFilterFormula"):
                 disp        = False,
                 )
             #
-            Hto = EaHX.T * RI * EaHX
+            Hto = EaHX.T @ (RI * EaHX)
             Htb = ((__m+1) / (__m-1)) * \
                 ( (1 + 1/__m + vw.T @ vw / (__m-1)) * numpy.eye(__m) - 2 * vw @ vw.T / (__m-1) ) \
                 / (1 + 1/__m + vw.T @ vw / (__m-1))**2
diff --git a/test/test6901/Verification_des_Assimilation_Algorithms.py b/test/test6901/Verification_des_Assimilation_Algorithms.py
index 61d5de3..f83cc40 100644
--- a/test/test6901/Verification_des_Assimilation_Algorithms.py
+++ b/test/test6901/Verification_des_Assimilation_Algorithms.py
@@ -108,7 +108,7 @@ class Test_Adao(unittest.TestCase):
         verify_similarity_of_algo_results(("3DVAR", "Blue", "ExtendedBlue", "4DVAR", "DerivativeFreeOptimization"), Xa, 5.e-5)
         verify_similarity_of_algo_results(("LinearLeastSquares", "NonLinearLeastSquares"), Xa, 5.e-7)
         verify_similarity_of_algo_results(("KalmanFilter", "ExtendedKalmanFilter", "UnscentedKalmanFilter"), Xa, 1.e-14)
-        #~ verify_similarity_of_algo_results(("KalmanFilter", "EnsembleKalmanFilter"), Xa, 5.e-2)
+        verify_similarity_of_algo_results(("KalmanFilter", "EnsembleKalmanFilter"), Xa, 7.e-2)
         print(" Les resultats obtenus sont corrects.")
         print("")
         #
@@ -182,7 +182,7 @@ class Test_Adao(unittest.TestCase):
         print(msg+"\n"+"="*len(msg))
         verify_similarity_of_algo_results(("3DVAR", "Blue", "ExtendedBlue", "4DVAR", "DerivativeFreeOptimization"), Xa, 5.e-5)
         verify_similarity_of_algo_results(("KalmanFilter", "ExtendedKalmanFilter", "UnscentedKalmanFilter"), Xa, 1.e14)
-        #~ verify_similarity_of_algo_results(("KalmanFilter", "EnsembleKalmanFilter"), Xa, 5.e-2)
+        verify_similarity_of_algo_results(("KalmanFilter", "EnsembleKalmanFilter"), Xa, 7.e-2)
         print(" Les resultats obtenus sont corrects.")
         print("")
         #
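In both test blocks the KalmanFilter/EnsembleKalmanFilter comparison is re-enabled, with the tolerance relaxed from the previously commented-out 5.e-2 to 7.e-2, far looser than the 1.e-14 used between the deterministic filters. That is expected behavior rather than a regression: a stochastic EnKF reproduces the exact Kalman analysis only up to Monte Carlo sampling error, which decays like 1/sqrt(m) in the ensemble size m. A standalone illustration of that scaling (sizes made up, unrelated to the tests' actual ensemble sizes):

    import numpy

    numpy.random.seed(1234)
    for m in (10, 100, 1000, 10000):
        perturbations = numpy.random.randn(m)  # stand-in for ensemble noise
        print(m, abs(perturbations.mean()))     # sampling error shrinks ~ 1/sqrt(m)

So a few-percent tolerance is about the right order of magnitude for moderate ensemble sizes.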
diff --git a/test/test6903/Verification_des_mono_et_multi_fonctions_E.py b/test/test6903/Verification_des_mono_et_multi_fonctions_E.py
index e06e68f..30da9d2 100644
--- a/test/test6903/Verification_des_mono_et_multi_fonctions_E.py
+++ b/test/test6903/Verification_des_mono_et_multi_fonctions_E.py
@@ -76,8 +76,7 @@ class Test_Adao(unittest.TestCase):
         print("\n "+self.test1.__doc__.strip()+"\n")
         Xa = {}
         #
-        #~ for algo in ("ExtendedKalmanFilter", "KalmanFilter", "EnsembleKalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
-        for algo in ("ExtendedKalmanFilter", "KalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
+        for algo in ("ExtendedKalmanFilter", "KalmanFilter", "EnsembleKalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
             print("")
             msg = "Algorithme en test en MonoFonction : %s"%algo
             print(msg+"\n"+"-"*len(msg))
@@ -96,8 +95,7 @@ class Test_Adao(unittest.TestCase):
             Xa["Mono/"+algo] = adaopy.get("Analysis")[-1]
             del adaopy
         #
-        #~ for algo in ("ExtendedKalmanFilter", "KalmanFilter", "EnsembleKalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
-        for algo in ("ExtendedKalmanFilter", "KalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
+        for algo in ("ExtendedKalmanFilter", "KalmanFilter", "EnsembleKalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
             print("")
             msg = "Algorithme en test en MultiFonction : %s"%algo
             print(msg+"\n"+"-"*len(msg))
@@ -119,8 +117,7 @@ class Test_Adao(unittest.TestCase):
         print("")
         msg = "Tests des ecarts attendus :"
         print(msg+"\n"+"="*len(msg))
-        #~ for algo in ("ExtendedKalmanFilter", "KalmanFilter", "EnsembleKalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
-        for algo in ("ExtendedKalmanFilter", "KalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
+        for algo in ("ExtendedKalmanFilter", "KalmanFilter", "EnsembleKalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
             verify_similarity_of_algo_results(("Multi/"+algo, "Mono/"+algo), Xa, 1.e-20)
         print(" Les resultats obtenus sont corrects.")
         print("")
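test6903 likewise re-enables EnsembleKalmanFilter, here in the comparison between the mono-function and multi-function forms of the same operator, where the tolerance is 1.e-20: both forms must give essentially identical results. A hypothetical sketch of that relation (not ADAO's actual adapter code; the names and the toy operator are illustrative only):

    import numpy

    def H_mono(x):
        # mono-function form: one state in, one observation out (toy operator)
        return numpy.asarray(x, dtype=float)**2

    def H_multi(xs):
        # multi-function form: the same operator applied to a batch of states
        return [H_mono(x) for x in xs]

    states = [numpy.array([1., 2.]), numpy.array([3., 4.])]
    assert all(numpy.array_equal(H_multi(states)[i], H_mono(states[i])) for i in (0, 1))

Since the multi form merely batches the mono form, any difference between the two test runs would point at the wrapping layer rather than the algorithm, hence the near-zero tolerance.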