#
# Mean of forecast and observation of forecast
Xfm = Xn_predicted.mean(axis=1, dtype=mfp).astype('float').reshape((__n,-1))
- Hfm = HX_predicted.mean(axis=1, dtype=mfp).astype('float').reshape((__p,1))
+ Hfm = HX_predicted.mean(axis=1, dtype=mfp).astype('float').reshape((__p,-1))
#
#--------------------------
if VariantM == "KalmanFilterFormula05":
PfHT, HPfHT = 0., 0.
for i in range(__m):
- Exfi = Xn_predicted[:,i] - Xfm
- Eyfi = HX_predicted[:,i] - Hfm
+ Exfi = Xn_predicted[:,i].reshape((__n,-1)) - Xfm
+ Eyfi = HX_predicted[:,i].reshape((__p,-1)) - Hfm
PfHT += Exfi * Eyfi.T
HPfHT += Eyfi * Eyfi.T
PfHT = (1./(__m-1)) * PfHT
#
for i in range(__m):
ri = numpy.random.multivariate_normal(numpy.zeros(__p), Rn)
- Xn[:,i] = Xn_predicted[:,i] + numpy.ravel(Kn @ (numpy.ravel(Ynpu) + ri - HX_predicted[:,i])).T
+ Xn[:,i] = numpy.ravel(Xn_predicted[:,i]) + Kn @ (numpy.ravel(Ynpu) + ri - HX_predicted[:,i])
#--------------------------
elif VariantM == "KalmanFilterFormula16":
EpY = EnsembleOfCenteredPerturbations(Ynpu, Rn, __m)
EaX = EnsembleOfAnomalies( Xn_predicted ) / numpy.sqrt(__m-1)
EaY = (HX_predicted - Hfm - EpY + EpYm) / numpy.sqrt(__m-1)
#
- Kn = EaX @ EaX.T @ numpy.linalg.inv( EaY @ EaY.T)
+ Kn = EaX @ EaY.T @ numpy.linalg.inv( EaY @ EaY.T)
#
for i in range(__m):
- Xn[:,i] = Xn_predicted[:,i] + Kn @ (numpy.ravel(EpY[:,i]) - HX_predicted[:,i])
+ Xn[:,i] = numpy.ravel(Xn_predicted[:,i]) + Kn @ (numpy.ravel(EpY[:,i]) - HX_predicted[:,i])
#--------------------------
else:
raise ValueError("VariantM has to be chosen in the authorized methods list.")
#
# Anomalies
EaX = EnsembleOfAnomalies( Xn_predicted )
- EaHX = numpy.matrix(HX_predicted - Hfm)
+ EaHX = numpy.array(HX_predicted - Hfm)
#
#--------------------------
if VariantM == "KalmanFilterFormula":
HXfm = H((Xfm[:,None], Un)) # Eventuellement Hfm
def CostFunction(w):
_A = Ynpu - HXfm.reshape((__p,-1)) - (EaHX @ w).reshape((__p,-1))
- _Jo = 0.5 * _A.T * RI * _A
+ _Jo = 0.5 * _A.T @ (RI * _A)
_Jb = 0.5 * (__m-1) * w.T @ w
_J = _Jo + _Jb
return float(_J)
def GradientOfCostFunction(w):
_A = Ynpu - HXfm.reshape((__p,-1)) - (EaHX @ w).reshape((__p,-1))
- _GardJo = - EaHX.T * RI * _A
+ _GardJo = - EaHX.T @ (RI * _A)
_GradJb = (__m-1) * w.reshape((__m,1))
_GradJ = _GardJo + _GradJb
return numpy.ravel(_GradJ)
disp = False,
)
#
- Hto = EaHX.T * RI * EaHX
+ Hto = EaHX.T @ (RI * EaHX)
Htb = (__m-1) * numpy.eye(__m)
Hta = Hto + Htb
#
HXfm = H((Xfm[:,None], Un)) # Eventuellement Hfm
def CostFunction(w):
_A = Ynpu - HXfm.reshape((__p,-1)) - (EaHX @ w).reshape((__p,-1))
- _Jo = 0.5 * _A.T * RI * _A
+ _Jo = 0.5 * _A.T @ (RI * _A)
_Jb = 0.5 * __m * math.log(1 + 1/__m + w.T @ w)
_J = _Jo + _Jb
return float(_J)
def GradientOfCostFunction(w):
_A = Ynpu - HXfm.reshape((__p,-1)) - (EaHX @ w).reshape((__p,-1))
- _GardJo = - EaHX.T * RI * _A
+ _GardJo = - EaHX.T @ (RI * _A)
_GradJb = __m * w.reshape((__m,1)) / (1 + 1/__m + w.T @ w)
_GradJ = _GardJo + _GradJb
return numpy.ravel(_GradJ)
disp = False,
)
#
- Hto = EaHX.T * RI * EaHX
+ Hto = EaHX.T @ (RI * EaHX)
Htb = __m * \
( (1 + 1/__m + vw.T @ vw) * numpy.eye(__m) - 2 * vw @ vw.T ) \
/ (1 + 1/__m + vw.T @ vw)**2
return float(_J)
def GradientOfCostFunction(w):
_A = Ynpu - HXfm.reshape((__p,-1)) - (EaHX @ w).reshape((__p,-1))
- _GardJo = - EaHX.T * RI * _A
+ _GardJo = - EaHX.T @ (RI * _A)
_GradJb = (__m+1) * w.reshape((__m,1)) / (1 + 1/__m + w.T @ w)
_GradJ = _GardJo + _GradJb
return numpy.ravel(_GradJ)
disp = False,
)
#
- Hto = EaHX.T * RI * EaHX
+ Hto = EaHX.T @ (RI * EaHX)
Htb = (__m+1) * \
( (1 + 1/__m + vw.T @ vw) * numpy.eye(__m) - 2 * vw @ vw.T ) \
/ (1 + 1/__m + vw.T @ vw)**2
HXfm = H((Xfm[:,None], Un)) # Eventuellement Hfm
def CostFunction(w):
_A = Ynpu - HXfm.reshape((__p,-1)) - (EaHX @ w).reshape((__p,-1))
- _Jo = 0.5 * _A.T * RI * _A
+ _Jo = 0.5 * _A.T @ (RI * _A)
_Jb = 0.5 * (__m+1) * math.log(1 + 1/__m + w.T @ w / (__m-1))
_J = _Jo + _Jb
return float(_J)
def GradientOfCostFunction(w):
_A = Ynpu - HXfm.reshape((__p,-1)) - (EaHX @ w).reshape((__p,-1))
- _GardJo = - EaHX.T * RI * _A
+ _GardJo = - EaHX.T @ (RI * _A)
_GradJb = ((__m+1) / (__m-1)) * w.reshape((__m,1)) / (1 + 1/__m + w.T @ w / (__m-1))
_GradJ = _GardJo + _GradJb
return numpy.ravel(_GradJ)
disp = False,
)
#
- Hto = EaHX.T * RI * EaHX
+ Hto = EaHX.T @ (RI * EaHX)
Htb = ((__m+1) / (__m-1)) * \
( (1 + 1/__m + vw.T @ vw / (__m-1)) * numpy.eye(__m) - 2 * vw @ vw.T / (__m-1) ) \
/ (1 + 1/__m + vw.T @ vw / (__m-1))**2
verify_similarity_of_algo_results(("3DVAR", "Blue", "ExtendedBlue", "4DVAR", "DerivativeFreeOptimization"), Xa, 5.e-5)
verify_similarity_of_algo_results(("LinearLeastSquares", "NonLinearLeastSquares"), Xa, 5.e-7)
verify_similarity_of_algo_results(("KalmanFilter", "ExtendedKalmanFilter", "UnscentedKalmanFilter"), Xa, 1.e-14)
- #~ verify_similarity_of_algo_results(("KalmanFilter", "EnsembleKalmanFilter"), Xa, 5.e-2)
+ verify_similarity_of_algo_results(("KalmanFilter", "EnsembleKalmanFilter"), Xa, 7.e-2)
print(" Les resultats obtenus sont corrects.")
print("")
#
print(msg+"\n"+"="*len(msg))
verify_similarity_of_algo_results(("3DVAR", "Blue", "ExtendedBlue", "4DVAR", "DerivativeFreeOptimization"), Xa, 5.e-5)
    verify_similarity_of_algo_results(("KalmanFilter", "ExtendedKalmanFilter", "UnscentedKalmanFilter"), Xa, 1.e-14)
- #~ verify_similarity_of_algo_results(("KalmanFilter", "EnsembleKalmanFilter"), Xa, 5.e-2)
+ verify_similarity_of_algo_results(("KalmanFilter", "EnsembleKalmanFilter"), Xa, 7.e-2)
print(" Les resultats obtenus sont corrects.")
print("")
#
print("\n "+self.test1.__doc__.strip()+"\n")
Xa = {}
#
- #~ for algo in ("ExtendedKalmanFilter", "KalmanFilter", "EnsembleKalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
- for algo in ("ExtendedKalmanFilter", "KalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
+ for algo in ("ExtendedKalmanFilter", "KalmanFilter", "EnsembleKalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
print("")
msg = "Algorithme en test en MonoFonction : %s"%algo
print(msg+"\n"+"-"*len(msg))
Xa["Mono/"+algo] = adaopy.get("Analysis")[-1]
del adaopy
#
- #~ for algo in ("ExtendedKalmanFilter", "KalmanFilter", "EnsembleKalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
- for algo in ("ExtendedKalmanFilter", "KalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
+ for algo in ("ExtendedKalmanFilter", "KalmanFilter", "EnsembleKalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
print("")
msg = "Algorithme en test en MultiFonction : %s"%algo
print(msg+"\n"+"-"*len(msg))
print("")
msg = "Tests des ecarts attendus :"
print(msg+"\n"+"="*len(msg))
- #~ for algo in ("ExtendedKalmanFilter", "KalmanFilter", "EnsembleKalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
- for algo in ("ExtendedKalmanFilter", "KalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
+ for algo in ("ExtendedKalmanFilter", "KalmanFilter", "EnsembleKalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
verify_similarity_of_algo_results(("Multi/"+algo, "Mono/"+algo), Xa, 1.e-20)
print(" Les resultats obtenus sont corrects.")
print("")