From 8d455a469bd2d755e5ed4306135800e4c1597821 Mon Sep 17 00:00:00 2001
From: Jean-Philippe ARGAUD
Date: Sun, 17 Jan 2021 21:02:24 +0100
Subject: [PATCH] Improvement of algorithm and multifonction use

---
 src/daComposant/daAlgorithms/4DVAR.py    | 16 +++++----
 src/daComposant/daCore/NumericObjects.py | 46 +++++++++++++-----------
 2 files changed, 34 insertions(+), 28 deletions(-)

diff --git a/src/daComposant/daAlgorithms/4DVAR.py b/src/daComposant/daAlgorithms/4DVAR.py
index c450ae6..ef64a6e 100644
--- a/src/daComposant/daAlgorithms/4DVAR.py
+++ b/src/daComposant/daAlgorithms/4DVAR.py
@@ -182,13 +182,12 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 self._toStore("CurrentState") or \
                 self._toStore("CurrentOptimum"):
                 self.StoredVariables["CurrentState"].store( _X )
-            Jb = 0.5 * (_X - Xb).T * BI * (_X - Xb)
+            Jb = float( 0.5 * (_X - Xb).T * BI * (_X - Xb) )
             self.DirectCalculation = [None,]
             self.DirectInnovation = [None,]
             Jo = 0.
             _Xn = _X
             for step in range(0,duration-1):
-                self.DirectCalculation.append( _Xn )
                 if hasattr(Y,"store"):
                     _Ynpu = numpy.asmatrix(numpy.ravel( Y[step+1] )).T
                 else:
@@ -210,11 +209,14 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                     _YmHMX = _Ynpu - numpy.asmatrix(numpy.ravel( Hm( (_Xn, None) ) )).T
                 elif self._parameters["EstimationOf"] == "Parameters":
                     _YmHMX = _Ynpu - numpy.asmatrix(numpy.ravel( Hm( (_Xn, _Un) ) )).T - CmUn(_Xn, _Un)
+                #
+                # Stockage de l'état
+                self.DirectCalculation.append( _Xn )
                 self.DirectInnovation.append( _YmHMX )
+                #
                 # Ajout dans la fonctionnelle d'observation
-                Jo = Jo + _YmHMX.T * RI * _YmHMX
-            Jo = 0.5 * Jo
-            J = float( Jb ) + float( Jo )
+                Jo = Jo + 0.5 * float( _YmHMX.T * RI * _YmHMX )
+            J = Jb + Jo
             #
             self.StoredVariables["CurrentIterationNumber"].store( len(self.StoredVariables["CostFunctionJ"]) )
             self.StoredVariables["CostFunctionJb"].store( Jb )
@@ -255,8 +257,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 # Calcul du gradient par etat adjoint
                 GradJo = GradJo + Ha * RI * _YmHMX # Equivaut pour Ha lineaire à : Ha( (_Xn, RI * _YmHMX) )
                 GradJo = Ma * GradJo # Equivaut pour Ma lineaire à : Ma( (_Xn, GradJo) )
-            GradJ = numpy.asmatrix( numpy.ravel( GradJb ) - numpy.ravel( GradJo ) ).T
-            return GradJ.A1
+            GradJ = numpy.ravel( GradJb ) - numpy.ravel( GradJo )
+            return GradJ
         #
         # Point de démarrage de l'optimisation : Xini = Xb
         # ------------------------------------
diff --git a/src/daComposant/daCore/NumericObjects.py b/src/daComposant/daCore/NumericObjects.py
index 1adf514..b9379a9 100644
--- a/src/daComposant/daCore/NumericObjects.py
+++ b/src/daComposant/daCore/NumericObjects.py
@@ -662,27 +662,31 @@ def senkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
                 )
         #
         if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast
+            EMX = M( [(Xn[:,i], Un) for i in range(__m)], argsAsSerie = True )
             for i in range(__m):
-                qi = numpy.asmatrix(numpy.random.multivariate_normal(numpy.zeros(__n), Qn, (1,1,1))).T
-                Xn_predicted[:,i] = numpy.asmatrix(numpy.ravel( M((Xn[:,i], Un)) )).T + qi
-                HX_predicted[:,i] = numpy.asmatrix(numpy.ravel( H((Xn_predicted[:,i], Un)) )).T
+                qi = numpy.random.multivariate_normal(numpy.zeros(__n), Qn)
+                Xn_predicted[:,i] = (numpy.ravel( EMX[i] ) + qi).reshape((__n,-1))
+            HX_predicted = H( [(Xn_predicted[:,i], Un) for i in range(__m)],
+                argsAsSerie = True,
+                returnSerieAsArrayMatrix = True )
             if Cm is not None and Un is not None: # Attention : si Cm est aussi dans M, doublon !
                 Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
                 Xn_predicted = Xn_predicted + Cm * Un
         elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast
             # --- > Par principe, M = Id, Q = 0
             Xn_predicted = Xn
-            for i in range(__m):
-                HX_predicted[:,i] = numpy.asmatrix(numpy.ravel( H((Xn_predicted[:,i], Un)) )).T
+            HX_predicted = H( [(Xn_predicted[:,i], Un) for i in range(__m)],
+                argsAsSerie = True,
+                returnSerieAsArrayMatrix = True )
         #
         # Mean of forecast and observation of forecast
-        Xfm = numpy.asmatrix(numpy.ravel(Xn_predicted.mean(axis=1, dtype=mfp).astype('float'))).T
-        Hfm = numpy.asmatrix(numpy.ravel(HX_predicted.mean(axis=1, dtype=mfp).astype('float'))).T
+        Xfm = Xn_predicted.mean(axis=1, dtype=mfp).astype('float')
+        Hfm = HX_predicted.mean(axis=1, dtype=mfp).astype('float')
         #
         PfHT, HPfHT = 0., 0.
         for i in range(__m):
-            Exfi = Xn_predicted[:,i] - Xfm
-            Eyfi = HX_predicted[:,i] - Hfm
+            Exfi = Xn_predicted[:,i] - Xfm.reshape((__n,-1))
+            Eyfi = (HX_predicted[:,i] - Hfm).reshape((__p,1))
             PfHT += Exfi * Eyfi.T
             HPfHT += Eyfi * Eyfi.T
         PfHT = (1./(__m-1)) * PfHT
@@ -691,8 +695,8 @@ def senkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
         del PfHT, HPfHT
         #
         for i in range(__m):
-            ri = numpy.asmatrix(numpy.random.multivariate_normal(numpy.zeros(__p), Rn, (1,1,1))).T
-            Xn[:,i] = Xn_predicted[:,i] + K * (Ynpu + ri - HX_predicted[:,i])
+            ri = numpy.random.multivariate_normal(numpy.zeros(__p), Rn)
+            Xn[:,i] = Xn_predicted[:,i] + K @ (numpy.ravel(Ynpu) + ri - HX_predicted[:,i]).reshape((__p,1))
         #
         if selfA._parameters["InflationType"] == "MultiplicativeOnAnalysisAnomalies":
             Xn = CovarianceInflation( Xn,
@@ -886,27 +890,27 @@ def etkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, KorV="KalmanFilterFormula"):
         if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast
             EMX = M( [(Xn[:,i], Un) for i in range(__m)], argsAsSerie = True )
             for i in range(__m):
-                qi = numpy.asmatrix(numpy.random.multivariate_normal(numpy.zeros(__n), Qn, (1,1,1))).T
-                Xn_predicted[:,i] = numpy.asmatrix(numpy.ravel( EMX[i] )).T + qi
-            EHX = H( [(Xn_predicted[:,i], Un) for i in range(__m)], argsAsSerie = True )
-            for i in range(__m):
-                HX_predicted[:,i] = numpy.asmatrix(numpy.ravel( EHX[i] )).T
+                qi = numpy.random.multivariate_normal(numpy.zeros(__n), Qn)
+                Xn_predicted[:,i] = (numpy.ravel( EMX[i] ) + qi).reshape((__n,-1))
+            HX_predicted = H( [(Xn_predicted[:,i], Un) for i in range(__m)],
+                argsAsSerie = True,
+                returnSerieAsArrayMatrix = True )
             if Cm is not None and Un is not None: # Attention : si Cm est aussi dans M, doublon !
                 Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
                 Xn_predicted = Xn_predicted + Cm * Un
         elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast
             # --- > Par principe, M = Id, Q = 0
             Xn_predicted = Xn
-            EHX = H( [(Xn_predicted[:,i], Un) for i in range(__m)], argsAsSerie = True )
-            for i in range(__m):
-                HX_predicted[:,i] = numpy.asmatrix(numpy.ravel( EHX[i] )).T
+            HX_predicted = H( [(Xn_predicted[:,i], Un) for i in range(__m)],
+                argsAsSerie = True,
+                returnSerieAsArrayMatrix = True )
         #
         # Mean of forecast and observation of forecast
         Xfm = Xn_predicted.mean(axis=1, dtype=mfp).astype('float')
         Hfm = HX_predicted.mean(axis=1, dtype=mfp).astype('float')
         #
-        EaX = (Xn_predicted - Xfm.reshape((__n,-1)))
-        EaHX = (HX_predicted - Hfm.reshape((__p,-1)))
+        EaX = numpy.matrix(Xn_predicted - Xfm.reshape((__n,-1)))
+        EaHX = numpy.matrix(HX_predicted - Hfm.reshape((__p,-1)))
         #
         #--------------------------
         if KorV == "KalmanFilterFormula":
-- 
2.39.2
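
For context on the 4DVAR.py hunks above, which change how the cost terms are accumulated (Jb stored once as a float, the 1/2 factor of Jo applied inside the time loop, and J = Jb + Jo), here is a minimal standalone sketch of that accumulation pattern. It is not ADAO code: the linear model Mmat, the operator Hmat, the toy dimensions and the identity covariances are all illustrative assumptions.

```python
# Illustrative sketch of the 4D-Var cost accumulation pattern (assumed toy setup):
#   Jb = 0.5 (x - xb)^T B^{-1} (x - xb)
#   Jo = sum_k 0.5 (y_k - H M_k(x))^T R^{-1} (y_k - H M_k(x))
#   J  = Jb + Jo
import numpy

def cost_function(x, xb, BI, RI, Y_series, Mmat, Hmat):
    """Toy strong-constraint 4D-Var cost with a linear model Mmat and operator Hmat."""
    x  = numpy.ravel(x)
    xb = numpy.ravel(xb)
    Jb = float( 0.5 * (x - xb) @ BI @ (x - xb) )   # background term, computed once
    Jo = 0.
    xn = x.copy()
    for yk in Y_series:                            # loop over observation times
        xn  = Mmat @ xn                            # propagate the state one step
        dyk = numpy.ravel(yk) - Hmat @ xn          # innovation at this step
        Jo += 0.5 * float( dyk @ RI @ dyk )        # accumulate 0.5 * d^T R^{-1} d
    return Jb + Jo

# Tiny usage example with identity operators and covariances (assumptions)
n  = 3
xb = numpy.zeros(n)
J  = cost_function(numpy.ones(n), xb, numpy.eye(n), numpy.eye(n),
                   [numpy.ones(n), numpy.ones(n)], numpy.eye(n), numpy.eye(n))
print(J)  # -> 1.5 here: only the background term, the innovations are zero
```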
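The senkf hunks above implement the analysis step of a stochastic EnKF with perturbed observations, written member-wise as Xn[:,i] = Xn_predicted[:,i] + K @ (y + ri - HX_predicted[:,i]). The following is a self-contained sketch of that update outside ADAO; the toy dimensions, the linear observation operator Hmat and the random seed are assumptions for illustration only.

```python
# Minimal sketch (not ADAO code) of the stochastic EnKF analysis step used above:
#   x_i^a = x_i^f + K (y + r_i - H x_i^f),   r_i ~ N(0, R)
import numpy

rng = numpy.random.default_rng(42)
__n, __p, __m = 4, 2, 20                    # state size, observation size, ensemble size

Hmat = rng.standard_normal((__p, __n))      # toy linear observation operator (assumption)
R    = 0.1 * numpy.eye(__p)                 # observation error covariance
Xf   = rng.standard_normal((__n, __m))      # forecast ensemble, one member per column
y    = rng.standard_normal(__p)             # observation vector

HXf = Hmat @ Xf                             # observed forecast ensemble
Xfm = Xf.mean(axis=1, keepdims=True)        # ensemble means
HXm = HXf.mean(axis=1, keepdims=True)

# Sample covariances Pf H^T and H Pf H^T from the anomalies (1/(m-1) normalisation)
EaX   = Xf  - Xfm
EaHX  = HXf - HXm
PfHT  = EaX  @ EaHX.T / (__m - 1)
HPfHT = EaHX @ EaHX.T / (__m - 1)

K = PfHT @ numpy.linalg.inv(HPfHT + R)      # Kalman gain

Xa = numpy.empty_like(Xf)
for i in range(__m):
    ri = rng.multivariate_normal(numpy.zeros(__p), R)   # perturbed observation noise
    Xa[:, i] = Xf[:, i] + K @ (y + ri - HXf[:, i])       # member-wise analysis update

print("analysis ensemble mean:", Xa.mean(axis=1))
```

The member-wise loop mirrors the patched senkf code, where the matrix product was switched from `*` on numpy matrices to `@` on arrays with an explicit reshape of the innovation vector.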