SALOME platform Git repositories - modules/adao.git/commitdiff
Improvement of algorithm and multifunction use
author     Jean-Philippe ARGAUD <jean-philippe.argaud@edf.fr>
           Sun, 17 Jan 2021 20:02:24 +0000 (21:02 +0100)
committer  Jean-Philippe ARGAUD <jean-philippe.argaud@edf.fr>
           Mon, 18 Jan 2021 06:38:51 +0000 (07:38 +0100)
src/daComposant/daAlgorithms/4DVAR.py
src/daComposant/daCore/NumericObjects.py

src/daComposant/daAlgorithms/4DVAR.py
index c450ae6c8832a8061fbb0dee491d46c2f430f15b..ef64a6e9866d03a509adc06f138638a605ede3b9 100644
@@ -182,13 +182,12 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 self._toStore("CurrentState") or \
                 self._toStore("CurrentOptimum"):
                 self.StoredVariables["CurrentState"].store( _X )
-            Jb  = 0.5 * (_X - Xb).T * BI * (_X - Xb)
+            Jb  = float( 0.5 * (_X - Xb).T * BI * (_X - Xb) )
             self.DirectCalculation = [None,]
             self.DirectInnovation  = [None,]
             Jo  = 0.
             _Xn = _X
             for step in range(0,duration-1):
-                self.DirectCalculation.append( _Xn )
                 if hasattr(Y,"store"):
                     _Ynpu = numpy.asmatrix(numpy.ravel( Y[step+1] )).T
                 else:
@@ -210,11 +209,14 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                     _YmHMX = _Ynpu - numpy.asmatrix(numpy.ravel( Hm( (_Xn, None) ) )).T
                 elif self._parameters["EstimationOf"] == "Parameters":
                     _YmHMX = _Ynpu - numpy.asmatrix(numpy.ravel( Hm( (_Xn, _Un) ) )).T - CmUn(_Xn, _Un)
+                #
+                # Storage of the state
+                self.DirectCalculation.append( _Xn )
                 self.DirectInnovation.append( _YmHMX )
+                #
                # Addition to the observation functional
-                Jo = Jo + _YmHMX.T * RI * _YmHMX
-            Jo  = 0.5 * Jo
-            J   = float( Jb ) + float( Jo )
+                Jo = Jo + 0.5 * float( _YmHMX.T * RI * _YmHMX )
+            J = Jb + Jo
             #
             self.StoredVariables["CurrentIterationNumber"].store( len(self.StoredVariables["CostFunctionJ"]) )
             self.StoredVariables["CostFunctionJb"].store( Jb )
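A note on the two hunks above (illustration only, not part of the commit): each quadratic term of the cost function is now converted to a plain Python scalar, so the accumulated J = Jb + Jo is a float rather than a 1x1 matrix. A minimal sketch with made-up stand-ins for RI and the innovation:

import numpy

# Minimal sketch, not ADAO code: with numpy.matrix operands the quadratic
# form d.T * RI * d is a 1x1 matrix; wrapping it in float(), as the new
# lines do for Jb and each Jo term, keeps the accumulation scalar.
RI = numpy.asmatrix(numpy.eye(3))            # stand-in for the R^{-1} operator
d  = numpy.asmatrix([[1.0], [2.0], [0.5]])   # stand-in innovation Y - H(X)

term_matrix = 0.5 * d.T * RI * d             # 1x1 numpy.matrix
term_scalar = 0.5 * float( d.T * RI * d )    # plain float, as in the new lines

print(type(term_matrix), type(term_scalar))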
@@ -255,8 +257,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                # Gradient computation via the adjoint state
                GradJo = GradJo + Ha * RI * _YmHMX # For linear Ha, equivalent to: Ha( (_Xn, RI * _YmHMX) )
                GradJo = Ma * GradJo               # For linear Ma, equivalent to: Ma( (_Xn, GradJo) )
-            GradJ   = numpy.asmatrix( numpy.ravel( GradJb ) - numpy.ravel( GradJo ) ).T
-            return GradJ.A1
+            GradJ = numpy.ravel( GradJb ) - numpy.ravel( GradJo )
+            return GradJ
         #
        # Starting point of the optimization: Xini = Xb
         # ------------------------------------
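For the last hunk of 4DVAR.py (illustration only, not part of the commit): the gradient callback now returns the difference of the ravelled terms directly, dropping the round-trip through an intermediate numpy.matrix and its .A1 attribute. A minimal sketch with made-up stand-in vectors:

import numpy

# Minimal sketch, not ADAO code: both forms give the same values; the new
# one skips the asmatrix/.A1 detour.
GradJb = numpy.array([1.0, 2.0, 3.0])            # stand-in background term
GradJo = numpy.array([0.5, 0.5, 0.5])            # stand-in observation term

grad_old = numpy.asmatrix( numpy.ravel(GradJb) - numpy.ravel(GradJo) ).T.A1
grad_new = numpy.ravel(GradJb) - numpy.ravel(GradJo)

assert numpy.allclose(grad_old, grad_new)        # same values, simpler code path
print(grad_new)                                  # [0.5 1.5 2.5]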
src/daComposant/daCore/NumericObjects.py
index 1adf514f72146354a6bfc6180d42b9628a28ca34..b9379a90187d4df816ba7fb66968fbf6fed9adcb 100644
@@ -662,27 +662,31 @@ def senkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
                 )
         #
         if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast
+            EMX = M( [(Xn[:,i], Un) for i in range(__m)], argsAsSerie = True )
             for i in range(__m):
-                qi = numpy.asmatrix(numpy.random.multivariate_normal(numpy.zeros(__n), Qn, (1,1,1))).T
-                Xn_predicted[:,i] = numpy.asmatrix(numpy.ravel( M((Xn[:,i], Un)) )).T + qi
-                HX_predicted[:,i] = numpy.asmatrix(numpy.ravel( H((Xn_predicted[:,i], Un)) )).T
+                qi = numpy.random.multivariate_normal(numpy.zeros(__n), Qn)
+                Xn_predicted[:,i] = (numpy.ravel( EMX[i] ) + qi).reshape((__n,-1))
+            HX_predicted = H( [(Xn_predicted[:,i], Un) for i in range(__m)],
+                argsAsSerie = True,
+                returnSerieAsArrayMatrix = True )
            if Cm is not None and Un is not None: # Warning: if Cm is also inside M, it is counted twice!
                 Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
                 Xn_predicted = Xn_predicted + Cm * Un
         elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast
            # --- > By principle, M = Id, Q = 0
             Xn_predicted = Xn
-            for i in range(__m):
-                HX_predicted[:,i] = numpy.asmatrix(numpy.ravel( H((Xn_predicted[:,i], Un)) )).T
+            HX_predicted = H( [(Xn_predicted[:,i], Un) for i in range(__m)],
+                argsAsSerie = True,
+                returnSerieAsArrayMatrix = True )
         #
         # Mean of forecast and observation of forecast
-        Xfm  = numpy.asmatrix(numpy.ravel(Xn_predicted.mean(axis=1, dtype=mfp).astype('float'))).T
-        Hfm  = numpy.asmatrix(numpy.ravel(HX_predicted.mean(axis=1, dtype=mfp).astype('float'))).T
+        Xfm  = Xn_predicted.mean(axis=1, dtype=mfp).astype('float')
+        Hfm  = HX_predicted.mean(axis=1, dtype=mfp).astype('float')
         #
         PfHT, HPfHT = 0., 0.
         for i in range(__m):
-            Exfi = Xn_predicted[:,i] - Xfm
-            Eyfi = HX_predicted[:,i] - Hfm
+            Exfi = Xn_predicted[:,i] - Xfm.reshape((__n,-1))
+            Eyfi = (HX_predicted[:,i] - Hfm).reshape((__p,1))
             PfHT  += Exfi * Eyfi.T
             HPfHT += Eyfi * Eyfi.T
         PfHT  = (1./(__m-1)) * PfHT
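Side note on the forecast step above (illustration only, not part of the commit): called without a size argument, numpy.random.multivariate_normal returns a single 1-D sample of length n, which is why the reshaped sum can be stored directly as an ensemble column. A minimal sketch with a made-up covariance Qn:

import numpy

# Minimal sketch, not ADAO code.
n  = 4
Qn = 0.1 * numpy.eye(n)                          # stand-in model-error covariance

qi = numpy.random.multivariate_normal(numpy.zeros(n), Qn)
print(qi.shape)                                  # (4,)

member = numpy.arange(float(n))                  # stand-in for numpy.ravel( EMX[i] )
column = (member + qi).reshape((n, -1))          # column stored as Xn_predicted[:,i]
print(column.shape)                              # (4, 1)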
@@ -691,8 +695,8 @@ def senkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
         del PfHT, HPfHT
         #
         for i in range(__m):
-            ri = numpy.asmatrix(numpy.random.multivariate_normal(numpy.zeros(__p), Rn, (1,1,1))).T
-            Xn[:,i] = Xn_predicted[:,i] + K * (Ynpu + ri - HX_predicted[:,i])
+            ri = numpy.random.multivariate_normal(numpy.zeros(__p), Rn)
+            Xn[:,i] = Xn_predicted[:,i] + K @ (numpy.ravel(Ynpu) + ri - HX_predicted[:,i]).reshape((__p,1))
         #
         if selfA._parameters["InflationType"] == "MultiplicativeOnAnalysisAnomalies":
             Xn = CovarianceInflation( Xn,
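For the analysis update in the hunk above (illustration only, not part of the commit): each member is corrected by the gain applied to the perturbed innovation, and the @ operator keeps the correction as a (p, 1) column that can be assigned into the i-th ensemble column. A minimal sketch with made-up shapes and values:

import numpy

# Minimal sketch, not ADAO code.
n, p = 4, 3
K    = 0.1 * numpy.ones((n, p))                  # stand-in Kalman gain
Rn   = 0.05 * numpy.eye(p)                       # stand-in observation covariance
y    = numpy.array([1.0, 2.0, 3.0])              # stand-in observation Ynpu
xf   = numpy.zeros((n, 1))                       # stand-in forecast member
hxf  = numpy.array([0.9, 2.1, 2.8])              # stand-in predicted observation

ri = numpy.random.multivariate_normal(numpy.zeros(p), Rn)
xa = xf + K @ (y + ri - hxf).reshape((p, 1))     # analysis member, shape (n, 1)
print(xa.shape)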
@@ -886,27 +890,27 @@ def etkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, KorV="KalmanFilterFormula"):
         if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast
             EMX = M( [(Xn[:,i], Un) for i in range(__m)], argsAsSerie = True )
             for i in range(__m):
-                qi = numpy.asmatrix(numpy.random.multivariate_normal(numpy.zeros(__n), Qn, (1,1,1))).T
-                Xn_predicted[:,i] = numpy.asmatrix(numpy.ravel( EMX[i] )).T + qi
-            EHX = H( [(Xn_predicted[:,i], Un) for i in range(__m)], argsAsSerie = True )
-            for i in range(__m):
-                HX_predicted[:,i] = numpy.asmatrix(numpy.ravel( EHX[i] )).T
+                qi = numpy.random.multivariate_normal(numpy.zeros(__n), Qn)
+                Xn_predicted[:,i] = (numpy.ravel( EMX[i] ) + qi).reshape((__n,-1))
+            HX_predicted = H( [(Xn_predicted[:,i], Un) for i in range(__m)],
+                argsAsSerie = True,
+                returnSerieAsArrayMatrix = True )
            if Cm is not None and Un is not None: # Warning: if Cm is also inside M, it is counted twice!
                 Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
                 Xn_predicted = Xn_predicted + Cm * Un
         elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast
            # --- > By principle, M = Id, Q = 0
             Xn_predicted = Xn
-            EHX = H( [(Xn_predicted[:,i], Un) for i in range(__m)], argsAsSerie = True )
-            for i in range(__m):
-                HX_predicted[:,i] = numpy.asmatrix(numpy.ravel( EHX[i] )).T
+            HX_predicted = H( [(Xn_predicted[:,i], Un) for i in range(__m)],
+                argsAsSerie = True,
+                returnSerieAsArrayMatrix = True )
         #
         # Mean of forecast and observation of forecast
         Xfm  = Xn_predicted.mean(axis=1, dtype=mfp).astype('float')
         Hfm  = HX_predicted.mean(axis=1, dtype=mfp).astype('float')
         #
-        EaX   = (Xn_predicted - Xfm.reshape((__n,-1)))
-        EaHX  = (HX_predicted - Hfm.reshape((__p,-1)))
+        EaX   = numpy.matrix(Xn_predicted - Xfm.reshape((__n,-1)))
+        EaHX  = numpy.matrix(HX_predicted - Hfm.reshape((__p,-1)))
         #
         #--------------------------
         if KorV == "KalmanFilterFormula":
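To close the etkf changes (illustration only, not part of the commit): the forecast mean is kept as a plain ndarray and the ensemble is centred by broadcasting that mean as a column; the commit additionally wraps the resulting anomalies in numpy.matrix for the update formulas that follow. A minimal sketch of the centring step with a random stand-in ensemble:

import numpy

# Minimal sketch, not ADAO code.
n, m = 4, 5
Xn_predicted = numpy.random.rand(n, m)           # stand-in forecast ensemble

Xfm = Xn_predicted.mean(axis=1)                  # ensemble mean, shape (n,)
EaX = Xn_predicted - Xfm.reshape((n, -1))        # anomalies, shape (n, m)

assert numpy.allclose(EaX.mean(axis=1), 0.0)     # anomalies average to zero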