Salome HOME
Update forecast use and documentation in filters
[modules/adao.git] / src / daComposant / daAlgorithms / EnsembleKalmanFilter.py
1 # -*- coding: utf-8 -*-
2 #
3 # Copyright (C) 2008-2020 EDF R&D
4 #
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
9 #
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13 # Lesser General Public License for more details.
14 #
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
18 #
19 # See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
20 #
21 # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
22
23 import logging
24 from daCore import BasicObjects, PlatformInfo
25 import numpy, math
26 mfp = PlatformInfo.PlatformInfo().MaximumPrecision()
27
28 # ==============================================================================
class ElementaryAlgorithm(BasicObjects.Algorithm):
    """
    Stochastic (perturbed-observations) Ensemble Kalman Filter.

    Depending on "EstimationOf", the filter performs either state estimation
    along a time series of observations (the evolution model EM propagates
    each member, with additive model noise drawn from Q), or parameter
    estimation (the evolution is the identity and no model noise is added).
    """
    def __init__(self):
        BasicObjects.Algorithm.__init__(self, "ENSEMBLEKALMANFILTER")
        self.defineRequiredParameter(
            name     = "NumberOfMembers",
            default  = 100,
            typecast = int,
            message  = "Nombre de membres dans l'ensemble",
            minval   = -1,
            )
        self.defineRequiredParameter(
            name     = "EstimationOf",
            default  = "State",
            typecast = str,
            message  = "Estimation d'etat ou de parametres",
            listval  = ["State", "Parameters"],
            )
        self.defineRequiredParameter(
            name     = "SetSeed",
            typecast = numpy.random.seed,
            message  = "Graine fixée pour le générateur aléatoire",
            )
        self.defineRequiredParameter(
            name     = "StoreInternalVariables",
            default  = False,
            typecast = bool,
            message  = "Stockage des variables internes ou intermédiaires du calcul",
            )
        self.defineRequiredParameter(
            name     = "StoreSupplementaryCalculations",
            default  = [],
            typecast = tuple,
            message  = "Liste de calculs supplémentaires à stocker et/ou effectuer",
            listval  = [
                "Analysis",
                "APosterioriCorrelations",
                "APosterioriCovariance",
                "APosterioriStandardDeviations",
                "APosterioriVariances",
                "BMA",
                "CostFunctionJ",
                "CostFunctionJAtCurrentOptimum",
                "CostFunctionJb",
                "CostFunctionJbAtCurrentOptimum",
                "CostFunctionJo",
                "CostFunctionJoAtCurrentOptimum",
                "CurrentOptimum",
                "CurrentState",
                "ForecastState",
                "IndexOfOptimum",
                "InnovationAtCurrentAnalysis",
                "InnovationAtCurrentState",
                "SimulatedObservationAtCurrentAnalysis",
                "SimulatedObservationAtCurrentOptimum",
                "SimulatedObservationAtCurrentState",
                ]
            )
        self.requireInputArguments(
            mandatory= ("Xb", "Y", "HO", "R", "B"),
            optional = ("U", "EM", "CM", "Q"),
            )

    def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
        """
        Run the stochastic EnKF over the observation series.

        Xb : background state ; Y : observation(s) ; U : control(s) ;
        HO : observation operator ; EM : evolution model ; CM : control model ;
        R, B, Q : observation, background and model error covariances.
        Results are appended to self.StoredVariables; returns 0.
        """
        self._pre_run(Parameters, Xb, Y, R, B, Q)
        #
        # Parameter estimation requires the internal variables (cost values)
        if self._parameters["EstimationOf"] == "Parameters":
            self._parameters["StoreInternalVariables"] = True
        #
        # Operators
        # ---------
        H = HO["Direct"].appliedControledFormTo
        #
        if self._parameters["EstimationOf"] == "State":
            M = EM["Direct"].appliedControledFormTo
        #
        if CM is not None and "Tangent" in CM and U is not None:
            Cm = CM["Tangent"].asMatrix(Xb)
        else:
            Cm = None
        #
        # Number of steps equal to the number of observation steps
        # --------------------------------------------------------
        if hasattr(Y,"stepnumber"):
            duration = Y.stepnumber()
            __p = numpy.cumprod(Y.shape())[-1]
        else:
            duration = 2
            __p = numpy.array(Y).size
        #
        # Precompute the inverses of B and R (only if cost functions needed)
        # ------------------------------------------------------------------
        if self._parameters["StoreInternalVariables"] \
            or self._toStore("CostFunctionJ") \
            or self._toStore("CostFunctionJb") \
            or self._toStore("CostFunctionJo") \
            or self._toStore("CurrentOptimum") \
            or self._toStore("APosterioriCovariance"):
            BI = B.getI()
            RI = R.getI()
        #
        # Initialization
        # --------------
        __n = Xb.size
        __m = self._parameters["NumberOfMembers"]
        # Initial ensemble: the background replicated over the __m members
        Xn = numpy.asmatrix(numpy.dot( Xb.reshape(__n,1), numpy.ones((1,__m)) ))
        if hasattr(B,"asfullmatrix"): Pn = B.asfullmatrix(__n)
        else:                         Pn = B
        if hasattr(R,"asfullmatrix"): Rn = R.asfullmatrix(__p)
        else:                         Rn = R
        if hasattr(Q,"asfullmatrix"): Qn = Q.asfullmatrix(__n)
        else:                         Qn = Q
        #
        if len(self.StoredVariables["Analysis"])==0 or not self._parameters["nextStep"]:
            self.StoredVariables["Analysis"].store( numpy.ravel(Xb) )
            if self._toStore("APosterioriCovariance"):
                self.StoredVariables["APosterioriCovariance"].store( Pn )
                covarianceXa = Pn
        #
        Xa               = Xb
        XaMin            = Xb
        previousJMinimum = numpy.finfo(float).max
        # Number of cost values already stored before this run: the optimum is
        # searched only among values produced by the current run. (Fixes a
        # NameError: "nbPreviousSteps" was used below without being defined.)
        nbPreviousSteps  = self.StoredVariables["CostFunctionJ"].stepnumber()
        #
        # Pre-allocation of the forecast ensembles
        Xn_predicted = numpy.asmatrix(numpy.zeros((__n,__m)))
        HX_predicted = numpy.asmatrix(numpy.zeros((__p,__m)))
        #
        for step in range(duration-1):
            # Observation for the current step
            if hasattr(Y,"store"):
                Ynpu = numpy.asmatrix(numpy.ravel( Y[step+1] )).T
            else:
                Ynpu = numpy.asmatrix(numpy.ravel( Y )).T
            #
            # Control for the current step (single or per-step)
            if U is not None:
                if hasattr(U,"store") and len(U)>1:
                    Un = numpy.asmatrix(numpy.ravel( U[step] )).T
                elif hasattr(U,"store") and len(U)==1:
                    Un = numpy.asmatrix(numpy.ravel( U[0] )).T
                else:
                    Un = numpy.asmatrix(numpy.ravel( U )).T
            else:
                Un = None
            #
            # Forecast step: propagate and observe each member
            if self._parameters["EstimationOf"] == "State":
                for i in range(__m):
                    # Additive model noise drawn from Q for each member
                    qi = numpy.asmatrix(numpy.random.multivariate_normal(numpy.zeros(__n), Qn, (1,1,1))).T
                    Xn_predicted[:,i] = numpy.asmatrix(numpy.ravel( M((Xn[:,i], Un)) )).T + qi
                    HX_predicted[:,i] = numpy.asmatrix(numpy.ravel( H((Xn_predicted[:,i], Un)) )).T
                if Cm is not None and Un is not None: # Beware: if Cm is also inside M, it is applied twice!
                    Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
                    Xn_predicted = Xn_predicted + Cm * Un
            elif self._parameters["EstimationOf"] == "Parameters":
                # --- > By principle, M = Id, Q = 0
                Xn_predicted = Xn
                # The members must still be observed: previously HX_predicted
                # was left at its zero pre-allocation here, so the gain and
                # innovation were computed against zeros in parameter mode.
                for i in range(__m):
                    HX_predicted[:,i] = numpy.asmatrix(numpy.ravel( H((Xn_predicted[:,i], Un)) )).T
            #
            # Ensemble means of the forecast states and of their observations
            Xfm = numpy.asmatrix(numpy.ravel(Xn_predicted.mean(axis=1, dtype=mfp))).T
            Hfm = numpy.asmatrix(numpy.ravel(HX_predicted.mean(axis=1, dtype=mfp))).T
            #
            # Empirical cross-covariances Pf.H^T and H.Pf.H^T, then the gain
            PfHT, HPfHT = 0., 0.
            for i in range(__m):
                Exfi = Xn_predicted[:,i] - Xfm
                Eyfi = HX_predicted[:,i] - Hfm
                PfHT  += Exfi * Eyfi.T
                HPfHT += Eyfi * Eyfi.T
            PfHT  = (1./(__m-1)) * PfHT
            HPfHT = (1./(__m-1)) * HPfHT
            K     = PfHT * ( R + HPfHT ).I
            del PfHT, HPfHT
            #
            # Analysis step: update each member with a perturbed observation
            for i in range(__m):
                ri = numpy.asmatrix(numpy.random.multivariate_normal(numpy.zeros(__p), Rn, (1,1,1))).T
                Xn[:,i] = Xn_predicted[:,i] + K * (Ynpu + ri - HX_predicted[:,i])
            #
            # Analysis = mean of the updated ensemble
            Xa = Xn.mean(axis=1, dtype=mfp)
            #
            if self._parameters["StoreInternalVariables"] \
                or self._toStore("CostFunctionJ") \
                or self._toStore("CostFunctionJb") \
                or self._toStore("CostFunctionJo") \
                or self._toStore("APosterioriCovariance") \
                or self._toStore("InnovationAtCurrentAnalysis") \
                or self._toStore("SimulatedObservationAtCurrentAnalysis") \
                or self._toStore("SimulatedObservationAtCurrentOptimum"):
                _HXa = numpy.asmatrix(numpy.ravel( H((Xa, Un)) )).T
                _Innovation = Ynpu - _HXa
            #
            # ---> with analysis
            self.StoredVariables["Analysis"].store( Xa )
            if self._toStore("SimulatedObservationAtCurrentAnalysis"):
                self.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( _HXa )
            if self._toStore("InnovationAtCurrentAnalysis"):
                self.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
            # ---> with current state
            if self._parameters["StoreInternalVariables"] \
                or self._toStore("CurrentState"):
                self.StoredVariables["CurrentState"].store( Xn )
            if self._toStore("ForecastState"):
                self.StoredVariables["ForecastState"].store( Xn_predicted )
            if self._toStore("BMA"):
                self.StoredVariables["BMA"].store( Xn_predicted - Xa )
            if self._toStore("InnovationAtCurrentState"):
                self.StoredVariables["InnovationAtCurrentState"].store( - HX_predicted + Ynpu )
            if self._toStore("SimulatedObservationAtCurrentState") \
                or self._toStore("SimulatedObservationAtCurrentOptimum"):
                self.StoredVariables["SimulatedObservationAtCurrentState"].store( HX_predicted )
            # ---> others: cost functions and optimum tracking
            if self._parameters["StoreInternalVariables"] \
                or self._toStore("CostFunctionJ") \
                or self._toStore("CostFunctionJb") \
                or self._toStore("CostFunctionJo") \
                or self._toStore("CurrentOptimum") \
                or self._toStore("APosterioriCovariance"):
                Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
                Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
                J   = Jb + Jo
                self.StoredVariables["CostFunctionJb"].store( Jb )
                self.StoredVariables["CostFunctionJo"].store( Jo )
                self.StoredVariables["CostFunctionJ" ].store( J )
                #
                if self._toStore("IndexOfOptimum") \
                    or self._toStore("CurrentOptimum") \
                    or self._toStore("CostFunctionJAtCurrentOptimum") \
                    or self._toStore("CostFunctionJbAtCurrentOptimum") \
                    or self._toStore("CostFunctionJoAtCurrentOptimum") \
                    or self._toStore("SimulatedObservationAtCurrentOptimum"):
                    IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
                if self._toStore("IndexOfOptimum"):
                    self.StoredVariables["IndexOfOptimum"].store( IndexMin )
                if self._toStore("CurrentOptimum"):
                    self.StoredVariables["CurrentOptimum"].store( self.StoredVariables["Analysis"][IndexMin] )
                if self._toStore("SimulatedObservationAtCurrentOptimum"):
                    self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( self.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
                if self._toStore("CostFunctionJbAtCurrentOptimum"):
                    self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJb"][IndexMin] )
                if self._toStore("CostFunctionJoAtCurrentOptimum"):
                    self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJo"][IndexMin] )
                if self._toStore("CostFunctionJAtCurrentOptimum"):
                    self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] )
            if self._toStore("APosterioriCovariance"):
                # Empirical covariance of the analysis ensemble
                Pn = 0.
                for i in range(__m):
                    Eai = Xn[:,i] - Xa
                    Pn += Eai * Eai.T
                Pn  = (1./(__m-1)) * Pn
                self.StoredVariables["APosterioriCovariance"].store( Pn )
            # Track the minimum-cost analysis for parameter estimation
            if self._parameters["EstimationOf"] == "Parameters" \
                and J < previousJMinimum:
                previousJMinimum    = J
                XaMin               = Xa
                if self._toStore("APosterioriCovariance"):
                    covarianceXaMin = Pn
        #
        # Final extra storage of the optimum in parameter estimation
        # ----------------------------------------------------------
        if self._parameters["EstimationOf"] == "Parameters":
            self.StoredVariables["Analysis"].store( XaMin )
            if self._toStore("APosterioriCovariance"):
                self.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
            if self._toStore("BMA"):
                self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
        #
        self._post_run(HO)
        return 0
293
294 # ==============================================================================
if __name__ == "__main__":
    # Minimal self-identification banner when executed as a script.
    print("\n AUTODIAGNOSTIC\n")