Salome HOME — repository: modules/adao.git
Commit: "Improvement of algorithms arguments validation and tests"
Path: src/daComposant/daAlgorithms/EnsembleKalmanFilter.py
1 # -*- coding: utf-8 -*-
2 #
3 # Copyright (C) 2008-2020 EDF R&D
4 #
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
9 #
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13 # Lesser General Public License for more details.
14 #
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
18 #
19 # See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
20 #
21 # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
22
23 import logging
24 from daCore import BasicObjects, PlatformInfo
25 import numpy, math
26 mfp = PlatformInfo.PlatformInfo().MaximumPrecision()
27
28 # ==============================================================================
class ElementaryAlgorithm(BasicObjects.Algorithm):
    """
    Stochastic (perturbed-observation) Ensemble Kalman Filter, usable either
    for state estimation along a series of observations, or for parameter
    estimation (in which case the evolution model is the identity and no
    model error is added).
    """
    def __init__(self):
        BasicObjects.Algorithm.__init__(self, "ENSEMBLEKALMANFILTER")
        self.defineRequiredParameter(
            name     = "NumberOfMembers",
            default  = 100,
            typecast = int,
            message  = "Nombre de membres dans l'ensemble",
            minval   = -1,
            )
        self.defineRequiredParameter(
            name     = "EstimationOf",
            default  = "State",
            typecast = str,
            message  = "Estimation d'etat ou de parametres",
            listval  = ["State", "Parameters"],
            )
        self.defineRequiredParameter(
            name     = "SetSeed",
            typecast = numpy.random.seed,
            message  = "Graine fixée pour le générateur aléatoire",
            )
        self.defineRequiredParameter(
            name     = "StoreInternalVariables",
            default  = False,
            typecast = bool,
            message  = "Stockage des variables internes ou intermédiaires du calcul",
            )
        self.defineRequiredParameter(
            name     = "StoreSupplementaryCalculations",
            default  = [],
            typecast = tuple,
            message  = "Liste de calculs supplémentaires à stocker et/ou effectuer",
            listval  = [
                "Analysis",
                "APosterioriCorrelations",
                "APosterioriCovariance",
                "APosterioriStandardDeviations",
                "APosterioriVariances",
                "BMA",
                "CostFunctionJ",
                "CostFunctionJAtCurrentOptimum",
                "CostFunctionJb",
                "CostFunctionJbAtCurrentOptimum",
                "CostFunctionJo",
                "CostFunctionJoAtCurrentOptimum",
                "CurrentOptimum",
                "CurrentState",
                "ForecastState",
                "IndexOfOptimum",
                "InnovationAtCurrentAnalysis",
                "InnovationAtCurrentState",
                "SimulatedObservationAtCurrentAnalysis",
                "SimulatedObservationAtCurrentOptimum",
                "SimulatedObservationAtCurrentState",
                ]
            )
        self.requireInputArguments(
            mandatory= ("Xb", "Y", "HO", "R", "B"),
            optional = ("U", "EM", "CM", "Q"),
            )
        self.setAttributes(tags=(
            "DataAssimilation",
            "NonLinear",
            "Filter",
            "Ensemble",
            "Dynamic",
            ))

    def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
        """
        Run the ensemble filter over the observation series.

        Xb: background state; Y: observation(s); U: optional control;
        HO/EM/CM: observation, evolution and control operators; R/B/Q:
        observation, background and evolution error covariances.
        All results are appended to self.StoredVariables. Returns 0.
        """
        self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
        #
        # Parameter estimation tracks the cost optimum, hence needs internals
        if self._parameters["EstimationOf"] == "Parameters":
            self._parameters["StoreInternalVariables"] = True
        #
        # Operators
        # ---------
        H = HO["Direct"].appliedControledFormTo
        #
        if self._parameters["EstimationOf"] == "State":
            M = EM["Direct"].appliedControledFormTo
        #
        if CM is not None and "Tangent" in CM and U is not None:
            Cm = CM["Tangent"].asMatrix(Xb)
        else:
            Cm = None
        #
        # Number of steps equal to the number of observation steps
        # --------------------------------------------------------
        if hasattr(Y,"stepnumber"):
            duration = Y.stepnumber()
            __p = numpy.cumprod(Y.shape())[-1]
        else:
            duration = 2
            __p = numpy.array(Y).size
        #
        # Precompute the inverses of B and R (needed only for cost functions)
        # -------------------------------------------------------------------
        if self._parameters["StoreInternalVariables"] \
            or self._toStore("CostFunctionJ") \
            or self._toStore("CostFunctionJb") \
            or self._toStore("CostFunctionJo") \
            or self._toStore("CurrentOptimum") \
            or self._toStore("APosterioriCovariance"):
            BI = B.getI()
            RI = R.getI()
        #
        # Initialization
        # --------------
        __n = Xb.size
        __m = self._parameters["NumberOfMembers"]
        # Initial ensemble: __m identical copies of the background state
        Xn = numpy.asmatrix(numpy.dot( Xb.reshape(__n,1), numpy.ones((1,__m)) ))
        if hasattr(B,"asfullmatrix"): Pn = B.asfullmatrix(__n)
        else:                         Pn = B
        if hasattr(R,"asfullmatrix"): Rn = R.asfullmatrix(__p)
        else:                         Rn = R
        if hasattr(Q,"asfullmatrix"): Qn = Q.asfullmatrix(__n)
        else:                         Qn = Q
        #
        if len(self.StoredVariables["Analysis"])==0 or not self._parameters["nextStep"]:
            self.StoredVariables["Analysis"].store( numpy.ravel(Xb) )
            if self._toStore("APosterioriCovariance"):
                self.StoredVariables["APosterioriCovariance"].store( Pn )
        #
        Xa               = Xb
        XaMin            = Xb
        previousJMinimum = numpy.finfo(float).max
        # FIX: offset of cost values stored by previous sequential runs
        # (presumably "nextStep" reruns — see the test above), so that the
        # optimum index below is searched within the current run only.
        # Without this the name was undefined and raised a NameError.
        nbPreviousSteps  = len(self.StoredVariables["CostFunctionJ"])
        #
        # Preallocation of the forecast ensembles
        Xn_predicted = numpy.asmatrix(numpy.zeros((__n,__m)))
        HX_predicted = numpy.asmatrix(numpy.zeros((__p,__m)))
        #
        for step in range(duration-1):
            if hasattr(Y,"store"):
                Ynpu = numpy.asmatrix(numpy.ravel( Y[step+1] )).T
            else:
                Ynpu = numpy.asmatrix(numpy.ravel( Y )).T
            #
            if U is not None:
                if hasattr(U,"store") and len(U)>1:
                    Un = numpy.asmatrix(numpy.ravel( U[step] )).T
                elif hasattr(U,"store") and len(U)==1:
                    Un = numpy.asmatrix(numpy.ravel( U[0] )).T
                else:
                    Un = numpy.asmatrix(numpy.ravel( U )).T
            else:
                Un = None
            #
            if self._parameters["EstimationOf"] == "State":
                # Forecast each member with the evolution model, adding a
                # model-error perturbation drawn from Q
                for i in range(__m):
                    qi = numpy.asmatrix(numpy.random.multivariate_normal(numpy.zeros(__n), Qn, (1,1,1))).T
                    Xn_predicted[:,i] = numpy.asmatrix(numpy.ravel( M((Xn[:,i], Un)) )).T + qi
                    HX_predicted[:,i] = numpy.asmatrix(numpy.ravel( H((Xn_predicted[:,i], Un)) )).T
                if Cm is not None and Un is not None: # Caution: if Cm is also embedded in M, the control is counted twice!
                    Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
                    Xn_predicted = Xn_predicted + Cm * Un
            elif self._parameters["EstimationOf"] == "Parameters":
                # ---> By principle, M = Id, Q = 0
                Xn_predicted = Xn
            #
            # Ensemble means of the forecast states and of their observed images
            Xfm = numpy.asmatrix(numpy.ravel(Xn_predicted.mean(axis=1, dtype=mfp))).T
            Hfm = numpy.asmatrix(numpy.ravel(HX_predicted.mean(axis=1, dtype=mfp))).T
            #
            # Empirical covariances Pf.H^T and H.Pf.H^T from ensemble anomalies
            PfHT, HPfHT = 0., 0.
            for i in range(__m):
                Exfi = Xn_predicted[:,i] - Xfm
                Eyfi = HX_predicted[:,i] - Hfm
                PfHT  += Exfi * Eyfi.T
                HPfHT += Eyfi * Eyfi.T
            PfHT  = (1./(__m-1)) * PfHT
            HPfHT = (1./(__m-1)) * HPfHT
            # Kalman gain
            K     = PfHT * ( R + HPfHT ).I
            del PfHT, HPfHT
            #
            # Analysis: update each member against a perturbed observation
            for i in range(__m):
                ri = numpy.asmatrix(numpy.random.multivariate_normal(numpy.zeros(__p), Rn, (1,1,1))).T
                Xn[:,i] = Xn_predicted[:,i] + K * (Ynpu + ri - HX_predicted[:,i])
            #
            # The analysis is the mean of the updated ensemble
            Xa = Xn.mean(axis=1, dtype=mfp)
            #
            if self._parameters["StoreInternalVariables"] \
                or self._toStore("CostFunctionJ") \
                or self._toStore("CostFunctionJb") \
                or self._toStore("CostFunctionJo") \
                or self._toStore("APosterioriCovariance") \
                or self._toStore("InnovationAtCurrentAnalysis") \
                or self._toStore("SimulatedObservationAtCurrentAnalysis") \
                or self._toStore("SimulatedObservationAtCurrentOptimum"):
                _HXa = numpy.asmatrix(numpy.ravel( H((Xa, Un)) )).T
                _Innovation = Ynpu - _HXa
            #
            # ---> with analysis
            self.StoredVariables["Analysis"].store( Xa )
            if self._toStore("SimulatedObservationAtCurrentAnalysis") \
                or self._toStore("SimulatedObservationAtCurrentOptimum"):
                # FIX: also stored when only the optimum variant is requested,
                # because the optimum series is extracted from this one by index
                self.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( _HXa )
            if self._toStore("InnovationAtCurrentAnalysis"):
                self.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
            # ---> with current state
            if self._parameters["StoreInternalVariables"] \
                or self._toStore("CurrentState"):
                self.StoredVariables["CurrentState"].store( Xn )
            if self._toStore("ForecastState"):
                self.StoredVariables["ForecastState"].store( Xn_predicted )
            if self._toStore("BMA"):
                self.StoredVariables["BMA"].store( Xn_predicted - Xa )
            if self._toStore("InnovationAtCurrentState"):
                self.StoredVariables["InnovationAtCurrentState"].store( - HX_predicted + Ynpu )
            if self._toStore("SimulatedObservationAtCurrentState") \
                or self._toStore("SimulatedObservationAtCurrentOptimum"):
                self.StoredVariables["SimulatedObservationAtCurrentState"].store( HX_predicted )
            # ---> others
            if self._parameters["StoreInternalVariables"] \
                or self._toStore("CostFunctionJ") \
                or self._toStore("CostFunctionJb") \
                or self._toStore("CostFunctionJo") \
                or self._toStore("CurrentOptimum") \
                or self._toStore("APosterioriCovariance"):
                Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
                Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
                J   = Jb + Jo
                self.StoredVariables["CostFunctionJb"].store( Jb )
                self.StoredVariables["CostFunctionJo"].store( Jo )
                self.StoredVariables["CostFunctionJ" ].store( J )
                #
                if self._toStore("IndexOfOptimum") \
                    or self._toStore("CurrentOptimum") \
                    or self._toStore("CostFunctionJAtCurrentOptimum") \
                    or self._toStore("CostFunctionJbAtCurrentOptimum") \
                    or self._toStore("CostFunctionJoAtCurrentOptimum") \
                    or self._toStore("SimulatedObservationAtCurrentOptimum"):
                    IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
                if self._toStore("IndexOfOptimum"):
                    self.StoredVariables["IndexOfOptimum"].store( IndexMin )
                if self._toStore("CurrentOptimum"):
                    self.StoredVariables["CurrentOptimum"].store( self.StoredVariables["Analysis"][IndexMin] )
                if self._toStore("SimulatedObservationAtCurrentOptimum"):
                    self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( self.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
                if self._toStore("CostFunctionJbAtCurrentOptimum"):
                    self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJb"][IndexMin] )
                if self._toStore("CostFunctionJoAtCurrentOptimum"):
                    self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJo"][IndexMin] )
                if self._toStore("CostFunctionJAtCurrentOptimum"):
                    self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] )
            if self._toStore("APosterioriCovariance"):
                # Empirical a posteriori covariance of the updated ensemble
                Pn = 0.
                for i in range(__m):
                    Eai = Xn[:,i] - Xa
                    Pn += Eai * Eai.T
                Pn  = (1./(__m-1)) * Pn
                self.StoredVariables["APosterioriCovariance"].store( Pn )
            if self._parameters["EstimationOf"] == "Parameters" \
                and J < previousJMinimum:
                previousJMinimum    = J
                XaMin               = Xa
                if self._toStore("APosterioriCovariance"):
                    covarianceXaMin = Pn
        #
        # Additional final storage of the optimum in parameter estimation
        # ---------------------------------------------------------------
        if self._parameters["EstimationOf"] == "Parameters":
            self.StoredVariables["Analysis"].store( XaMin )
            if self._toStore("APosterioriCovariance"):
                self.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
            if self._toStore("BMA"):
                self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
        #
        self._post_run(HO)
        return 0
300
# ==============================================================================
if __name__ == "__main__":
    # Self-diagnostic banner printed only on direct execution of the module
    print('\n AUTODIAGNOSTIC\n')