# Salome HOME
# Improvement of documentation and variables output for filters
# [modules/adao.git] / src / daComposant / daAlgorithms / EnsembleKalmanFilter.py
1 # -*- coding: utf-8 -*-
2 #
3 # Copyright (C) 2008-2019 EDF R&D
4 #
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
9 #
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13 # Lesser General Public License for more details.
14 #
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
18 #
19 # See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
20 #
21 # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
22
23 import logging
24 from daCore import BasicObjects, PlatformInfo
25 import numpy, math
26 mfp = PlatformInfo.PlatformInfo().MaximumPrecision()
27
28 # ==============================================================================
class ElementaryAlgorithm(BasicObjects.Algorithm):
    """
    Ensemble Kalman Filter (EnKF) with perturbed observations.

    An ensemble of state members is propagated through the evolution operator
    (with additive model noise drawn from Q), then each member is corrected
    with a Kalman-type gain built from the ensemble forecast covariances and
    an individually perturbed observation (noise drawn from R).  The filter
    can estimate either the system state ("State") or model parameters
    ("Parameters"); in the latter case the evolution is the identity and the
    best iterate (minimal cost J) is stored as the final analysis.
    """
    def __init__(self):
        BasicObjects.Algorithm.__init__(self, "ENSEMBLEKALMANFILTER")
        self.defineRequiredParameter(
            name     = "NumberOfMembers",
            default  = 100,
            typecast = int,
            message  = "Nombre de membres dans l'ensemble",
            minval   = -1,
            )
        self.defineRequiredParameter(
            name     = "EstimationOf",
            default  = "State",
            typecast = str,
            message  = "Estimation d'etat ou de parametres",
            listval  = ["State", "Parameters"],
            )
        self.defineRequiredParameter(
            name     = "SetSeed",
            typecast = numpy.random.seed,
            message  = "Graine fixée pour le générateur aléatoire",
            )
        self.defineRequiredParameter(
            name     = "StoreInternalVariables",
            default  = False,
            typecast = bool,
            message  = "Stockage des variables internes ou intermédiaires du calcul",
            )
        self.defineRequiredParameter(
            name     = "StoreSupplementaryCalculations",
            default  = [],
            typecast = tuple,
            message  = "Liste de calculs supplémentaires à stocker et/ou effectuer",
            listval  = [
                "Analysis",
                "APosterioriCorrelations",
                "APosterioriCovariance",
                "APosterioriStandardDeviations",
                "APosterioriVariances",
                "BMA",
                "CostFunctionJ",
                "CostFunctionJAtCurrentOptimum",
                "CostFunctionJb",
                "CostFunctionJbAtCurrentOptimum",
                "CostFunctionJo",
                "CostFunctionJoAtCurrentOptimum",
                "CurrentOptimum",
                "CurrentState",
                "IndexOfOptimum",
                "InnovationAtCurrentAnalysis",
                "InnovationAtCurrentState",
                "PredictedState",
                "SimulatedObservationAtCurrentAnalysis",
                "SimulatedObservationAtCurrentOptimum",
                "SimulatedObservationAtCurrentState",
                ]
            )
        self.requireInputArguments(
            mandatory= ("Xb", "Y", "HO", "R", "B"),
            optional = ("U", "EM", "CM", "Q"),
            )

    def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
        """
        Execute the EnKF over all observation time steps.

        Xb: background state; Y: observation (or observation series); U:
        optional control; HO/EM/CM: observation, evolution and control
        operators; R/B/Q: observation, background and model-error
        covariances.  Results are accumulated in self.StoredVariables;
        returns 0 on completion.
        """
        self._pre_run(Parameters, Xb, Y, R, B, Q)
        #
        # Parameter estimation relies on the cost function J below, which is
        # only computed when internal variables are stored: force it on.
        if self._parameters["EstimationOf"] == "Parameters":
            self._parameters["StoreInternalVariables"] = True
        #
        # Operators
        # ---------
        H = HO["Direct"].appliedControledFormTo
        #
        if self._parameters["EstimationOf"] == "State":
            M = EM["Direct"].appliedControledFormTo
        #
        if CM is not None and "Tangent" in CM and U is not None:
            Cm = CM["Tangent"].asMatrix(Xb)
        else:
            Cm = None
        #
        # Number of steps equals the number of observation steps
        # ------------------------------------------------------
        if hasattr(Y,"stepnumber"):
            duration = Y.stepnumber()
            __p = numpy.cumprod(Y.shape())[-1]
        else:
            duration = 2
            __p = numpy.array(Y).size
        #
        # Precompute the inverses of B and R, needed only for the cost
        # function J (same guard as the J computation inside the loop).
        # NOTE(review): the unused Cholesky factors BIdemi/RIdemi of the
        # original code were removed (dead computations).
        # ----------------------------------
        if self._parameters["StoreInternalVariables"] \
            or self._toStore("CostFunctionJ") \
            or self._toStore("CostFunctionJb") \
            or self._toStore("CostFunctionJo") \
            or self._toStore("CurrentOptimum") \
            or self._toStore("APosterioriCovariance"):
            BI = B.getI()
            RI = R.getI()
        #
        # Initialization
        # --------------
        __n = Xb.size
        __m = self._parameters["NumberOfMembers"]
        # Initial ensemble: __m copies of the background state, as columns
        Xn = numpy.asmatrix(numpy.dot( Xb.reshape(__n,1), numpy.ones((1,__m)) ))
        if hasattr(B,"asfullmatrix"): Pn = B.asfullmatrix(__n)
        else:                         Pn = B
        if hasattr(R,"asfullmatrix"): Rn = R.asfullmatrix(__p)
        else:                         Rn = R
        if hasattr(Q,"asfullmatrix"): Qn = Q.asfullmatrix(__n)
        else:                         Qn = Q
        #
        self.StoredVariables["Analysis"].store( Xb.A1 )
        if self._toStore("APosterioriCovariance"):
            self.StoredVariables["APosterioriCovariance"].store( Pn )
            # FIX: initialize the optimum covariance so the final storage in
            # "Parameters" mode cannot hit an undefined name (the original
            # assigned an unused "covarianceXa" here instead).
            covarianceXaMin = Pn
        Xa = XaMin       = Xb
        previousJMinimum = numpy.finfo(float).max
        # FIX: index of the first J value stored by this run; the original
        # referenced "nbPreviousSteps" below without ever defining it, which
        # raised a NameError as soon as any optimum-tracking output
        # ("IndexOfOptimum", "CurrentOptimum", ...) was requested.
        nbPreviousSteps  = self.StoredVariables["CostFunctionJ"].stepnumber()
        #
        # Pre-dimensioning of the forecast ensembles
        Xn_predicted = numpy.asmatrix(numpy.zeros((__n,__m)))
        HX_predicted = numpy.asmatrix(numpy.zeros((__p,__m)))
        #
        for step in range(duration-1):
            # Observation for the current step (series or single vector)
            if hasattr(Y,"store"):
                Ynpu = numpy.asmatrix(numpy.ravel( Y[step+1] )).T
            else:
                Ynpu = numpy.asmatrix(numpy.ravel( Y )).T
            #
            # Control for the current step, if any
            if U is not None:
                if hasattr(U,"store") and len(U)>1:
                    Un = numpy.asmatrix(numpy.ravel( U[step] )).T
                elif hasattr(U,"store") and len(U)==1:
                    Un = numpy.asmatrix(numpy.ravel( U[0] )).T
                else:
                    Un = numpy.asmatrix(numpy.ravel( U )).T
            else:
                Un = None
            #
            # Forecast step: propagate each member and observe it
            if self._parameters["EstimationOf"] == "State":
                for i in range(__m):
                    # Additive model noise drawn from Q for each member
                    qi = numpy.asmatrix(numpy.random.multivariate_normal(numpy.zeros(__n), Qn)).T
                    Xn_predicted[:,i] = numpy.asmatrix(numpy.ravel( M((Xn[:,i], Un)) )).T + qi
                    HX_predicted[:,i] = numpy.asmatrix(numpy.ravel( H((Xn_predicted[:,i], Un)) )).T
                if Cm is not None and Un is not None: # Warning: if Cm is also embedded in M, the control is applied twice!
                    Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
                    Xn_predicted = Xn_predicted + Cm * Un
            elif self._parameters["EstimationOf"] == "Parameters":
                # --- > By principle, M = Id, Q = 0
                Xn_predicted = Xn
                # FIX: the observation of each member must still be computed;
                # the original left HX_predicted at its initial zeros (or at
                # the previous step's values), so the analysis update below
                # used a wrong innovation in "Parameters" mode.
                for i in range(__m):
                    HX_predicted[:,i] = numpy.asmatrix(numpy.ravel( H((Xn_predicted[:,i], Un)) )).T
            #
            # Ensemble means and anomalies
            Xfm = numpy.asmatrix(numpy.ravel(Xn_predicted.mean(axis=1, dtype=mfp))).T
            Hfm = numpy.asmatrix(numpy.ravel(HX_predicted.mean(axis=1, dtype=mfp))).T
            Af  = Xn_predicted - Xfm
            Hf  = HX_predicted - Hfm
            #
            # Ensemble estimates of Pf.H^T and H.Pf.H^T (unbiased, 1/(m-1))
            PfHT, HPfHT = 0., 0.
            for i in range(__m):
                PfHT  += Af[:,i] * Hf[:,i].T
                HPfHT += Hf[:,i] * Hf[:,i].T
            PfHT  = (1./(__m-1)) * PfHT
            HPfHT = (1./(__m-1)) * HPfHT
            #
            # Kalman gain
            K = PfHT * ( R + HPfHT ).I
            #
            # Perturbed observations: one random perturbation per member
            Yo = numpy.asmatrix(numpy.zeros((__p,__m)))
            for i in range(__m):
                ri = numpy.asmatrix(numpy.random.multivariate_normal(numpy.zeros(__p), Rn)).T
                Yo[:,i] = Ynpu + ri
            #
            # Analysis update of each member
            for i in range(__m):
                Xn[:,i] = Xn_predicted[:,i] + K * (Yo[:,i] - HX_predicted[:,i])
            del PfHT, HPfHT
            #
            # Analysis = ensemble mean
            Xa = Xn.mean(axis=1, dtype=mfp)
            #
            if self._parameters["StoreInternalVariables"] \
                or self._toStore("CostFunctionJ") \
                or self._toStore("CostFunctionJb") \
                or self._toStore("CostFunctionJo") \
                or self._toStore("APosterioriCovariance") \
                or self._toStore("InnovationAtCurrentAnalysis") \
                or self._toStore("SimulatedObservationAtCurrentAnalysis") \
                or self._toStore("SimulatedObservationAtCurrentOptimum"):
                _HXa = numpy.asmatrix(numpy.ravel( H((Xa, Un)) )).T
                _Innovation = Ynpu - _HXa
            #
            # ---> with analysis
            self.StoredVariables["Analysis"].store( Xa )
            if self._toStore("SimulatedObservationAtCurrentAnalysis"):
                self.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( _HXa )
            if self._toStore("InnovationAtCurrentAnalysis"):
                self.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
            # ---> with current state
            if self._parameters["StoreInternalVariables"] \
                or self._toStore("CurrentState"):
                self.StoredVariables["CurrentState"].store( Xn )
            if self._toStore("PredictedState"):
                self.StoredVariables["PredictedState"].store( Xn_predicted )
            if self._toStore("BMA"):
                self.StoredVariables["BMA"].store( Xn_predicted - Xa )
            if self._toStore("InnovationAtCurrentState"):
                self.StoredVariables["InnovationAtCurrentState"].store( - HX_predicted + Ynpu )
            if self._toStore("SimulatedObservationAtCurrentState") \
                or self._toStore("SimulatedObservationAtCurrentOptimum"):
                self.StoredVariables["SimulatedObservationAtCurrentState"].store( HX_predicted )
            # ---> others: cost function J = Jb + Jo and optimum tracking
            if self._parameters["StoreInternalVariables"] \
                or self._toStore("CostFunctionJ") \
                or self._toStore("CostFunctionJb") \
                or self._toStore("CostFunctionJo") \
                or self._toStore("CurrentOptimum") \
                or self._toStore("APosterioriCovariance"):
                Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
                Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
                J   = Jb + Jo
                self.StoredVariables["CostFunctionJb"].store( Jb )
                self.StoredVariables["CostFunctionJo"].store( Jo )
                self.StoredVariables["CostFunctionJ" ].store( J )
                #
                if self._toStore("IndexOfOptimum") \
                    or self._toStore("CurrentOptimum") \
                    or self._toStore("CostFunctionJAtCurrentOptimum") \
                    or self._toStore("CostFunctionJbAtCurrentOptimum") \
                    or self._toStore("CostFunctionJoAtCurrentOptimum") \
                    or self._toStore("SimulatedObservationAtCurrentOptimum"):
                    IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
                if self._toStore("IndexOfOptimum"):
                    self.StoredVariables["IndexOfOptimum"].store( IndexMin )
                if self._toStore("CurrentOptimum"):
                    self.StoredVariables["CurrentOptimum"].store( self.StoredVariables["Analysis"][IndexMin] )
                if self._toStore("SimulatedObservationAtCurrentOptimum"):
                    self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( self.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
                if self._toStore("CostFunctionJbAtCurrentOptimum"):
                    self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJb"][IndexMin] )
                if self._toStore("CostFunctionJoAtCurrentOptimum"):
                    self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJo"][IndexMin] )
                if self._toStore("CostFunctionJAtCurrentOptimum"):
                    self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] )
            if self._toStore("APosterioriCovariance"):
                # Pn = (I - K.H) Pf, with Pf estimated from the anomalies
                Ht = HO["Tangent"].asMatrix(ValueForMethodForm = Xa)
                Ht = Ht.reshape(__p,__n) # ADAO & check shape
                Pf = 0.
                for i in range(__m):
                    Pf += Af[:,i] * Af[:,i].T
                Pf = (1./(__m-1)) * Pf
                Pn = (1. - K * Ht) * Pf
                self.StoredVariables["APosterioriCovariance"].store( Pn )
            # Track the best iterate in parameter estimation mode
            if self._parameters["EstimationOf"] == "Parameters" \
                and J < previousJMinimum:
                previousJMinimum    = J
                XaMin               = Xa
                if self._toStore("APosterioriCovariance"):
                    covarianceXaMin = Pn
        #
        # Additional final storage of the optimum in parameter estimation
        # ---------------------------------------------------------------
        if self._parameters["EstimationOf"] == "Parameters":
            self.StoredVariables["Analysis"].store( XaMin )
            if self._toStore("APosterioriCovariance"):
                self.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
            if self._toStore("BMA"):
                self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
        #
        self._post_run(HO)
        return 0
297
298 # ==============================================================================
# Minimal self-identification banner when the module is executed directly.
if __name__ == "__main__":
    print('\n AUTODIAGNOSTIC \n')