[modules/adao.git] src/daComposant/daAlgorithms/EnsembleKalmanFilter.py
Compatibility fix for algorithm output variables clarity
# -*- coding: utf-8 -*-
#
# Copyright (C) 2008-2019 EDF R&D
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
#
# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
#
# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D

import logging
from daCore import BasicObjects, PlatformInfo
import numpy, math
mfp = PlatformInfo.PlatformInfo().MaximumPrecision()

# ==============================================================================
class ElementaryAlgorithm(BasicObjects.Algorithm):
    def __init__(self):
        BasicObjects.Algorithm.__init__(self, "ENSEMBLEKALMANFILTER")
        self.defineRequiredParameter(
            name     = "NumberOfMembers",
            default  = 100,
            typecast = int,
            message  = "Nombre de membres dans l'ensemble",
            minval   = -1,
            )
        self.defineRequiredParameter(
            name     = "EstimationOf",
            default  = "State",
            typecast = str,
            message  = "Estimation d'etat ou de parametres",
            listval  = ["State", "Parameters"],
            )
        self.defineRequiredParameter(
            name     = "SetSeed",
            typecast = numpy.random.seed,
            message  = "Graine fixée pour le générateur aléatoire",
            )
        self.defineRequiredParameter(
            name     = "StoreInternalVariables",
            default  = False,
            typecast = bool,
            message  = "Stockage des variables internes ou intermédiaires du calcul",
            )
        self.defineRequiredParameter(
            name     = "StoreSupplementaryCalculations",
            default  = [],
            typecast = tuple,
            message  = "Liste de calculs supplémentaires à stocker et/ou effectuer",
            listval  = [
                "APosterioriCorrelations",
                "APosterioriCovariance",
                "APosterioriStandardDeviations",
                "APosterioriVariances",
                "BMA",
                "CostFunctionJ",
                "CostFunctionJb",
                "CostFunctionJo",
                "CurrentState",
                "Innovation",
                ]
            )
        self.requireInputArguments(
            mandatory= ("Xb", "Y", "HO", "R", "B"),
            optional = ("U", "EM", "CM", "Q"),
            )

    def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
        self._pre_run(Parameters, Xb, Y, R, B, Q)
        #
        if self._parameters["EstimationOf"] == "Parameters":
            self._parameters["StoreInternalVariables"] = True
        #
        # Operators
        # ---------
        H = HO["Direct"].appliedControledFormTo
        #
        if self._parameters["EstimationOf"] == "State":
            M = EM["Direct"].appliedControledFormTo
        #
        if CM is not None and "Tangent" in CM and U is not None:
            Cm = CM["Tangent"].asMatrix(Xb)
        else:
            Cm = None
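        # Cm is the tangent control operator: when a control input U is given,
        # its contribution Cm * Un is added to the forecast of each member below.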
        #
        # The number of time steps equals the number of observation steps
        # ---------------------------------------------------------------
        if hasattr(Y,"stepnumber"):
            duration = Y.stepnumber()
            __p = numpy.cumprod(Y.shape())[-1]
        else:
            duration = 2
            __p = numpy.array(Y).size
        #
        # Precompute the inverses of B and R
        # ----------------------------------
        if self._parameters["StoreInternalVariables"] or \
            self._toStore("CostFunctionJ") or \
            self._toStore("CostFunctionJb") or \
            self._toStore("CostFunctionJo") or \
            self._toStore("APosterioriCovariance"):
            BI = B.getI()
            RI = R.getI()
        BIdemi = B.choleskyI()
        RIdemi = R.choleskyI()
        #
        # Initialization
        # --------------
        __n = Xb.size
        __m = self._parameters["NumberOfMembers"]
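        # The initial ensemble duplicates the background Xb over the __m members,
        # and B, R, Q are expanded to full matrices when given in a compact form.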
        Xn = numpy.asmatrix(numpy.dot( Xb.reshape(__n,1), numpy.ones((1,__m)) ))
        if hasattr(B,"asfullmatrix"): Pn = B.asfullmatrix(__n)
        else:                         Pn = B
        if hasattr(R,"asfullmatrix"): Rn = R.asfullmatrix(__p)
        else:                         Rn = R
        if hasattr(Q,"asfullmatrix"): Qn = Q.asfullmatrix(__n)
        else:                         Qn = Q
        #
        self.StoredVariables["Analysis"].store( Xb.A1 )
        if self._toStore("APosterioriCovariance"):
            self.StoredVariables["APosterioriCovariance"].store( Pn )
            covarianceXa = Pn
        Xa               = Xb
        previousJMinimum = numpy.finfo(float).max
        #
        # Preallocation
        Xn_predicted = numpy.asmatrix(numpy.zeros((__n,__m)))
        HX_predicted = numpy.asmatrix(numpy.zeros((__p,__m)))
        #
        for step in range(duration-1):
            if hasattr(Y,"store"):
                Ynpu = numpy.asmatrix(numpy.ravel( Y[step+1] )).T
            else:
                Ynpu = numpy.asmatrix(numpy.ravel( Y )).T
            #
            if U is not None:
                if hasattr(U,"store") and len(U)>1:
                    Un = numpy.asmatrix(numpy.ravel( U[step] )).T
                elif hasattr(U,"store") and len(U)==1:
                    Un = numpy.asmatrix(numpy.ravel( U[0] )).T
                else:
                    Un = numpy.asmatrix(numpy.ravel( U )).T
            else:
                Un = None
            #
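            # Forecast step: each member is propagated through the evolution model M
            # with additive model noise drawn from N(0, Qn), and the observation
            # operator H is applied to the forecast members.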
            if self._parameters["EstimationOf"] == "State":
                for i in range(__m):
                    qi = numpy.asmatrix(numpy.random.multivariate_normal(numpy.zeros(__n), Qn)).T
                    Xn_predicted[:,i] = numpy.asmatrix(numpy.ravel( M((Xn[:,i], Un)) )).T + qi
                    HX_predicted[:,i] = numpy.asmatrix(numpy.ravel( H((Xn_predicted[:,i], Un)) )).T
                if Cm is not None and Un is not None: # Caution: if Cm is also included in M, this counts it twice!
                    Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
                    Xn_predicted = Xn_predicted + Cm * Un
            elif self._parameters["EstimationOf"] == "Parameters":
                # ---> By construction, M = Id and Q = 0
                Xn_predicted = Xn
            #
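            # Ensemble means and anomalies: Af and Hf contain the deviations of the
            # forecast members, and of their images by H, from the ensemble means.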
            Xfm = numpy.asmatrix(numpy.ravel(Xn_predicted.mean(axis=1, dtype=mfp))).T
            Hfm = numpy.asmatrix(numpy.ravel(HX_predicted.mean(axis=1, dtype=mfp))).T
            Af  = Xn_predicted - Xfm
            Hf  = HX_predicted - Hfm
            #
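            # Empirical covariances estimated from the anomalies, with the unbiased
            # 1/(m-1) factor: PfHT approximates Pf H' and HPfHT approximates H Pf H'.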
            PfHT, HPfHT = 0., 0.
            for i in range(__m):
                PfHT  += Af[:,i] * Hf[:,i].T
                HPfHT += Hf[:,i] * Hf[:,i].T
            PfHT  = (1./(__m-1)) * PfHT
            HPfHT = (1./(__m-1)) * HPfHT
            #
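            # Kalman gain of the stochastic EnKF: K = Pf H' (R + H Pf H')^-1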
            K = PfHT * ( R + HPfHT ).I
            #
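            # Perturbed observations: each member gets its own realization of the
            # observation noise, drawn from N(0, Rn).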
            Yo = numpy.asmatrix(numpy.zeros((__p,__m)))
            for i in range(__m):
                ri = numpy.asmatrix(numpy.random.multivariate_normal(numpy.zeros(__p), Rn)).T
                Yo[:,i] = Ynpu + ri
            #
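            # Analysis step: each member is corrected by the gain applied to its own
            # innovation, Xn[:,i] = Xf[:,i] + K (Yo[:,i] - H(Xf[:,i])).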
            for i in range(__m):
                Xn[:,i] = Xn_predicted[:,i] + K * (Yo[:,i] - HX_predicted[:,i])
            #
            Xa = Xn.mean(axis=1, dtype=mfp)
            self.StoredVariables["Analysis"].store( Xa )
            #
            del Yo, PfHT, HPfHT
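            # Innovation of the analysis mean, d = y - H(xa), also used below for
            # the observation part of the cost function.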
            if self._parameters["StoreInternalVariables"] or \
                self._toStore("CostFunctionJ") or \
                self._toStore("CostFunctionJb") or \
                self._toStore("CostFunctionJo") or \
                self._toStore("APosterioriCovariance") or \
                self._toStore("Innovation"):
                d = Ynpu - numpy.asmatrix(numpy.ravel( H((Xa, Un)) )).T
                self.StoredVariables["Innovation"].store( d )
            if self._parameters["StoreInternalVariables"] \
                or self._toStore("CurrentState"):
                self.StoredVariables["CurrentState"].store( Xn )
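            # Cost function evaluated at the analysis mean:
            # J = Jb + Jo = 1/2 (Xa-Xb)' B^-1 (Xa-Xb) + 1/2 d' R^-1 d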
            if self._parameters["StoreInternalVariables"] or \
                self._toStore("CostFunctionJ") or \
                self._toStore("CostFunctionJb") or \
                self._toStore("CostFunctionJo") or \
                self._toStore("APosterioriCovariance"):
                Jb  = 0.5 * (Xa - Xb).T * BI * (Xa - Xb)
                Jo  = 0.5 * d.T * RI * d
                J   = float( Jb ) + float( Jo )
                self.StoredVariables["CostFunctionJb"].store( Jb )
                self.StoredVariables["CostFunctionJo"].store( Jo )
                self.StoredVariables["CostFunctionJ" ].store( J )
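            # A posteriori covariance: Pf is estimated from the forecast anomalies and
            # the analysis covariance follows the usual update Pn = (I - K H) Pf.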
            if self._toStore("APosterioriCovariance"):
                Ht = HO["Tangent"].asMatrix(ValueForMethodForm = Xa)
                Ht = Ht.reshape(__p,__n) # ADAO & check shape
                Pf = 0.
                for i in range(__m):
                    Pf += Af[:,i] * Af[:,i].T
                Pf = (1./(__m-1)) * Pf
                Pn = (numpy.identity(__n) - K * Ht) * Pf
                self.StoredVariables["APosterioriCovariance"].store( Pn )
                if J < previousJMinimum:
                    previousJMinimum  = J
                    Xa                = Xn
                    covarianceXa      = Pn
        #
        # Additional storage of the optimum for parameter estimation
        # ----------------------------------------------------------
        if self._parameters["EstimationOf"] == "Parameters":
            self.StoredVariables["Analysis"].store( Xa.A1 )
            if self._toStore("APosterioriCovariance"):
                self.StoredVariables["APosterioriCovariance"].store( covarianceXa )
        #
        if self._toStore("BMA"):
            self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
        #
        self._post_run(HO)
        return 0

# ==============================================================================
if __name__ == "__main__":
    print('\n AUTODIAGNOSTIC \n')