1 # -*- coding: utf-8 -*-
3 # Copyright (C) 2008-2019 EDF R&D
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 # Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 # See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
21 # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
24 from daCore import BasicObjects, PlatformInfo
# Maximum floating-point precision available on this platform; used below as
# the dtype of ensemble-mean computations to limit round-off error.
# NOTE(review): `numpy` is used throughout this file but its import line is
# not visible in this excerpt (gap in the listing) — presumably imported just
# above; verify against the full file.
26 mfp = PlatformInfo.PlatformInfo().MaximumPrecision()
28 # ==============================================================================
29 class ElementaryAlgorithm(BasicObjects.Algorithm):
# Stochastic Ensemble Kalman Filter (EnKF) with perturbed observations: an
# ensemble of __m state members is propagated through the evolution model,
# and each member assimilates an observation perturbed by noise drawn from
# the observation-error covariance R.
# NOTE(review): this listing is an excerpt — the embedded original line
# numbers show gaps, so several statements (e.g. the `def __init__` header,
# some `else:` branches and `for i in range(__m):` loop headers) are not
# visible here; the comments below only describe what the visible lines do.
31 BasicObjects.Algorithm.__init__(self, "ENSEMBLEKALMANFILTER")
# Declare the user-settable parameters (the `message` strings are runtime
# strings, kept in French as in the original).
32 self.defineRequiredParameter(
33 name = "NumberOfMembers",
# Message: "Number of members in the ensemble".
36 message = "Nombre de membres dans l'ensemble",
39 self.defineRequiredParameter(
# Selects state vs. parameter estimation; restricted to the two listval modes.
40 name = "EstimationOf",
# Message: "State or parameter estimation".
43 message = "Estimation d'etat ou de parametres",
44 listval = ["State", "Parameters"],
46 self.defineRequiredParameter(
# Random seed; the typecast feeds the user value straight to numpy's RNG.
# Message: "Fixed seed for the random generator".
48 typecast = numpy.random.seed,
49 message = "Graine fixée pour le générateur aléatoire",
51 self.defineRequiredParameter(
52 name = "StoreInternalVariables",
# Message: "Storage of internal or intermediate variables of the computation".
55 message = "Stockage des variables internes ou intermédiaires du calcul",
57 self.defineRequiredParameter(
58 name = "StoreSupplementaryCalculations",
# Message: "List of supplementary computations to store and/or perform".
61 message = "Liste de calculs supplémentaires à stocker et/ou effectuer",
# Allowed supplementary quantities (list truncated in this excerpt):
63 "APosterioriCorrelations",
64 "APosterioriCovariance",
65 "APosterioriStandardDeviations",
66 "APosterioriVariances",
# Input contract: background Xb, observations Y, observation operator HO and
# covariances R, B are mandatory; control U, evolution operators EM/CM and
# model-error covariance Q are optional.
75 self.requireInputArguments(
76 mandatory= ("Xb", "Y", "HO", "R", "B"),
77 optional = ("U", "EM", "CM", "Q"),
80 def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
# Execute the EnKF analysis over all observation steps.
#   Xb: background state; Y: observation(s); U: control(s);
#   HO/EM/CM: observation / evolution / control operators;
#   R/B/Q: observation / background / model-error covariances.
# Results are pushed into self.StoredVariables (no return value visible here).
81 self._pre_run(Parameters, Xb, Y, R, B, Q)
# Parameter estimation always needs the internal states stored.
83 if self._parameters["EstimationOf"] == "Parameters":
84 self._parameters["StoreInternalVariables"] = True
# Shorthands for the operators in controlled form: x -> Op((x, u)).
88 H = HO["Direct"].appliedControledFormTo
90 if self._parameters["EstimationOf"] == "State":
91 M = EM["Direct"].appliedControledFormTo
# Tangent of the control operator, evaluated at the background.
93 if CM is not None and "Tangent" in CM and U is not None:
94 Cm = CM["Tangent"].asMatrix(Xb)
98 # Number of time steps identical to the number of observation steps
99 # -------------------------------------------------------
100 if hasattr(Y,"stepnumber"):
101 duration = Y.stepnumber()
# __p: dimension of the observation space (product of Y's shape).
102 __p = numpy.cumprod(Y.shape())[-1]
105 __p = numpy.array(Y).size
107 # Precompute the inversions of B and R
108 # ----------------------------------
# Inverses / Cholesky factors are only needed when cost functions or the
# a posteriori covariance are requested.
# NOTE(review): BI and RI (plain inverses used below for Jb/Jo) are
# presumably computed on lines missing from this excerpt — verify.
109 if self._parameters["StoreInternalVariables"] or \
110 self._toStore("CostFunctionJ") or \
111 self._toStore("CostFunctionJb") or \
112 self._toStore("CostFunctionJo") or \
113 self._toStore("APosterioriCovariance"):
116 BIdemi = B.choleskyI()
117 RIdemi = R.choleskyI()
# Initialize the ensemble: __m identical copies of Xb as columns (__n x __m).
# NOTE(review): __n (state dimension) is defined on a line missing here.
122 __m = self._parameters["NumberOfMembers"]
123 Xn = numpy.asmatrix(numpy.dot( Xb.reshape(__n,1), numpy.ones((1,__m)) ))
# Full-matrix forms of the covariances when the objects provide them.
124 if hasattr(B,"asfullmatrix"): Pn = B.asfullmatrix(__n)
126 if hasattr(R,"asfullmatrix"): Rn = R.asfullmatrix(__p)
128 if hasattr(Q,"asfullmatrix"): Qn = Q.asfullmatrix(__n)
# Store the background as the initial analysis.
131 self.StoredVariables["Analysis"].store( Xb.A1 )
132 if self._toStore("APosterioriCovariance"):
133 self.StoredVariables["APosterioriCovariance"].store( Pn )
# Best cost seen so far, for optimum tracking in parameter estimation.
136 previousJMinimum = numpy.finfo(float).max
# Pre-allocate the forecast ensemble and its image through H.
139 Xn_predicted = numpy.asmatrix(numpy.zeros((__n,__m)))
140 HX_predicted = numpy.asmatrix(numpy.zeros((__p,__m)))
# Main time loop over the observation steps.
142 for step in range(duration-1):
# Ynpu: observation at step+1, as a column vector.
143 if hasattr(Y,"store"):
144 Ynpu = numpy.asmatrix(numpy.ravel( Y[step+1] )).T
146 Ynpu = numpy.asmatrix(numpy.ravel( Y )).T
# Un: control at the current step (time series, single value, or raw U).
149 if hasattr(U,"store") and len(U)>1:
150 Un = numpy.asmatrix(numpy.ravel( U[step] )).T
151 elif hasattr(U,"store") and len(U)==1:
152 Un = numpy.asmatrix(numpy.ravel( U[0] )).T
154 Un = numpy.asmatrix(numpy.ravel( U )).T
# Forecast step: propagate each member through M with additive model
# noise qi ~ N(0, Qn), then map it through H.
# NOTE(review): the `for i in range(__m):` header enclosing lines
# 160-162 is on a line missing from this excerpt.
158 if self._parameters["EstimationOf"] == "State":
160 qi = numpy.asmatrix(numpy.random.multivariate_normal(numpy.zeros(__n), Qn)).T
161 Xn_predicted[:,i] = numpy.asmatrix(numpy.ravel( M((Xn[:,i], Un)) )).T + qi
162 HX_predicted[:,i] = numpy.asmatrix(numpy.ravel( H((Xn_predicted[:,i], Un)) )).T
163 if Cm is not None and Un is not None: # Caution: if Cm is also inside M, the control is applied twice!
164 Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
165 Xn_predicted = Xn_predicted + Cm * Un
166 elif self._parameters["EstimationOf"] == "Parameters":
167 # --- > By principle, M = Id, Q = 0
# Ensemble means of the forecast and of its observation image, computed
# at maximum precision (mfp) to limit round-off in the mean.
170 Xfm = numpy.asmatrix(numpy.ravel(Xn_predicted.mean(axis=1, dtype=mfp))).T
171 Hfm = numpy.asmatrix(numpy.ravel(HX_predicted.mean(axis=1, dtype=mfp))).T
# Centered anomalies of the forecast ensemble.
172 Af = Xn_predicted - Xfm
173 Hf = HX_predicted - Hfm
# Sample cross-covariances Pf.H^T and H.Pf.H^T (unbiased, 1/(m-1)).
# NOTE(review): the accumulator initializations and the member loop
# header for lines 177-178 are on lines missing from this excerpt.
177 PfHT += Af[:,i] * Hf[:,i].T
178 HPfHT += Hf[:,i] * Hf[:,i].T
179 PfHT = (1./(__m-1)) * PfHT
180 HPfHT = (1./(__m-1)) * HPfHT
# Kalman gain: K = Pf.H^T (R + H.Pf.H^T)^-1.
182 K = PfHT * ( R + HPfHT ).I
# Analysis step ("perturbed observations" EnKF): each member assimilates
# an observation perturbed with noise ri ~ N(0, Rn).
184 Yo = numpy.asmatrix(numpy.zeros((__p,__m)))
186 ri = numpy.asmatrix(numpy.random.multivariate_normal(numpy.zeros(__p), Rn)).T
190 Xn[:,i] = Xn_predicted[:,i] + K * (Yo[:,i] - HX_predicted[:,i])
# The analysis is the updated-ensemble mean.
192 Xa = Xn.mean(axis=1, dtype=mfp)
193 self.StoredVariables["Analysis"].store( Xa )
# Optional diagnostics: innovation d, current ensemble, cost functions.
196 if self._parameters["StoreInternalVariables"] or \
197 self._toStore("CostFunctionJ") or \
198 self._toStore("CostFunctionJb") or \
199 self._toStore("CostFunctionJo") or \
200 self._toStore("APosterioriCovariance") or \
201 self._toStore("Innovation"):
202 d = Ynpu - numpy.asmatrix(numpy.ravel( H((Xa, Un)) )).T
203 self.StoredVariables["Innovation"].store( d )
204 if self._parameters["StoreInternalVariables"] \
205 or self._toStore("CurrentState"):
206 self.StoredVariables["CurrentState"].store( Xn )
207 if self._parameters["StoreInternalVariables"] or \
208 self._toStore("CostFunctionJ") or \
209 self._toStore("CostFunctionJb") or \
210 self._toStore("CostFunctionJo") or \
211 self._toStore("APosterioriCovariance"):
# Variational-style cost at the analysis: J = Jb + Jo, with BI and RI
# the inverses of B and R (defined on lines missing from this excerpt).
212 Jb = 0.5 * (Xa - Xb).T * BI * (Xa - Xb)
213 Jo = 0.5 * d.T * RI * d
214 J = float( Jb ) + float( Jo )
215 self.StoredVariables["CostFunctionJb"].store( Jb )
216 self.StoredVariables["CostFunctionJo"].store( Jo )
217 self.StoredVariables["CostFunctionJ" ].store( J )
# A posteriori covariance: Pn = (I - K.Ht).Pf, with Pf the sample
# forecast covariance rebuilt from the anomalies Af.
# NOTE(review): the Pf initialization and its member loop header are on
# lines missing from this excerpt.
218 if self._toStore("APosterioriCovariance"):
219 Ht = HO["Tangent"].asMatrix(ValueForMethodForm = Xa)
220 Ht = Ht.reshape(__p,__n) # ADAO & check shape
223 Pf += Af[:,i] * Af[:,i].T
224 Pf = (1./(__m-1)) * Pf
225 Pn = (1. - K * Ht) * Pf
226 self.StoredVariables["APosterioriCovariance"].store( Pn )
# Track the minimum-cost analysis (body of this branch is missing from
# the excerpt; presumably it records Xa and Pn as the optimum).
227 if J < previousJMinimum:
232 # Additional storage of the optimum for parameter estimation
233 # ----------------------------------------------------------------
234 if self._parameters["EstimationOf"] == "Parameters":
235 self.StoredVariables["Analysis"].store( Xa.A1 )
236 if self._toStore("APosterioriCovariance"):
237 self.StoredVariables["APosterioriCovariance"].store( covarianceXa )
# Background-minus-analysis diagnostic.
239 if self._toStore("BMA"):
240 self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
245 # ==============================================================================
# Self-test entry point: only prints a banner when the module is run directly.
246 if __name__ == "__main__":
247 print('\n AUTODIAGNOSTIC \n')