1 # -*- coding: utf-8 -*-
3 # Copyright (C) 2008-2020 EDF R&D
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 # Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 # See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
21 # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
import numpy
from daCore import BasicObjects, PlatformInfo
26 mfp = PlatformInfo.PlatformInfo().MaximumPrecision()
28 # ==============================================================================
class ElementaryAlgorithm(BasicObjects.Algorithm):
    """Ensemble Kalman Filter (EnKF) data-assimilation algorithm."""
    # NOTE(review): this excerpt is missing several source lines (e.g. the
    # "def __init__(self):" header before the constructor body below, some
    # "else:" branches, "for i in range(__m):" loop headers, and closing
    # parentheses of the parameter declarations). Code tokens that are
    # present are kept byte-identical; the gaps are flagged, not guessed.
        # Declare the algorithm name and its user-settable parameters.
        BasicObjects.Algorithm.__init__(self, "ENSEMBLEKALMANFILTER")
        self.defineRequiredParameter(
            name = "NumberOfMembers",
            message = "Nombre de membres dans l'ensemble",
        self.defineRequiredParameter(
            # Choice between state estimation and parameter estimation.
            name = "EstimationOf",
            message = "Estimation d'etat ou de parametres",
            listval = ["State", "Parameters"],
        self.defineRequiredParameter(
            # The seed is forwarded directly to numpy's global RNG so that
            # the stochastic perturbations below are reproducible.
            typecast = numpy.random.seed,
            message = "Graine fixée pour le générateur aléatoire",
        self.defineRequiredParameter(
            name = "StoreInternalVariables",
            message = "Stockage des variables internes ou intermédiaires du calcul",
        self.defineRequiredParameter(
            name = "StoreSupplementaryCalculations",
            message = "Liste de calculs supplémentaires à stocker et/ou effectuer",
            # Allowed values for the optional storage requests tested via
            # self._toStore(...) in run() below.
                "APosterioriCorrelations",
                "APosterioriCovariance",
                "APosterioriStandardDeviations",
                "APosterioriVariances",
                "CostFunctionJAtCurrentOptimum",
                "CostFunctionJbAtCurrentOptimum",
                "CostFunctionJoAtCurrentOptimum",
                "InnovationAtCurrentAnalysis",
                "InnovationAtCurrentState",
                "SimulatedObservationAtCurrentAnalysis",
                "SimulatedObservationAtCurrentOptimum",
                "SimulatedObservationAtCurrentState",
        # Input objects the algorithm requires or merely accepts.
        self.requireInputArguments(
            mandatory= ("Xb", "Y", "HO", "R", "B"),
            optional = ("U", "EM", "CM", "Q"),
        self.setAttributes(tags=(

    def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
        """Execute the EnKF over the sequence of observation steps.

        Xb: background state; Y: observation(s); U: control(s);
        HO/EM/CM: observation/evolution/control operators;
        R/B/Q: observation/background/evolution error covariances.
        Results are appended to self.StoredVariables; nothing is returned.
        """
        self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
        #
        if self._parameters["EstimationOf"] == "Parameters":
            # Parameter estimation needs the internal states to be kept.
            self._parameters["StoreInternalVariables"] = True
        #
        # Operators in controlled form: callables taking a (state, control) pair.
        H = HO["Direct"].appliedControledFormTo
        if self._parameters["EstimationOf"] == "State":
            M = EM["Direct"].appliedControledFormTo
        if CM is not None and "Tangent" in CM and U is not None:
            Cm = CM["Tangent"].asMatrix(Xb)
        #
        # Number of steps equal to the number of observation steps
        # --------------------------------------------------------
        if hasattr(Y,"stepnumber"):
            duration = Y.stepnumber()
            __p = numpy.cumprod(Y.shape())[-1]
            # NOTE(review): the "else:" branch for a single observation
            # appears to be missing from this excerpt before the next line.
            __p = numpy.array(Y).size
        #
        # Precompute the inversions of B and R
        # ------------------------------------
        if self._parameters["StoreInternalVariables"] \
            or self._toStore("CostFunctionJ") \
            or self._toStore("CostFunctionJb") \
            or self._toStore("CostFunctionJo") \
            or self._toStore("CurrentOptimum") \
            or self._toStore("APosterioriCovariance"):
        # BIdemi = B.choleskyI()
        # RIdemi = R.choleskyI()
        #
        # Initial ensemble: the background Xb replicated over the __m members.
        __m = self._parameters["NumberOfMembers"]
        Xn = numpy.asmatrix(numpy.dot( Xb.reshape(__n,1), numpy.ones((1,__m)) ))
        # Expand lightweight covariance objects to full (dense) matrices.
        if hasattr(B,"asfullmatrix"): Pn = B.asfullmatrix(__n)
        if hasattr(R,"asfullmatrix"): Rn = R.asfullmatrix(__p)
        if hasattr(Q,"asfullmatrix"): Qn = Q.asfullmatrix(__n)
        #
        # Store the background as first analysis unless continuing a previous run.
        if len(self.StoredVariables["Analysis"])==0 or not self._parameters["nextStep"]:
            self.StoredVariables["Analysis"].store( numpy.ravel(Xb) )
            if self._toStore("APosterioriCovariance"):
                self.StoredVariables["APosterioriCovariance"].store( Pn )
        #
        previousJMinimum = numpy.finfo(float).max
        #
        Xn_predicted = numpy.asmatrix(numpy.zeros((__n,__m)))
        HX_predicted = numpy.asmatrix(numpy.zeros((__p,__m)))
        #
        for step in range(duration-1):
            # Current observation Ynpu as a column vector (stepped or constant).
            if hasattr(Y,"store"):
                Ynpu = numpy.asmatrix(numpy.ravel( Y[step+1] )).T
                Ynpu = numpy.asmatrix(numpy.ravel( Y )).T
            #
            # Current control Un: per-step series, single stored value, or constant.
            if hasattr(U,"store") and len(U)>1:
                Un = numpy.asmatrix(numpy.ravel( U[step] )).T
            elif hasattr(U,"store") and len(U)==1:
                Un = numpy.asmatrix(numpy.ravel( U[0] )).T
                Un = numpy.asmatrix(numpy.ravel( U )).T
            #
            # Forecast step: propagate each member through M with additive
            # model noise, then map it to observation space through H.
            # NOTE(review): the "for i in range(__m):" loop header over the
            # ensemble members appears to be missing from this excerpt.
            if self._parameters["EstimationOf"] == "State":
                    # Model-error perturbation drawn from N(0, Qn).
                    qi = numpy.asmatrix(numpy.random.multivariate_normal(numpy.zeros(__n), Qn, (1,1,1))).T
                    Xn_predicted[:,i] = numpy.asmatrix(numpy.ravel( M((Xn[:,i], Un)) )).T + qi
                    HX_predicted[:,i] = numpy.asmatrix(numpy.ravel( H((Xn_predicted[:,i], Un)) )).T
                if Cm is not None and Un is not None: # Warning: if Cm is also inside M, it is applied twice!
                    Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
                    Xn_predicted = Xn_predicted + Cm * Un
            elif self._parameters["EstimationOf"] == "Parameters":
                # --- > By principle, M = Id, Q = 0
            #
            # Ensemble means in state space and observation space.
            Xfm = numpy.asmatrix(numpy.ravel(Xn_predicted.mean(axis=1, dtype=mfp))).T
            Hfm = numpy.asmatrix(numpy.ravel(HX_predicted.mean(axis=1, dtype=mfp))).T
            #
            # Ensemble anomalies accumulated into the sample cross-covariance
            # Pf.H^T and the projected covariance H.Pf.H^T.
                Exfi = Xn_predicted[:,i] - Xfm
                Eyfi = HX_predicted[:,i] - Hfm
                PfHT += Exfi * Eyfi.T
                HPfHT += Eyfi * Eyfi.T
            # Unbiased sample covariances (divide by __m - 1).
            PfHT = (1./(__m-1)) * PfHT
            HPfHT = (1./(__m-1)) * HPfHT
            # Kalman gain.
            K = PfHT * ( R + HPfHT ).I
            #
            # Analysis step: update each member with a perturbed observation
            # (stochastic EnKF: ri drawn from N(0, Rn)).
                ri = numpy.asmatrix(numpy.random.multivariate_normal(numpy.zeros(__p), Rn, (1,1,1))).T
                Xn[:,i] = Xn_predicted[:,i] + K * (Ynpu + ri - HX_predicted[:,i])
            #
            # The analysis Xa is the mean of the updated ensemble.
            Xa = Xn.mean(axis=1, dtype=mfp)
            #
            if self._parameters["StoreInternalVariables"] \
                or self._toStore("CostFunctionJ") \
                or self._toStore("CostFunctionJb") \
                or self._toStore("CostFunctionJo") \
                or self._toStore("APosterioriCovariance") \
                or self._toStore("InnovationAtCurrentAnalysis") \
                or self._toStore("SimulatedObservationAtCurrentAnalysis") \
                or self._toStore("SimulatedObservationAtCurrentOptimum"):
                _HXa = numpy.asmatrix(numpy.ravel( H((Xa, Un)) )).T
                _Innovation = Ynpu - _HXa
            #
            self.StoredVariables["Analysis"].store( Xa )
            if self._toStore("SimulatedObservationAtCurrentAnalysis"):
                self.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( _HXa )
            if self._toStore("InnovationAtCurrentAnalysis"):
                self.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
            # ---> with current state
            if self._parameters["StoreInternalVariables"] \
                or self._toStore("CurrentState"):
                self.StoredVariables["CurrentState"].store( Xn )
            if self._toStore("ForecastState"):
                self.StoredVariables["ForecastState"].store( Xn_predicted )
            if self._toStore("BMA"):
                self.StoredVariables["BMA"].store( Xn_predicted - Xa )
            if self._toStore("InnovationAtCurrentState"):
                self.StoredVariables["InnovationAtCurrentState"].store( - HX_predicted + Ynpu )
            if self._toStore("SimulatedObservationAtCurrentState") \
                or self._toStore("SimulatedObservationAtCurrentOptimum"):
                self.StoredVariables["SimulatedObservationAtCurrentState"].store( HX_predicted )
            # ---> cost functions Jb (background term) and Jo (observation term)
            if self._parameters["StoreInternalVariables"] \
                or self._toStore("CostFunctionJ") \
                or self._toStore("CostFunctionJb") \
                or self._toStore("CostFunctionJo") \
                or self._toStore("CurrentOptimum") \
                or self._toStore("APosterioriCovariance"):
                Jb = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
                Jo = float( 0.5 * _Innovation.T * RI * _Innovation )
                # NOTE(review): the "J = Jb + Jo" line appears to be missing
                # from this excerpt before the three store calls below.
                self.StoredVariables["CostFunctionJb"].store( Jb )
                self.StoredVariables["CostFunctionJo"].store( Jo )
                self.StoredVariables["CostFunctionJ" ].store( J )
                # Track the running optimum over the stored J values.
                if self._toStore("IndexOfOptimum") \
                    or self._toStore("CurrentOptimum") \
                    or self._toStore("CostFunctionJAtCurrentOptimum") \
                    or self._toStore("CostFunctionJbAtCurrentOptimum") \
                    or self._toStore("CostFunctionJoAtCurrentOptimum") \
                    or self._toStore("SimulatedObservationAtCurrentOptimum"):
                    IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
                    if self._toStore("IndexOfOptimum"):
                        self.StoredVariables["IndexOfOptimum"].store( IndexMin )
                    if self._toStore("CurrentOptimum"):
                        self.StoredVariables["CurrentOptimum"].store( self.StoredVariables["Analysis"][IndexMin] )
                    if self._toStore("SimulatedObservationAtCurrentOptimum"):
                        self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( self.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
                    if self._toStore("CostFunctionJbAtCurrentOptimum"):
                        self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJb"][IndexMin] )
                    if self._toStore("CostFunctionJoAtCurrentOptimum"):
                        self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJo"][IndexMin] )
                    if self._toStore("CostFunctionJAtCurrentOptimum"):
                        self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] )
            if self._toStore("APosterioriCovariance"):
                # Unbiased sample covariance of the analysis ensemble
                # (the accumulation loop over members is not shown here).
                Pn = (1./(__m-1)) * Pn
                self.StoredVariables["APosterioriCovariance"].store( Pn )
            # Remember the best analysis so far when estimating parameters.
            if self._parameters["EstimationOf"] == "Parameters" \
                and J < previousJMinimum:
                if self._toStore("APosterioriCovariance"):
        #
        # Final additional storage of the optimum for parameter estimation
        # ----------------------------------------------------------------
        if self._parameters["EstimationOf"] == "Parameters":
            self.StoredVariables["Analysis"].store( XaMin )
            if self._toStore("APosterioriCovariance"):
                self.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
            if self._toStore("BMA"):
                self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
301 # ==============================================================================
302 if __name__ == "__main__":
303 print('\n AUTODIAGNOSTIC\n')