1 # -*- coding: utf-8 -*-
3 # Copyright (C) 2008-2019 EDF R&D
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 # Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 # See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
21 # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
24 from daCore import BasicObjects, PlatformInfo
# Maximum floating-point precision available on this platform; used below as
# the dtype of ensemble-mean computations (e.g. mean(axis=1, dtype=mfp)).
# NOTE(review): "numpy" is used throughout this chunk but its import is not
# visible here — presumably imported on a line elided from this extraction;
# confirm against the complete file.
26 mfp = PlatformInfo.PlatformInfo().MaximumPrecision()
28 # ==============================================================================
29 class ElementaryAlgorithm(BasicObjects.Algorithm):
# Ensemble Kalman Filter (EnKF) estimation algorithm for ADAO.
# The constructor registers the algorithm under the name
# "ENSEMBLEKALMANFILTER", declares its user-settable parameters, and states
# which input arguments are mandatory/optional.
#
# NOTE(review): the embedded original line numbers jump (29 -> 31, 33 -> 36,
# 44 -> 46, ...), so this view elides lines — in particular the
# "def __init__(self):" header expected before the call below, several
# parameter keywords (default/typecast/minval) and closing parentheses.
# The visible code is kept verbatim; confirm against the complete file.
31 BasicObjects.Algorithm.__init__(self, "ENSEMBLEKALMANFILTER")
# Ensemble size: number of members sampling the state distribution.
# (message text is user-facing French and intentionally left untranslated)
32 self.defineRequiredParameter(
33 name = "NumberOfMembers",
36 message = "Nombre de membres dans l'ensemble",
# Whether the filter estimates the state ("State") or model parameters
# ("Parameters"); restricted to that closed list of values.
39 self.defineRequiredParameter(
40 name = "EstimationOf",
43 message = "Estimation d'etat ou de parametres",
44 listval = ["State", "Parameters"],
# Random seed; the typecast applies it directly to numpy's generator,
# making the stochastic perturbations below reproducible.
46 self.defineRequiredParameter(
48 typecast = numpy.random.seed,
49 message = "Graine fixée pour le générateur aléatoire",
# Whether to store internal/intermediate variables of the computation.
51 self.defineRequiredParameter(
52 name = "StoreInternalVariables",
55 message = "Stockage des variables internes ou intermédiaires du calcul",
# Optional supplementary quantities the user may ask to compute/store;
# the names below are tested at run time via self._toStore(...).
57 self.defineRequiredParameter(
58 name = "StoreSupplementaryCalculations",
61 message = "Liste de calculs supplémentaires à stocker et/ou effectuer",
64 "APosterioriCorrelations",
65 "APosterioriCovariance",
66 "APosterioriStandardDeviations",
67 "APosterioriVariances",
70 "CostFunctionJAtCurrentOptimum",
72 "CostFunctionJbAtCurrentOptimum",
74 "CostFunctionJoAtCurrentOptimum",
78 "InnovationAtCurrentAnalysis",
79 "InnovationAtCurrentState",
81 "SimulatedObservationAtCurrentAnalysis",
82 "SimulatedObservationAtCurrentOptimum",
83 "SimulatedObservationAtCurrentState",
# Input contract: background, observations, observation operator and the
# B/R covariances are mandatory; control, evolution/control models and Q
# are optional.
86 self.requireInputArguments(
87 mandatory= ("Xb", "Y", "HO", "R", "B"),
88 optional = ("U", "EM", "CM", "Q"),
91 def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
# Execute one Ensemble Kalman Filter run (stochastic EnKF with perturbed
# observations).
#
# Arguments (standard ADAO objects):
#   Xb : background state          Y  : observation(s), possibly a series
#   U  : control input             HO : observation operator
#   EM : evolution model           CM : control model
#   R/B/Q : observation / background / model-error covariances
#   Parameters : algorithm parameters declared in __init__
#
# NOTE(review): the embedded original line numbers jump in many places, so
# this view elides lines — e.g. "else:" branches, the "for i in
# range(__m):" headers that must precede the per-member statements below,
# and the definitions of BI, RI, __n, nbPreviousSteps, J, XaMin,
# covarianceXaMin, plus the final self._post_run()/return. The visible
# code is kept verbatim; confirm against the complete file before editing.
92 self._pre_run(Parameters, Xb, Y, R, B, Q)
# Parameter estimation requires keeping the per-step internal states.
94 if self._parameters["EstimationOf"] == "Parameters":
95 self._parameters["StoreInternalVariables"] = True
# Operators in "controlled form": callables taking a pair (x, u).
99 H = HO["Direct"].appliedControledFormTo
101 if self._parameters["EstimationOf"] == "State":
102 M = EM["Direct"].appliedControledFormTo
# Tangent of the control operator, linearized at the background Xb.
104 if CM is not None and "Tangent" in CM and U is not None:
105 Cm = CM["Tangent"].asMatrix(Xb)
109 # Number of time steps, identical to the number of observation steps
110 # -------------------------------------------------------
111 if hasattr(Y,"stepnumber"):
112 duration = Y.stepnumber()
113 __p = numpy.cumprod(Y.shape())[-1]
# NOTE(review): an "else:" branch for scalar/single observations is
# presumably elided just above this line.
116 __p = numpy.array(Y).size
118 # Precomputation of the inversions of B and R
119 # ----------------------------------
# Only needed when cost functions / optimum tracking / a-posteriori
# covariance are requested by the user.
120 if self._parameters["StoreInternalVariables"] \
121 or self._toStore("CostFunctionJ") \
122 or self._toStore("CostFunctionJb") \
123 or self._toStore("CostFunctionJo") \
124 or self._toStore("CurrentOptimum") \
125 or self._toStore("APosterioriCovariance"):
# NOTE(review): BI and RI, used for Jb/Jo below, are presumably assigned
# on elided lines near here (e.g. B.getI()/R.getI()) — confirm.
128 BIdemi = B.choleskyI()
129 RIdemi = R.choleskyI()
# Initial ensemble: __m identical copies of the background Xb as columns.
# NOTE(review): __n (state dimension) is defined on an elided line.
134 __m = self._parameters["NumberOfMembers"]
135 Xn = numpy.asmatrix(numpy.dot( Xb.reshape(__n,1), numpy.ones((1,__m)) ))
# Expand covariance objects to dense matrices when they support it.
136 if hasattr(B,"asfullmatrix"): Pn = B.asfullmatrix(__n)
138 if hasattr(R,"asfullmatrix"): Rn = R.asfullmatrix(__p)
140 if hasattr(Q,"asfullmatrix"): Qn = Q.asfullmatrix(__n)
# Fresh run (no continuation step): store the background as the initial
# analysis, with its covariance if requested.
143 if len(self.StoredVariables["Analysis"])==0 or not self._parameters["nextStep"]:
144 self.StoredVariables["Analysis"].store( numpy.ravel(Xb) )
145 if self._toStore("APosterioriCovariance"):
146 self.StoredVariables["APosterioriCovariance"].store( Pn )
# Best cost seen so far, for optimum tracking in parameter estimation.
151 previousJMinimum = numpy.finfo(float).max
# Pre-allocated forecast ensemble (states, and their observation images).
154 Xn_predicted = numpy.asmatrix(numpy.zeros((__n,__m)))
155 HX_predicted = numpy.asmatrix(numpy.zeros((__p,__m)))
# Main time loop over the observation steps.
157 for step in range(duration-1):
# Ynpu: observation at step+1, as a column vector.
158 if hasattr(Y,"store"):
159 Ynpu = numpy.asmatrix(numpy.ravel( Y[step+1] )).T
161 Ynpu = numpy.asmatrix(numpy.ravel( Y )).T
# Un: control at the current step (time series, single value, or a
# constant; a final branch setting Un for plain U, and presumably one
# for U None, is partly elided).
164 if hasattr(U,"store") and len(U)>1:
165 Un = numpy.asmatrix(numpy.ravel( U[step] )).T
166 elif hasattr(U,"store") and len(U)==1:
167 Un = numpy.asmatrix(numpy.ravel( U[0] )).T
169 Un = numpy.asmatrix(numpy.ravel( U )).T
# Forecast step (state estimation): propagate each member i through M,
# perturb with model-error noise qi ~ N(0, Qn), and map through H.
# NOTE(review): the "for i in range(__m):" header is elided above the
# per-member lines below.
173 if self._parameters["EstimationOf"] == "State":
175 qi = numpy.asmatrix(numpy.random.multivariate_normal(numpy.zeros(__n), Qn, (1,1,1))).T
176 Xn_predicted[:,i] = numpy.asmatrix(numpy.ravel( M((Xn[:,i], Un)) )).T + qi
177 HX_predicted[:,i] = numpy.asmatrix(numpy.ravel( H((Xn_predicted[:,i], Un)) )).T
178 if Cm is not None and Un is not None: # Caution: if Cm is also included in M, it is applied twice!
179 Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
180 Xn_predicted = Xn_predicted + Cm * Un
181 elif self._parameters["EstimationOf"] == "Parameters":
182 # --- > By principle, M = Id, Q = 0
# Ensemble means of forecast states and of their observation images,
# accumulated at maximum platform precision (mfp).
185 Xfm = numpy.asmatrix(numpy.ravel(Xn_predicted.mean(axis=1, dtype=mfp))).T
186 Hfm = numpy.asmatrix(numpy.ravel(HX_predicted.mean(axis=1, dtype=mfp))).T
# Sample cross-covariances from member anomalies: PfHT ~ Pf.H^T and
# HPfHT ~ H.Pf.H^T, unbiased (divided by __m-1). The accumulator
# initializations and the per-member loop header are elided above.
190 Exfi = Xn_predicted[:,i] - Xfm
191 Eyfi = HX_predicted[:,i] - Hfm
192 PfHT += Exfi * Eyfi.T
193 HPfHT += Eyfi * Eyfi.T
194 PfHT = (1./(__m-1)) * PfHT
195 HPfHT = (1./(__m-1)) * HPfHT
# Kalman gain K = Pf.H^T (R + H.Pf.H^T)^-1 (.I is the matrix inverse).
196 K = PfHT * ( R + HPfHT ).I
# Analysis step: update each member with a perturbed observation
# (stochastic EnKF), ri ~ N(0, Rn); loop header elided above.
200 ri = numpy.asmatrix(numpy.random.multivariate_normal(numpy.zeros(__p), Rn, (1,1,1))).T
201 Xn[:,i] = Xn_predicted[:,i] + K * (Ynpu + ri - HX_predicted[:,i])
# The analysis is the ensemble mean of the updated members.
203 Xa = Xn.mean(axis=1, dtype=mfp)
# ---> quantities needed only if some storage was requested
205 if self._parameters["StoreInternalVariables"] \
206 or self._toStore("CostFunctionJ") \
207 or self._toStore("CostFunctionJb") \
208 or self._toStore("CostFunctionJo") \
209 or self._toStore("APosterioriCovariance") \
210 or self._toStore("InnovationAtCurrentAnalysis") \
211 or self._toStore("SimulatedObservationAtCurrentAnalysis") \
212 or self._toStore("SimulatedObservationAtCurrentOptimum"):
# Observation image of the analysis, and the corresponding innovation.
213 _HXa = numpy.asmatrix(numpy.ravel( H((Xa, Un)) )).T
214 _Innovation = Ynpu - _HXa
# ---> store the per-step analysis and its derived quantities
217 self.StoredVariables["Analysis"].store( Xa )
218 if self._toStore("SimulatedObservationAtCurrentAnalysis"):
219 self.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( _HXa )
220 if self._toStore("InnovationAtCurrentAnalysis"):
221 self.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
222 # ---> with current state
223 if self._parameters["StoreInternalVariables"] \
224 or self._toStore("CurrentState"):
225 self.StoredVariables["CurrentState"].store( Xn )
226 if self._toStore("PredictedState"):
227 self.StoredVariables["PredictedState"].store( Xn_predicted )
# BMA: Background Minus Analysis (here forecast ensemble minus analysis).
228 if self._toStore("BMA"):
229 self.StoredVariables["BMA"].store( Xn_predicted - Xa )
230 if self._toStore("InnovationAtCurrentState"):
231 self.StoredVariables["InnovationAtCurrentState"].store( - HX_predicted + Ynpu )
232 if self._toStore("SimulatedObservationAtCurrentState") \
233 or self._toStore("SimulatedObservationAtCurrentOptimum"):
234 self.StoredVariables["SimulatedObservationAtCurrentState"].store( HX_predicted )
# ---> cost functions Jb (background term) and Jo (observation term).
# NOTE(review): J = Jb + Jo is presumably computed on an elided line
# before being stored and compared below — confirm.
236 if self._parameters["StoreInternalVariables"] \
237 or self._toStore("CostFunctionJ") \
238 or self._toStore("CostFunctionJb") \
239 or self._toStore("CostFunctionJo") \
240 or self._toStore("CurrentOptimum") \
241 or self._toStore("APosterioriCovariance"):
242 Jb = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
243 Jo = float( 0.5 * _Innovation.T * RI * _Innovation )
245 self.StoredVariables["CostFunctionJb"].store( Jb )
246 self.StoredVariables["CostFunctionJo"].store( Jo )
247 self.StoredVariables["CostFunctionJ" ].store( J )
# ---> optimum tracking: index of the minimum J over this run's steps.
# NOTE(review): nbPreviousSteps is defined on an elided line.
249 if self._toStore("IndexOfOptimum") \
250 or self._toStore("CurrentOptimum") \
251 or self._toStore("CostFunctionJAtCurrentOptimum") \
252 or self._toStore("CostFunctionJbAtCurrentOptimum") \
253 or self._toStore("CostFunctionJoAtCurrentOptimum") \
254 or self._toStore("SimulatedObservationAtCurrentOptimum"):
255 IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
256 if self._toStore("IndexOfOptimum"):
257 self.StoredVariables["IndexOfOptimum"].store( IndexMin )
258 if self._toStore("CurrentOptimum"):
259 self.StoredVariables["CurrentOptimum"].store( self.StoredVariables["Analysis"][IndexMin] )
260 if self._toStore("SimulatedObservationAtCurrentOptimum"):
261 self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( self.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
262 if self._toStore("CostFunctionJbAtCurrentOptimum"):
263 self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJb"][IndexMin] )
264 if self._toStore("CostFunctionJoAtCurrentOptimum"):
265 self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJo"][IndexMin] )
266 if self._toStore("CostFunctionJAtCurrentOptimum"):
267 self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] )
# ---> a-posteriori covariance: sample covariance of the analysis
# ensemble (accumulation loop elided above the normalization below).
268 if self._toStore("APosterioriCovariance"):
273 Pn = (1./(__m-1)) * Pn
274 self.StoredVariables["APosterioriCovariance"].store( Pn )
# ---> in parameter estimation, remember the best (minimum-J) analysis.
# NOTE(review): the assignments previousJMinimum = J, XaMin = Xa (and
# covarianceXaMin) are presumably on elided lines in this branch.
275 if self._parameters["EstimationOf"] == "Parameters" \
276 and J < previousJMinimum:
279 if self._toStore("APosterioriCovariance"):
282 # Final supplementary storage of the optimum for parameter estimation
283 # ----------------------------------------------------------------------
284 if self._parameters["EstimationOf"] == "Parameters":
285 self.StoredVariables["Analysis"].store( XaMin )
286 if self._toStore("APosterioriCovariance"):
287 self.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
288 if self._toStore("BMA"):
289 self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
294 # ==============================================================================
# Script entry point: prints a diagnostic banner when executed directly.
295 if __name__ == "__main__":
296 print('\n AUTODIAGNOSTIC\n')