1 # -*- coding: utf-8 -*-
3 # Copyright (C) 2008-2019 EDF R&D
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 # Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 # See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
21 # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
24 from daCore import BasicObjects
27 # ==============================================================================
# NOTE(review): this chunk is a partial extraction — the original file's line
# numbers are fused onto each code line and many intermediate source lines are
# missing (e.g. the "def __init__" header before the line numbered 30, several
# parameter defaults, "else:" branches, and the Xn/Pn/BI/RI initialisations in
# run()). Only comments are added or translated below; code text is untouched.
28 class ElementaryAlgorithm(BasicObjects.Algorithm):
# Extended Kalman Filter data-assimilation algorithm, registered under the
# name "EXTENDEDKALMANFILTER"; supports state or parameter estimation.
30 BasicObjects.Algorithm.__init__(self, "EXTENDEDKALMANFILTER")
# How bound constraints are enforced: projection of the estimate onto bounds.
31 self.defineRequiredParameter(
32 name = "ConstrainedBy",
33 default = "EstimateProjection",
35 message = "Prise en compte des contraintes",
36 listval = ["EstimateProjection"],
# Choice between estimating the state ("State") or parameters ("Parameters").
38 self.defineRequiredParameter(
39 name = "EstimationOf",
42 message = "Estimation d'etat ou de parametres",
43 listval = ["State", "Parameters"],
# Whether to store internal/intermediate variables of the computation.
45 self.defineRequiredParameter(
46 name = "StoreInternalVariables",
49 message = "Stockage des variables internes ou intermédiaires du calcul",
# List of supplementary quantities that may be computed and/or stored;
# names below are the allowed values (listval, presumably — extraction
# dropped the surrounding keyword).
51 self.defineRequiredParameter(
52 name = "StoreSupplementaryCalculations",
55 message = "Liste de calculs supplémentaires à stocker et/ou effectuer",
58 "APosterioriCorrelations",
59 "APosterioriCovariance",
60 "APosterioriStandardDeviations",
61 "APosterioriVariances",
64 "CostFunctionJAtCurrentOptimum",
66 "CostFunctionJbAtCurrentOptimum",
68 "CostFunctionJoAtCurrentOptimum",
72 "InnovationAtCurrentAnalysis",
73 "InnovationAtCurrentState",
75 "SimulatedObservationAtCurrentAnalysis",
76 "SimulatedObservationAtCurrentOptimum",
77 "SimulatedObservationAtCurrentState",
80 self.defineRequiredParameter( # No declared type for this parameter
82 message = "Liste des valeurs de bornes",
# Mandatory inputs: background Xb, observations Y, observation operator HO,
# observation error covariance R, background error covariance B.
# Optional: control U, evolution model EM, control operator CM, model error Q.
84 self.requireInputArguments(
85 mandatory= ("Xb", "Y", "HO", "R", "B"),
86 optional = ("U", "EM", "CM", "Q"),
89 def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
# Execute the EKF: a sequential forecast/analysis loop over observation steps.
90 self._pre_run(Parameters, Xb, Y, R, B, Q)
# Parameter estimation forces storage of internal variables (the "current
# state" is then the parameter vector being estimated).
92 if self._parameters["EstimationOf"] == "Parameters":
93 self._parameters["StoreInternalVariables"] = True
# Direct observation operator in controlled form: called as Hm((X, U)).
97 Hm = HO["Direct"].appliedControledFormTo
# The evolution model is only needed for state estimation.
99 if self._parameters["EstimationOf"] == "State":
100 Mm = EM["Direct"].appliedControledFormTo
# Tangent of the control operator, linearised at the background Xb.
102 if CM is not None and "Tangent" in CM and U is not None:
103 Cm = CM["Tangent"].asMatrix(Xb)
107 # Number of steps identical to the number of observation steps
108 # -------------------------------------------------------
109 if hasattr(Y,"stepnumber"):
110 duration = Y.stepnumber()
114 # Pre-computation of the inverses of B and R
115 # ----------------------------------
# NOTE(review): the lines actually computing BI/RI (used for Jb/Jo below)
# are missing from this extraction — confirm against the full file.
116 if self._parameters["StoreInternalVariables"] \
117 or self._toStore("CostFunctionJ") \
118 or self._toStore("CostFunctionJb") \
119 or self._toStore("CostFunctionJo") \
120 or self._toStore("CurrentOptimum") \
121 or self._toStore("APosterioriCovariance"):
# Store the initial analysis (presumably the background Xn=Xb, Pn=B —
# the initialisation lines are missing here) on a fresh run only.
130 if len(self.StoredVariables["Analysis"])==0 or not self._parameters["nextStep"]:
131 self.StoredVariables["Analysis"].store( numpy.ravel(Xn) )
132 if self._toStore("APosterioriCovariance"):
133 self.StoredVariables["APosterioriCovariance"].store( Pn.asfullmatrix(Xn.size) )
# Best (lowest) cost seen so far, used for parameter estimation below.
138 previousJMinimum = numpy.finfo(float).max
# Main sequential loop over the observation steps.
140 for step in range(duration-1):
# Next observation: either one element of a stored series, or a constant.
141 if hasattr(Y,"store"):
142 Ynpu = numpy.asmatrix(numpy.ravel( Y[step+1] )).T
144 Ynpu = numpy.asmatrix(numpy.ravel( Y )).T
# Observation operator linearised at the current state (tangent + adjoint).
146 Ht = HO["Tangent"].asMatrix(ValueForMethodForm = Xn)
147 Ht = Ht.reshape(Ynpu.size,Xn.size) # ADAO & check shape
148 Ha = HO["Adjoint"].asMatrix(ValueForMethodForm = Xn)
149 Ha = Ha.reshape(Xn.size,Ynpu.size) # ADAO & check shape
# Evolution model linearised at the current state (state estimation only).
151 if self._parameters["EstimationOf"] == "State":
152 Mt = EM["Tangent"].asMatrix(ValueForMethodForm = Xn)
153 Mt = Mt.reshape(Xn.size,Xn.size) # ADAO & check shape
154 Ma = EM["Adjoint"].asMatrix(ValueForMethodForm = Xn)
155 Ma = Ma.reshape(Xn.size,Xn.size) # ADAO & check shape
# Control for this step: a per-step series, a single stored value, or a
# plain constant value.
158 if hasattr(U,"store") and len(U)>1:
159 Un = numpy.asmatrix(numpy.ravel( U[step] )).T
160 elif hasattr(U,"store") and len(U)==1:
161 Un = numpy.asmatrix(numpy.ravel( U[0] )).T
163 Un = numpy.asmatrix(numpy.ravel( U )).T
# Forecast (prediction) step.
167 if self._parameters["EstimationOf"] == "State":
168 Xn_predicted = numpy.asmatrix(numpy.ravel( Mm( (Xn, Un) ) )).T
169 if Cm is not None and Un is not None: # Beware: if Cm is also inside M, it is counted twice!
170 Cm = Cm.reshape(Xn.size,Un.size) # ADAO & check shape
171 Xn_predicted = Xn_predicted + Cm * Un
172 Pn_predicted = Q + Mt * Pn * Ma
173 elif self._parameters["EstimationOf"] == "Parameters":
174 # --- > By principle, M = Id, Q = 0
# Enforce the bounds by clipping the prediction onto [lower, upper]
# column-wise (Bounds[:,0] = lower, Bounds[:,1] = upper).
178 if self._parameters["Bounds"] is not None and self._parameters["ConstrainedBy"] == "EstimateProjection":
179 Xn_predicted = numpy.max(numpy.hstack((Xn_predicted,numpy.asmatrix(self._parameters["Bounds"])[:,0])),axis=1)
180 Xn_predicted = numpy.min(numpy.hstack((Xn_predicted,numpy.asmatrix(self._parameters["Bounds"])[:,1])),axis=1)
# Innovation: observation minus simulated observation of the prediction.
# For parameter estimation the control Un is passed through H as well.
182 if self._parameters["EstimationOf"] == "State":
183 _HX = numpy.asmatrix(numpy.ravel( Hm( (Xn_predicted, None) ) )).T
184 _Innovation = Ynpu - _HX
185 elif self._parameters["EstimationOf"] == "Parameters":
186 _HX = numpy.asmatrix(numpy.ravel( Hm( (Xn_predicted, Un) ) )).T
187 _Innovation = Ynpu - _HX
188 if Cm is not None and Un is not None: # Beware: if Cm is also inside H, it is counted twice!
189 _Innovation = _Innovation - Cm * Un
# Analysis (correction) step: solve (R + H P H*) u = innovation, then
# apply the Kalman gain; Pn is the updated a posteriori covariance.
191 _A = R + numpy.dot(Ht, Pn_predicted * Ha)
192 _u = numpy.linalg.solve( _A , _Innovation )
193 Xn = Xn_predicted + Pn_predicted * Ha * _u
194 Kn = Pn_predicted * Ha * (R + numpy.dot(Ht, Pn_predicted * Ha)).I
195 Pn = Pn_predicted - Kn * Ht * Pn_predicted
196 Xa, _HXa = Xn, _HX # Pointers (aliases), not copies
# ---> storage tied to the analysis
199 self.StoredVariables["Analysis"].store( Xa )
200 if self._toStore("SimulatedObservationAtCurrentAnalysis"):
201 self.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( _HXa )
202 if self._toStore("InnovationAtCurrentAnalysis"):
203 self.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
204 # ---> with the current state
205 if self._parameters["StoreInternalVariables"] \
206 or self._toStore("CurrentState"):
207 self.StoredVariables["CurrentState"].store( Xn )
208 if self._toStore("PredictedState"):
209 self.StoredVariables["PredictedState"].store( Xn_predicted )
210 if self._toStore("BMA"):
211 self.StoredVariables["BMA"].store( Xn_predicted - Xa )
212 if self._toStore("InnovationAtCurrentState"):
213 self.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
214 if self._toStore("SimulatedObservationAtCurrentState") \
215 or self._toStore("SimulatedObservationAtCurrentOptimum"):
216 self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX )
# ---> cost functions: Jb (background term), Jo (observation term).
# NOTE(review): the line computing J (presumably J = Jb + Jo) is missing
# from this extraction; J is stored and compared below.
218 if self._parameters["StoreInternalVariables"] \
219 or self._toStore("CostFunctionJ") \
220 or self._toStore("CostFunctionJb") \
221 or self._toStore("CostFunctionJo") \
222 or self._toStore("CurrentOptimum") \
223 or self._toStore("APosterioriCovariance"):
224 Jb = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
225 Jo = float( 0.5 * _Innovation.T * RI * _Innovation )
227 self.StoredVariables["CostFunctionJb"].store( Jb )
228 self.StoredVariables["CostFunctionJo"].store( Jo )
229 self.StoredVariables["CostFunctionJ" ].store( J )
# ---> quantities evaluated at the running optimum (lowest J so far,
# counted from nbPreviousSteps — defined outside this extraction).
231 if self._toStore("IndexOfOptimum") \
232 or self._toStore("CurrentOptimum") \
233 or self._toStore("CostFunctionJAtCurrentOptimum") \
234 or self._toStore("CostFunctionJbAtCurrentOptimum") \
235 or self._toStore("CostFunctionJoAtCurrentOptimum") \
236 or self._toStore("SimulatedObservationAtCurrentOptimum"):
237 IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
238 if self._toStore("IndexOfOptimum"):
239 self.StoredVariables["IndexOfOptimum"].store( IndexMin )
240 if self._toStore("CurrentOptimum"):
241 self.StoredVariables["CurrentOptimum"].store( self.StoredVariables["Analysis"][IndexMin] )
242 if self._toStore("SimulatedObservationAtCurrentOptimum"):
243 self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( self.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
244 if self._toStore("CostFunctionJbAtCurrentOptimum"):
245 self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJb"][IndexMin] )
246 if self._toStore("CostFunctionJoAtCurrentOptimum"):
247 self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJo"][IndexMin] )
248 if self._toStore("CostFunctionJAtCurrentOptimum"):
249 self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] )
250 if self._toStore("APosterioriCovariance"):
251 self.StoredVariables["APosterioriCovariance"].store( Pn )
# Remember the best parameter estimate (lowest J) for final storage below.
# NOTE(review): the body updating previousJMinimum / XaMin (and, under the
# APosterioriCovariance condition, covarianceXaMin) is missing here.
252 if self._parameters["EstimationOf"] == "Parameters" \
253 and J < previousJMinimum:
256 if self._toStore("APosterioriCovariance"):
259 # Additional final storage of the optimum for parameter estimation
260 # ----------------------------------------------------------------------
261 if self._parameters["EstimationOf"] == "Parameters":
262 self.StoredVariables["Analysis"].store( XaMin )
263 if self._toStore("APosterioriCovariance"):
264 self.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
265 if self._toStore("BMA"):
266 self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
271 # ==============================================================================
# Script entry point: running the module directly only prints a banner here;
# presumably a self-diagnostic followed in the full file — TODO confirm (the
# extraction may be truncated at this point).
272 if __name__ == "__main__":
273 print('\n AUTODIAGNOSTIC\n')