1 # -*- coding: utf-8 -*-
3 # Copyright (C) 2008-2020 EDF R&D
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 # Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 # See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
21 # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
24 from daCore import BasicObjects
27 # ==============================================================================
28 class ElementaryAlgorithm(BasicObjects.Algorithm):
# NOTE(review): this is a line-numbered, partial extract of an ADAO algorithm
# module implementing the Extended Kalman Filter (EKF). Many original lines
# are elided (the numbering jumps): e.g. the `def __init__(self):` header that
# should precede original line 30, the `default`/`typecast` keywords of several
# parameter declarations, the `else` branches of the input-selection tests, and
# the initialisation of Xn, Pn, BI, RI and J in run(). The text below is NOT
# runnable as-is — confirm every gap against the complete upstream file.
#
# --- Constructor: declares the algorithm's parameters and required inputs ---
# How bounds constraints are enforced (only estimate projection is offered).
30 BasicObjects.Algorithm.__init__(self, "EXTENDEDKALMANFILTER")
31 self.defineRequiredParameter(
32 name = "ConstrainedBy",
33 default = "EstimateProjection",
35 message = "Prise en compte des contraintes",
36 listval = ["EstimateProjection"],
# Whether the filter estimates the model state or model parameters.
38 self.defineRequiredParameter(
39 name = "EstimationOf",
42 message = "Estimation d'etat ou de parametres",
43 listval = ["State", "Parameters"],
# Switch to keep internal/intermediate variables of the computation.
45 self.defineRequiredParameter(
46 name = "StoreInternalVariables",
49 message = "Stockage des variables internes ou intermédiaires du calcul",
# List of optional extra quantities to compute and/or store during the run.
51 self.defineRequiredParameter(
52 name = "StoreSupplementaryCalculations",
55 message = "Liste de calculs supplémentaires à stocker et/ou effectuer",
58 "APosterioriCorrelations",
59 "APosterioriCovariance",
60 "APosterioriStandardDeviations",
61 "APosterioriVariances",
64 "CostFunctionJAtCurrentOptimum",
66 "CostFunctionJbAtCurrentOptimum",
68 "CostFunctionJoAtCurrentOptimum",
69 "CurrentIterationNumber",
74 "InnovationAtCurrentAnalysis",
75 "InnovationAtCurrentState",
76 "SimulatedObservationAtCurrentAnalysis",
77 "SimulatedObservationAtCurrentOptimum",
78 "SimulatedObservationAtCurrentState",
# Bounds on the state, used below by the "EstimateProjection" constraint.
81 self.defineRequiredParameter( # No type declared for this parameter
83 message = "Liste des valeurs de bornes",
# Required inputs: Xb background, Y observations, HO observation operator,
# R/B observation/background covariances; optional: U control, EM evolution
# model, CM control operator, Q model-error covariance.
85 self.requireInputArguments(
86 mandatory= ("Xb", "Y", "HO", "R", "B"),
87 optional = ("U", "EM", "CM", "Q"),
89 self.setAttributes(tags=(
#
# --- Main entry point: runs one forecast/analysis EKF cycle per observation
96 def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
97 self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
# Parameter estimation forces storage of the internal state between steps.
99 if self._parameters["EstimationOf"] == "Parameters":
100 self._parameters["StoreInternalVariables"] = True
# Operators: Hm direct observation operator; Mm direct evolution model (state
# estimation only); Cm tangent control operator when a control U is supplied.
104 Hm = HO["Direct"].appliedControledFormTo
106 if self._parameters["EstimationOf"] == "State":
107 Mm = EM["Direct"].appliedControledFormTo
109 if CM is not None and "Tangent" in CM and U is not None:
110 Cm = CM["Tangent"].asMatrix(Xb)
114 # Number of time steps equal to the number of observation steps
115 # -------------------------------------------------------
116 if hasattr(Y,"stepnumber"):
117 duration = Y.stepnumber()
121 # Precompute the inverses of B and R
122 # ----------------------------------
# NOTE(review): BI and RI are computed in lines elided from this extract;
# they are used below for the cost function J = Jb + Jo.
123 if self._parameters["StoreInternalVariables"] \
124 or self._toStore("CostFunctionJ") \
125 or self._toStore("CostFunctionJb") \
126 or self._toStore("CostFunctionJo") \
127 or self._toStore("CurrentOptimum") \
128 or self._toStore("APosterioriCovariance"):
# Store the initial analysis (and its covariance) on a fresh run only,
# i.e. when nothing is stored yet or this is not a "nextStep" continuation.
137 if len(self.StoredVariables["Analysis"])==0 or not self._parameters["nextStep"]:
138 self.StoredVariables["CurrentIterationNumber"].store( len(self.StoredVariables["Analysis"]) )
139 self.StoredVariables["Analysis"].store( numpy.ravel(Xn) )
140 if self._toStore("APosterioriCovariance"):
141 self.StoredVariables["APosterioriCovariance"].store( Pn.asfullmatrix(Xn.size) )
# Sentinel so that the first computed J is always an improvement.
146 previousJMinimum = numpy.finfo(float).max
#
# Time loop: one forecast/analysis cycle per observation step.
148 for step in range(duration-1):
# Observation for the next step: time series Y[step+1], or constant Y.
149 if hasattr(Y,"store"):
150 Ynpu = numpy.asmatrix(numpy.ravel( Y[step+1] )).T
152 Ynpu = numpy.asmatrix(numpy.ravel( Y )).T
# Tangent (Ht) and adjoint (Ha) of H, linearized at the current state Xn.
154 Ht = HO["Tangent"].asMatrix(ValueForMethodForm = Xn)
155 Ht = Ht.reshape(Ynpu.size,Xn.size) # ADAO & check shape
156 Ha = HO["Adjoint"].asMatrix(ValueForMethodForm = Xn)
157 Ha = Ha.reshape(Xn.size,Ynpu.size) # ADAO & check shape
# Tangent/adjoint of the evolution model M (state estimation only).
159 if self._parameters["EstimationOf"] == "State":
160 Mt = EM["Tangent"].asMatrix(ValueForMethodForm = Xn)
161 Mt = Mt.reshape(Xn.size,Xn.size) # ADAO & check shape
162 Ma = EM["Adjoint"].asMatrix(ValueForMethodForm = Xn)
163 Ma = Ma.reshape(Xn.size,Xn.size) # ADAO & check shape
# Control for this step: time series U[step], single stored value, or constant.
166 if hasattr(U,"store") and len(U)>1:
167 Un = numpy.asmatrix(numpy.ravel( U[step] )).T
168 elif hasattr(U,"store") and len(U)==1:
169 Un = numpy.asmatrix(numpy.ravel( U[0] )).T
171 Un = numpy.asmatrix(numpy.ravel( U )).T
# Forecast step: Xn_predicted = M(Xn, Un) [+ Cm*Un], Pn_predicted = Q + Mt*Pn*Ma.
175 if self._parameters["EstimationOf"] == "State":
176 Xn_predicted = numpy.asmatrix(numpy.ravel( Mm( (Xn, Un) ) )).T
177 if Cm is not None and Un is not None: # Warning: if Cm is also inside M, this is applied twice!
178 Cm = Cm.reshape(Xn.size,Un.size) # ADAO & check shape
179 Xn_predicted = Xn_predicted + Cm * Un
180 Pn_predicted = Q + Mt * Pn * Ma
181 elif self._parameters["EstimationOf"] == "Parameters":
182 # ---> By principle, M = Id, Q = 0
# Enforce bounds by projecting the forecast onto [lower, upper] column-wise.
186 if self._parameters["Bounds"] is not None and self._parameters["ConstrainedBy"] == "EstimateProjection":
187 Xn_predicted = numpy.max(numpy.hstack((Xn_predicted,numpy.asmatrix(self._parameters["Bounds"])[:,0])),axis=1)
188 Xn_predicted = numpy.min(numpy.hstack((Xn_predicted,numpy.asmatrix(self._parameters["Bounds"])[:,1])),axis=1)
# Innovation: observation minus predicted observation H(Xn_predicted).
190 if self._parameters["EstimationOf"] == "State":
191 _HX = numpy.asmatrix(numpy.ravel( Hm( (Xn_predicted, None) ) )).T
192 _Innovation = Ynpu - _HX
193 elif self._parameters["EstimationOf"] == "Parameters":
194 _HX = numpy.asmatrix(numpy.ravel( Hm( (Xn_predicted, Un) ) )).T
195 _Innovation = Ynpu - _HX
196 if Cm is not None and Un is not None: # Warning: if Cm is also inside H, this is applied twice!
197 _Innovation = _Innovation - Cm * Un
# Analysis step: Kalman update of the state Xn and the covariance Pn,
# with Kn the Kalman gain (same matrix _A inverted twice — once via solve,
# once via .I; presumably kept for clarity in the original — verify upstream).
199 _A = R + numpy.dot(Ht, Pn_predicted * Ha)
200 _u = numpy.linalg.solve( _A , _Innovation )
201 Xn = Xn_predicted + Pn_predicted * Ha * _u
202 Kn = Pn_predicted * Ha * (R + numpy.dot(Ht, Pn_predicted * Ha)).I
203 Pn = Pn_predicted - Kn * Ht * Pn_predicted
204 Xa, _HXa = Xn, _HX # Pointers (aliases, not copies)
# ---> Per-step storage of the analysis and related quantities.
206 self.StoredVariables["CurrentIterationNumber"].store( len(self.StoredVariables["Analysis"]) )
208 self.StoredVariables["Analysis"].store( Xa )
209 if self._toStore("SimulatedObservationAtCurrentAnalysis"):
210 self.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( _HXa )
211 if self._toStore("InnovationAtCurrentAnalysis"):
212 self.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
213 # ---> with current state
214 if self._parameters["StoreInternalVariables"] \
215 or self._toStore("CurrentState"):
216 self.StoredVariables["CurrentState"].store( Xn )
217 if self._toStore("ForecastState"):
218 self.StoredVariables["ForecastState"].store( Xn_predicted )
219 if self._toStore("BMA"):
220 self.StoredVariables["BMA"].store( Xn_predicted - Xa )
221 if self._toStore("InnovationAtCurrentState"):
222 self.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
223 if self._toStore("SimulatedObservationAtCurrentState") \
224 or self._toStore("SimulatedObservationAtCurrentOptimum"):
225 self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX )
# ---> Cost function J = Jb (background term) + Jo (observation term);
# NOTE(review): the line summing J = Jb + Jo is elided from this extract.
227 if self._parameters["StoreInternalVariables"] \
228 or self._toStore("CostFunctionJ") \
229 or self._toStore("CostFunctionJb") \
230 or self._toStore("CostFunctionJo") \
231 or self._toStore("CurrentOptimum") \
232 or self._toStore("APosterioriCovariance"):
233 Jb = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
234 Jo = float( 0.5 * _Innovation.T * RI * _Innovation )
236 self.StoredVariables["CostFunctionJb"].store( Jb )
237 self.StoredVariables["CostFunctionJo"].store( Jo )
238 self.StoredVariables["CostFunctionJ" ].store( J )
# ---> Track the running optimum over the steps stored during this run.
240 if self._toStore("IndexOfOptimum") \
241 or self._toStore("CurrentOptimum") \
242 or self._toStore("CostFunctionJAtCurrentOptimum") \
243 or self._toStore("CostFunctionJbAtCurrentOptimum") \
244 or self._toStore("CostFunctionJoAtCurrentOptimum") \
245 or self._toStore("SimulatedObservationAtCurrentOptimum"):
246 IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
247 if self._toStore("IndexOfOptimum"):
248 self.StoredVariables["IndexOfOptimum"].store( IndexMin )
249 if self._toStore("CurrentOptimum"):
250 self.StoredVariables["CurrentOptimum"].store( self.StoredVariables["Analysis"][IndexMin] )
251 if self._toStore("SimulatedObservationAtCurrentOptimum"):
252 self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( self.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
253 if self._toStore("CostFunctionJbAtCurrentOptimum"):
254 self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJb"][IndexMin] )
255 if self._toStore("CostFunctionJoAtCurrentOptimum"):
256 self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJo"][IndexMin] )
257 if self._toStore("CostFunctionJAtCurrentOptimum"):
258 self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] )
259 if self._toStore("APosterioriCovariance"):
260 self.StoredVariables["APosterioriCovariance"].store( Pn )
# ---> Remember the best analysis so far (parameter estimation only);
# NOTE(review): the lines saving XaMin/previousJMinimum/covarianceXaMin
# are elided from this extract.
261 if self._parameters["EstimationOf"] == "Parameters" \
262 and J < previousJMinimum:
265 if self._toStore("APosterioriCovariance"):
268 # Final additional storage of the optimum for parameter estimation
269 # ----------------------------------------------------------------------
270 if self._parameters["EstimationOf"] == "Parameters":
271 self.StoredVariables["CurrentIterationNumber"].store( len(self.StoredVariables["Analysis"]) )
272 self.StoredVariables["Analysis"].store( XaMin )
273 if self._toStore("APosterioriCovariance"):
274 self.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
275 if self._toStore("BMA"):
276 self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
281 # ==============================================================================
# Self-diagnostic banner printed only when the module is executed directly.
282 if __name__ == "__main__":
283 print('\n AUTODIAGNOSTIC\n')