# -*- coding: utf-8 -*-
#
# Copyright (C) 2008-2024 EDF R&D
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
#
# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
__doc__ = """
    Unscented Kalman Filter
"""
__author__ = "Jean-Philippe ARGAUD"
import numpy, scipy, copy
from daCore.NumericObjects import GenerateWeightsAndSigmaPoints
from daCore.PlatformInfo import PlatformInfo, vfloat
mpr = PlatformInfo().MachinePrecision()
# ==============================================================================
def ecwukf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, VariantM="UKF"):
    """
    Unscented Kalman Filter
    """
    if selfA._parameters["EstimationOf"] == "Parameters":
        selfA._parameters["StoreInternalVariables"] = True
    #
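    # Unscented transform setup: Wm are the weights for the means, Wc the
    # weights for the covariances, SC the matrix of unit sigma-point directions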
    wsp = GenerateWeightsAndSigmaPoints(
        Nn = Xb.size,
        EO = selfA._parameters["EstimationOf"],
        VariantM = VariantM,
        Alpha = selfA._parameters["Alpha"],
        Beta = selfA._parameters["Beta"],
        Kappa = selfA._parameters["Kappa"],
    )
    Wm, Wc, SC = wsp.get()
    nbSpts = SC.shape[1]  # Number of sigma points (columns of SC)
    # Observation duration and sizes
    if hasattr(Y, "stepnumber"):
        duration = Y.stepnumber()
        __p = numpy.cumprod(Y.shape())[-1]
    else:
        duration = 2
        __p = numpy.size(Y)
    # Precompute the inverses of B and R
    if selfA._parameters["StoreInternalVariables"] \
            or selfA._toStore("CostFunctionJ") \
            or selfA._toStore("CostFunctionJb") \
            or selfA._toStore("CostFunctionJo") \
            or selfA._toStore("CurrentOptimum") \
            or selfA._toStore("APosterioriCovariance"):
        BI = B.getI()
        RI = R.getI()
    #
    __n = Xb.size
    nbPreviousSteps = len(selfA.StoredVariables["Analysis"])
    if len(selfA.StoredVariables["Analysis"]) == 0 or not selfA._parameters["nextStep"]:
        Xn = Xb
        if hasattr(B, "asfullmatrix"):
            Pn = B.asfullmatrix(__n)
        else:
            Pn = B
        selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
        selfA.StoredVariables["Analysis"].store( Xb )
        if selfA._toStore("APosterioriCovariance"):
            selfA.StoredVariables["APosterioriCovariance"].store( Pn )
    elif selfA._parameters["nextStep"]:
        Xn = selfA._getInternalState("Xn")
        Pn = selfA._getInternalState("Pn")
    if selfA._parameters["EstimationOf"] == "Parameters":
        XaMin = Xn
        previousJMinimum = numpy.finfo(float).max
    #
    for step in range(duration - 1):
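        # Acquisition of the control Un applying to the current step, if any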
        if U is not None:
            if hasattr(U, "store") and len(U) > 1:
                Un = numpy.ravel( U[step] ).reshape((-1, 1))
            elif hasattr(U, "store") and len(U) == 1:
                Un = numpy.ravel( U[0] ).reshape((-1, 1))
            else:
                Un = numpy.ravel( U ).reshape((-1, 1))
        else:
            Un = None
        #
        if CM is not None and "Tangent" in CM and U is not None:
            Cm = CM["Tangent"].asMatrix(Xn)
        else:
            Cm = None
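        # Forecast step: build the sigma points around the current state Xn,
        # shifted by the columns of the matrix square root of the covariance Pn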
        Pndemi = numpy.real(scipy.linalg.sqrtm(Pn))
        Xnmu = Xn + Pndemi @ SC
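        # Propagate every sigma point through the evolution model M in state
        # estimation; in parameter estimation the evolution is the identity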
        if selfA._parameters["EstimationOf"] == "State":
            Mm = EM["Direct"].appliedControledFormTo
            XEnnmu = Mm( [(Xnmu[:, point].reshape((-1, 1)), Un) for point in range(nbSpts)],
                         argsAsSerie = True,
                         returnSerieAsArrayMatrix = True )
            if Cm is not None and Un is not None:  # Beware: if Cm is also part of M, it is applied twice!
                Cm = Cm.reshape(__n, Un.size)  # ADAO & check shape
                XEnnmu = XEnnmu + Cm @ Un
        elif selfA._parameters["EstimationOf"] == "Parameters":
            # ---> By principle, M = Id, Q = 0
            XEnnmu = numpy.array( Xnmu )
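        # Predicted mean: Wm-weighted average of the propagated sigma points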
        Xhmn = ( XEnnmu * Wm ).sum(axis=1)
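        # Predicted covariance: Wc-weighted sum of the outer products of the
        # deviations, starting from the model error Q in state estimation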
        if selfA._parameters["EstimationOf"] == "State":
            Pmn = copy.copy(Q)
        elif selfA._parameters["EstimationOf"] == "Parameters":
            Pmn = 0.
        for point in range(nbSpts):
            dXEnnmuXhmn = XEnnmu[:, point].flat - Xhmn
            Pmn += Wc[point] * numpy.outer(dXEnnmuXhmn, dXEnnmuXhmn)
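        # Regenerate sigma points around the predicted mean before the update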
        Pmndemi = numpy.real(scipy.linalg.sqrtm(Pmn))
        Xnnmu = Xhmn.reshape((-1, 1)) + Pmndemi @ SC
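        # Map the regenerated sigma points through the observation operator H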
        Hm = HO["Direct"].appliedControledFormTo
        Ynnmu = Hm( [(Xnnmu[:, point], None) for point in range(nbSpts)],
                    argsAsSerie = True,
                    returnSerieAsArrayMatrix = True )
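        # Predicted observation mean, observation covariance Pyyn (including R)
        # and state-observation cross covariance Pxyn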
        Yhmn = ( Ynnmu * Wm ).sum(axis=1)
        #
        Pyyn = copy.copy(R)
        Pxyn = 0.
        for point in range(nbSpts):
            dYnnmuYhmn = Ynnmu[:, point].flat - Yhmn
            dXnnmuXhmn = Xnnmu[:, point].flat - Xhmn
            Pyyn += Wc[point] * numpy.outer(dYnnmuYhmn, dYnnmuYhmn)
            Pxyn += Wc[point] * numpy.outer(dXnnmuXhmn, dYnnmuYhmn)
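        # Innovation: misfit between the current observation and its prediction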
        if hasattr(Y, "store"):
            Ynpu = numpy.ravel( Y[step + 1] ).reshape((__p, 1))
        else:
            Ynpu = numpy.ravel( Y ).reshape((__p, 1))
        _Innovation = Ynpu - Yhmn.reshape((-1, 1))
        if selfA._parameters["EstimationOf"] == "Parameters":
            if Cm is not None and Un is not None:  # Beware: if Cm is also part of H, it is applied twice!
                _Innovation = _Innovation - Cm @ Un
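        # Analysis step: unscented Kalman gain, then state and covariance update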
        Kn = Pxyn @ scipy.linalg.inv(Pyyn)
        Xn = Xhmn.reshape((-1, 1)) + Kn @ _Innovation
        Pn = Pmn - Kn @ (Pyyn @ Kn.T)
        #
        Xa = Xn  # Pointers
        # --------------------------
        selfA._setInternalState("Xn", Xn)
        selfA._setInternalState("Pn", Pn)
        # --------------------------
        selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
        # ---> with analysis
        selfA.StoredVariables["Analysis"].store( Xa )
        if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
            selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( Hm((Xa, None)) )
        if selfA._toStore("InnovationAtCurrentAnalysis"):
            selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
        # ---> with current state
        if selfA._parameters["StoreInternalVariables"] \
                or selfA._toStore("CurrentState"):
            selfA.StoredVariables["CurrentState"].store( Xn )
        if selfA._toStore("ForecastState"):
            selfA.StoredVariables["ForecastState"].store( Xhmn )
        if selfA._toStore("ForecastCovariance"):
            selfA.StoredVariables["ForecastCovariance"].store( Pmn )
        if selfA._toStore("BMA"):
            selfA.StoredVariables["BMA"].store( Xhmn - Xa )
        if selfA._toStore("InnovationAtCurrentState"):
            selfA.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
        if selfA._toStore("SimulatedObservationAtCurrentState") \
                or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
            selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( Yhmn )
        if selfA._parameters["StoreInternalVariables"] \
                or selfA._toStore("CostFunctionJ") \
                or selfA._toStore("CostFunctionJb") \
                or selfA._toStore("CostFunctionJo") \
                or selfA._toStore("CurrentOptimum") \
                or selfA._toStore("APosterioriCovariance"):
            Jb = vfloat( 0.5 * (Xa - Xb).T * (BI * (Xa - Xb)) )
            Jo = vfloat( 0.5 * _Innovation.T * (RI * _Innovation) )
            J = Jb + Jo
            selfA.StoredVariables["CostFunctionJb"].store( Jb )
            selfA.StoredVariables["CostFunctionJo"].store( Jo )
            selfA.StoredVariables["CostFunctionJ" ].store( J )
            if selfA._toStore("IndexOfOptimum") \
                    or selfA._toStore("CurrentOptimum") \
                    or selfA._toStore("CostFunctionJAtCurrentOptimum") \
                    or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
                    or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
                    or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
                IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
            if selfA._toStore("IndexOfOptimum"):
                selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
            if selfA._toStore("CurrentOptimum"):
                selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
            if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
                selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )  # noqa: E501
            if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
                selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )  # noqa: E501
            if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
                selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )  # noqa: E501
            if selfA._toStore("CostFunctionJAtCurrentOptimum"):
                selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )  # noqa: E501
            if selfA._toStore("APosterioriCovariance"):
                selfA.StoredVariables["APosterioriCovariance"].store( Pn )
            if selfA._parameters["EstimationOf"] == "Parameters" \
                    and J < previousJMinimum:
                previousJMinimum = J
                XaMin = Xa
                if selfA._toStore("APosterioriCovariance"):
                    covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
    # Additional final storage of the optimum in parameter estimation
    # ----------------------------------------------------------------------
    if selfA._parameters["EstimationOf"] == "Parameters":
        selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
        selfA.StoredVariables["Analysis"].store( XaMin )
        if selfA._toStore("APosterioriCovariance"):
            selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
        if selfA._toStore("BMA"):
            selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
    #
    return 0
# ==============================================================================
if __name__ == "__main__":
    print('\n AUTODIAGNOSTIC\n')