Salome HOME
Adding ParallelFunctionTest algorithm and improving parallel modes
[modules/adao.git] / src / daComposant / daAlgorithms / ExtendedKalmanFilter.py
1 # -*- coding: utf-8 -*-
2 #
3 # Copyright (C) 2008-2019 EDF R&D
4 #
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
9 #
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13 # Lesser General Public License for more details.
14 #
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
18 #
19 # See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
20 #
21 # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
22
23 import logging
24 from daCore import BasicObjects
25 import numpy
26
27 # ==============================================================================
class ElementaryAlgorithm(BasicObjects.Algorithm):
    """
    Extended Kalman Filter (EKF).

    Usable either for state estimation ("EstimationOf" == "State", with an
    evolution model EM) or for parameter estimation ("EstimationOf" ==
    "Parameters", in which case the evolution model is the identity and the
    model error Q is ignored). Optional bound constraints are enforced by
    projecting the predicted state onto the bounds.
    """
    def __init__(self):
        BasicObjects.Algorithm.__init__(self, "EXTENDEDKALMANFILTER")
        self.defineRequiredParameter(
            name     = "ConstrainedBy",
            default  = "EstimateProjection",
            typecast = str,
            message  = "Prise en compte des contraintes",
            listval  = ["EstimateProjection"],
            )
        self.defineRequiredParameter(
            name     = "EstimationOf",
            default  = "State",
            typecast = str,
            message  = "Estimation d'etat ou de parametres",
            listval  = ["State", "Parameters"],
            )
        self.defineRequiredParameter(
            name     = "StoreInternalVariables",
            default  = False,
            typecast = bool,
            message  = "Stockage des variables internes ou intermédiaires du calcul",
            )
        self.defineRequiredParameter(
            name     = "StoreSupplementaryCalculations",
            default  = [],
            typecast = tuple,
            message  = "Liste de calculs supplémentaires à stocker et/ou effectuer",
            listval  = [
                "Analysis",
                "APosterioriCorrelations",
                "APosterioriCovariance",
                "APosterioriStandardDeviations",
                "APosterioriVariances",
                "BMA",
                "CostFunctionJ",
                "CostFunctionJAtCurrentOptimum",
                "CostFunctionJb",
                "CostFunctionJbAtCurrentOptimum",
                "CostFunctionJo",
                "CostFunctionJoAtCurrentOptimum",
                "CurrentOptimum",
                "CurrentState",
                "IndexOfOptimum",
                "InnovationAtCurrentAnalysis",
                "InnovationAtCurrentState",
                "PredictedState",
                "SimulatedObservationAtCurrentAnalysis",
                "SimulatedObservationAtCurrentOptimum",
                "SimulatedObservationAtCurrentState",
                ]
            )
        self.defineRequiredParameter( # No typecast: free-form list of bounds
            name     = "Bounds",
            message  = "Liste des valeurs de bornes",
            )
        self.requireInputArguments(
            mandatory= ("Xb", "Y", "HO", "R", "B"),
            optional = ("U", "EM", "CM", "Q"),
            )

    def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
        """
        Run the EKF over the observation series Y, starting from the
        background Xb with covariance B. Results are appended to the
        corresponding entries of self.StoredVariables.
        """
        self._pre_run(Parameters, Xb, Y, R, B, Q)
        #
        # Parameter estimation needs the internal states to be stored so that
        # the optimum can be tracked and returned at the end of the run.
        if self._parameters["EstimationOf"] == "Parameters":
            self._parameters["StoreInternalVariables"] = True
        #
        # Operators
        # ---------
        Hm = HO["Direct"].appliedControledFormTo
        #
        if self._parameters["EstimationOf"] == "State":
            Mm = EM["Direct"].appliedControledFormTo
        #
        if CM is not None and "Tangent" in CM and U is not None:
            Cm = CM["Tangent"].asMatrix(Xb)
        else:
            Cm = None
        #
        # Number of steps equals the number of observation steps
        # ------------------------------------------------------
        if hasattr(Y,"stepnumber"):
            duration = Y.stepnumber()
        else:
            duration = 2
        #
        # Precompute the inverses of B and R (needed for the cost function)
        # -----------------------------------------------------------------
        if self._parameters["StoreInternalVariables"] \
            or self._toStore("CostFunctionJ") \
            or self._toStore("CostFunctionJb") \
            or self._toStore("CostFunctionJo") \
            or self._toStore("CurrentOptimum") \
            or self._toStore("APosterioriCovariance"):
            BI = B.getI()
            RI = R.getI()
        #
        # Initialisation
        # --------------
        Xn = Xb
        Pn = B
        #
        # BUGFIX: nbPreviousSteps was used below in the optimum tracking
        # (argmin over CostFunctionJ[nbPreviousSteps:]) without ever being
        # defined, raising a NameError as soon as "IndexOfOptimum",
        # "CurrentOptimum" or any "...AtCurrentOptimum" storage was requested.
        # It is the number of cost values already stored before this run
        # (non-zero when chaining runs with "nextStep").
        nbPreviousSteps = self.StoredVariables["CostFunctionJ"].stepnumber()
        #
        if len(self.StoredVariables["Analysis"])==0 or not self._parameters["nextStep"]:
            self.StoredVariables["Analysis"].store( numpy.ravel(Xn) )
            if self._toStore("APosterioriCovariance"):
                self.StoredVariables["APosterioriCovariance"].store( Pn.asfullmatrix(Xn.size) )
        #
        Xa               = Xn
        XaMin            = Xn
        previousJMinimum = numpy.finfo(float).max
        #
        for step in range(duration-1):
            # Observation at the next step (constant Y if not a series)
            if hasattr(Y,"store"):
                Ynpu = numpy.asmatrix(numpy.ravel( Y[step+1] )).T
            else:
                Ynpu = numpy.asmatrix(numpy.ravel( Y )).T
            #
            # Linearized observation operator around the current state
            Ht = HO["Tangent"].asMatrix(ValueForMethodForm = Xn)
            Ht = Ht.reshape(Ynpu.size,Xn.size) # ADAO & check shape
            Ha = HO["Adjoint"].asMatrix(ValueForMethodForm = Xn)
            Ha = Ha.reshape(Xn.size,Ynpu.size) # ADAO & check shape
            #
            # Linearized evolution operator, only needed for state estimation
            if self._parameters["EstimationOf"] == "State":
                Mt = EM["Tangent"].asMatrix(ValueForMethodForm = Xn)
                Mt = Mt.reshape(Xn.size,Xn.size) # ADAO & check shape
                Ma = EM["Adjoint"].asMatrix(ValueForMethodForm = Xn)
                Ma = Ma.reshape(Xn.size,Xn.size) # ADAO & check shape
            #
            # Control at the current step: per-step series, constant, or absent
            if U is not None:
                if hasattr(U,"store") and len(U)>1:
                    Un = numpy.asmatrix(numpy.ravel( U[step] )).T
                elif hasattr(U,"store") and len(U)==1:
                    Un = numpy.asmatrix(numpy.ravel( U[0] )).T
                else:
                    Un = numpy.asmatrix(numpy.ravel( U )).T
            else:
                Un = None
            #
            # Prediction (time update)
            if self._parameters["EstimationOf"] == "State":
                Xn_predicted = numpy.asmatrix(numpy.ravel( Mm( (Xn, Un) ) )).T
                if Cm is not None and Un is not None: # Beware: if Cm is also inside M, the control is applied twice!
                    Cm = Cm.reshape(Xn.size,Un.size) # ADAO & check shape
                    Xn_predicted = Xn_predicted + Cm * Un
                Pn_predicted = Q + Mt * Pn * Ma
            elif self._parameters["EstimationOf"] == "Parameters":
                # ---> By principle, M = Id and Q = 0
                Xn_predicted = Xn
                Pn_predicted = Pn
            #
            # Constrain the predicted state by projection onto the bounds
            if self._parameters["Bounds"] is not None and self._parameters["ConstrainedBy"] == "EstimateProjection":
                Xn_predicted = numpy.max(numpy.hstack((Xn_predicted,numpy.asmatrix(self._parameters["Bounds"])[:,0])),axis=1)
                Xn_predicted = numpy.min(numpy.hstack((Xn_predicted,numpy.asmatrix(self._parameters["Bounds"])[:,1])),axis=1)
            #
            # Innovation (measurement residual)
            if self._parameters["EstimationOf"] == "State":
                _HX          = numpy.asmatrix(numpy.ravel( Hm( (Xn_predicted, None) ) )).T
                _Innovation  = Ynpu - _HX
            elif self._parameters["EstimationOf"] == "Parameters":
                _HX          = numpy.asmatrix(numpy.ravel( Hm( (Xn_predicted, Un) ) )).T
                _Innovation  = Ynpu - _HX
                if Cm is not None and Un is not None: # Beware: if Cm is also inside H, the control is applied twice!
                    _Innovation = _Innovation - Cm * Un
            #
            # Correction (measurement update): _A is the innovation covariance
            _A = R + numpy.dot(Ht, Pn_predicted * Ha)
            _u = numpy.linalg.solve( _A , _Innovation )
            Xn = Xn_predicted + Pn_predicted * Ha * _u
            # Reuse _A instead of rebuilding the same innovation covariance
            Kn = Pn_predicted * Ha * _A.I
            Pn = Pn_predicted - Kn * Ht * Pn_predicted
            Xa, _HXa = Xn, _HX # Pointers, not copies
            #
            # ---> with analysis
            self.StoredVariables["Analysis"].store( Xa )
            if self._toStore("SimulatedObservationAtCurrentAnalysis"):
                self.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( _HXa )
            if self._toStore("InnovationAtCurrentAnalysis"):
                self.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
            # ---> with current state
            if self._parameters["StoreInternalVariables"] \
                or self._toStore("CurrentState"):
                self.StoredVariables["CurrentState"].store( Xn )
            if self._toStore("PredictedState"):
                self.StoredVariables["PredictedState"].store( Xn_predicted )
            if self._toStore("BMA"):
                self.StoredVariables["BMA"].store( Xn_predicted - Xa )
            if self._toStore("InnovationAtCurrentState"):
                self.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
            if self._toStore("SimulatedObservationAtCurrentState") \
                or self._toStore("SimulatedObservationAtCurrentOptimum"):
                self.StoredVariables["SimulatedObservationAtCurrentState"].store( _HX )
            # ---> others
            if self._parameters["StoreInternalVariables"] \
                or self._toStore("CostFunctionJ") \
                or self._toStore("CostFunctionJb") \
                or self._toStore("CostFunctionJo") \
                or self._toStore("CurrentOptimum") \
                or self._toStore("APosterioriCovariance"):
                Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
                Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
                J   = Jb + Jo
                self.StoredVariables["CostFunctionJb"].store( Jb )
                self.StoredVariables["CostFunctionJo"].store( Jo )
                self.StoredVariables["CostFunctionJ" ].store( J )
                #
                if self._toStore("IndexOfOptimum") \
                    or self._toStore("CurrentOptimum") \
                    or self._toStore("CostFunctionJAtCurrentOptimum") \
                    or self._toStore("CostFunctionJbAtCurrentOptimum") \
                    or self._toStore("CostFunctionJoAtCurrentOptimum") \
                    or self._toStore("SimulatedObservationAtCurrentOptimum"):
                    IndexMin = numpy.argmin( self.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
                if self._toStore("IndexOfOptimum"):
                    self.StoredVariables["IndexOfOptimum"].store( IndexMin )
                if self._toStore("CurrentOptimum"):
                    # NOTE(review): IndexMin indexes CostFunctionJ; "Analysis"
                    # has one extra initial store before the loop, so the two
                    # series may be shifted by one — to confirm upstream.
                    self.StoredVariables["CurrentOptimum"].store( self.StoredVariables["Analysis"][IndexMin] )
                if self._toStore("SimulatedObservationAtCurrentOptimum"):
                    self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( self.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
                if self._toStore("CostFunctionJbAtCurrentOptimum"):
                    self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJb"][IndexMin] )
                if self._toStore("CostFunctionJoAtCurrentOptimum"):
                    self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJo"][IndexMin] )
                if self._toStore("CostFunctionJAtCurrentOptimum"):
                    self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] )
            if self._toStore("APosterioriCovariance"):
                self.StoredVariables["APosterioriCovariance"].store( Pn )
            # Track the best state so far (parameter estimation only; J is
            # always defined here because StoreInternalVariables was forced
            # to True for this estimation mode)
            if self._parameters["EstimationOf"] == "Parameters" \
                and J < previousJMinimum:
                previousJMinimum    = J
                XaMin               = Xa
                if self._toStore("APosterioriCovariance"):
                    covarianceXaMin = Pn
        #
        # Final extra storage of the optimum, for parameter estimation
        # ------------------------------------------------------------
        if self._parameters["EstimationOf"] == "Parameters":
            self.StoredVariables["Analysis"].store( XaMin )
            if self._toStore("APosterioriCovariance"):
                self.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
            if self._toStore("BMA"):
                self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
        #
        self._post_run(HO)
        return 0
270
271 # ==============================================================================
if __name__ == "__main__":
    # Executed only when the module is launched directly as a script.
    banner = '\n AUTODIAGNOSTIC\n'
    print(banner)