X-Git-Url: http://git.salome-platform.org/gitweb/?a=blobdiff_plain;f=src%2FdaComposant%2FdaAlgorithms%2FUnscentedKalmanFilter.py;h=43c873069d4a1c1c165af2932bee03a23a11f993;hb=671c7e1161d628a4dd90aa1eae9188e02ac35835;hp=19bc20def6a33975dec4c420b8831fbbbc88dd3c;hpb=4790fb60acb36159350ee1cda40107e6833ead3f;p=modules%2Fadao.git

diff --git a/src/daComposant/daAlgorithms/UnscentedKalmanFilter.py b/src/daComposant/daAlgorithms/UnscentedKalmanFilter.py
index 19bc20d..43c8730 100644
--- a/src/daComposant/daAlgorithms/UnscentedKalmanFilter.py
+++ b/src/daComposant/daAlgorithms/UnscentedKalmanFilter.py
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
 #
-# Copyright (C) 2008-2017 EDF R&D
+# Copyright (C) 2008-2019 EDF R&D
 #
 # This library is free software; you can redistribute it and/or
 # modify it under the terms of the GNU Lesser General Public
@@ -82,7 +82,19 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             default = [],
             typecast = tuple,
             message = "Liste de calculs supplémentaires à stocker et/ou effectuer",
-            listval = ["APosterioriCorrelations", "APosterioriCovariance", "APosterioriStandardDeviations", "APosterioriVariances", "BMA", "CurrentState", "CostFunctionJ", "CostFunctionJb", "CostFunctionJo", "Innovation"]
+            listval = [
+                "Analysis",
+                "APosterioriCorrelations",
+                "APosterioriCovariance",
+                "APosterioriStandardDeviations",
+                "APosterioriVariances",
+                "BMA",
+                "CostFunctionJ",
+                "CostFunctionJb",
+                "CostFunctionJo",
+                "CurrentState",
+                "InnovationAtCurrentState",
+                ]
             )
         self.defineRequiredParameter( # Pas de type
             name = "Bounds",
@@ -94,7 +106,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             )
 
     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
-        self._pre_run(Parameters, R, B, Q)
+        self._pre_run(Parameters, Xb, Y, R, B, Q)
         #
         if self._parameters["EstimationOf"] == "Parameters":
             self._parameters["StoreInternalVariables"] = True
@@ -124,10 +136,10 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         #
         # Opérateurs
         # ----------
-        H = HO["Direct"].appliedControledFormTo
+        Hm = HO["Direct"].appliedControledFormTo
         #
         if self._parameters["EstimationOf"] == "State":
-            M = EM["Direct"].appliedControledFormTo
+            Mm = EM["Direct"].appliedControledFormTo
         #
         if CM is not None and "Tangent" in CM and U is not None:
             Cm = CM["Tangent"].asMatrix(Xb)
@@ -143,23 +155,24 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         #
         # Précalcul des inversions de B et R
         # ----------------------------------
-        if self._parameters["StoreInternalVariables"]:
+        if self._parameters["StoreInternalVariables"] \
+            or self._toStore("CostFunctionJ") \
+            or self._toStore("CostFunctionJb") \
+            or self._toStore("CostFunctionJo"):
             BI = B.getI()
             RI = R.getI()
         #
         # Initialisation
         # --------------
         Xn = Xb
-        if hasattr(B,"asfullmatrix"):
-            Pn = B.asfullmatrix(Xn.size)
-        else:
-            Pn = B
+        if hasattr(B,"asfullmatrix"): Pn = B.asfullmatrix(Xn.size)
+        else:                         Pn = B
         #
         self.StoredVariables["Analysis"].store( Xn.A1 )
-        if "APosterioriCovariance" in self._parameters["StoreSupplementaryCalculations"]:
+        if self._toStore("APosterioriCovariance"):
             self.StoredVariables["APosterioriCovariance"].store( Pn )
             covarianceXa = Pn
-        Xa = Xn
+        Xa = XaMin = Xb
         previousJMinimum = numpy.finfo(float).max
         #
         for step in range(duration-1):
@@ -190,7 +203,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             XEtnnp = []
             for point in range(nbSpts):
                 if self._parameters["EstimationOf"] == "State":
-                    XEtnnpi = numpy.asmatrix(numpy.ravel( M( (Xnp[:,point], Un) ) )).T
+                    XEtnnpi = numpy.asmatrix(numpy.ravel( Mm( (Xnp[:,point], Un) ) )).T
                     if Cm is not None and Un is not None: # Attention : si Cm est aussi dans M, doublon !
                         Cm = Cm.reshape(Xn.size,Un.size) # ADAO & check shape
                         XEtnnpi = XEtnnpi + Cm * Un
@@ -229,9 +242,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             Ynnp = []
             for point in range(nbSpts):
                 if self._parameters["EstimationOf"] == "State":
-                    Ynnpi = numpy.asmatrix(numpy.ravel( H( (Xnnp[:,point], None) ) )).T
+                    Ynnpi = numpy.asmatrix(numpy.ravel( Hm( (Xnnp[:,point], None) ) )).T
                 elif self._parameters["EstimationOf"] == "Parameters":
-                    Ynnpi = numpy.asmatrix(numpy.ravel( H( (Xnnp[:,point], Un) ) )).T
+                    Ynnpi = numpy.asmatrix(numpy.ravel( Hm( (Xnnp[:,point], Un) ) )).T
                 Ynnp.append( Ynnpi )
             Ynnp = numpy.hstack( Ynnp )
             #
@@ -255,43 +268,47 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             if self._parameters["Bounds"] is not None and self._parameters["ConstrainedBy"] == "EstimateProjection":
                 Xn = numpy.max(numpy.hstack((Xn,numpy.asmatrix(self._parameters["Bounds"])[:,0])),axis=1)
                 Xn = numpy.min(numpy.hstack((Xn,numpy.asmatrix(self._parameters["Bounds"])[:,1])),axis=1)
+            Xa = Xn # Pointeurs
             #
-            self.StoredVariables["Analysis"].store( Xn.A1 )
-            if "APosterioriCovariance" in self._parameters["StoreSupplementaryCalculations"]:
+            # ---> avec analysis
+            self.StoredVariables["Analysis"].store( Xa )
+            if self._toStore("APosterioriCovariance"):
                 self.StoredVariables["APosterioriCovariance"].store( Pn )
-            if "Innovation" in self._parameters["StoreSupplementaryCalculations"]:
-                self.StoredVariables["Innovation"].store( numpy.ravel( d.A1 ) )
-            if self._parameters["StoreInternalVariables"]:
-                Jb = 0.5 * (Xn - Xb).T * BI * (Xn - Xb)
-                Jo = 0.5 * d.T * RI * d
-                J = float( Jb ) + float( Jo )
-            if self._parameters["StoreInternalVariables"] or "CurrentState" in self._parameters["StoreSupplementaryCalculations"]:
-                self.StoredVariables["CurrentState"].store( Xn )
+            # ---> avec current state
+            if self._toStore("InnovationAtCurrentState"):
+                self.StoredVariables["InnovationAtCurrentState"].store( d )
+            if self._parameters["StoreInternalVariables"] \
+                or self._toStore("CurrentState"):
+                self.StoredVariables["CurrentState"].store( Xn )
+            if self._parameters["StoreInternalVariables"] \
+                or self._toStore("CostFunctionJ") \
+                or self._toStore("CostFunctionJb") \
+                or self._toStore("CostFunctionJo"):
+                Jb = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
+                Jo = float( 0.5 * d.T * RI * d )
+                J = Jb + Jo
                 self.StoredVariables["CostFunctionJb"].store( Jb )
                 self.StoredVariables["CostFunctionJo"].store( Jo )
                 self.StoredVariables["CostFunctionJ" ].store( J )
-                if J < previousJMinimum:
-                    previousJMinimum = J
-                    Xa = Xn
-                    if "APosterioriCovariance" in self._parameters["StoreSupplementaryCalculations"]:
-                        covarianceXa = Pn
-            else:
-                Xa = Xn
-            #
+            if self._parameters["EstimationOf"] == "Parameters" \
+                and J < previousJMinimum:
+                previousJMinimum = J
+                XaMin = Xa
+                if self._toStore("APosterioriCovariance"):
+                    covarianceXaMin = Pn
         #
-        # Stockage supplementaire de l'optimum en estimation de parametres
-        # ----------------------------------------------------------------
+        # Stockage final supplémentaire de l'optimum en estimation de paramètres
+        # ----------------------------------------------------------------------
         if self._parameters["EstimationOf"] == "Parameters":
-            self.StoredVariables["Analysis"].store( Xa.A1 )
-            if "APosterioriCovariance" in self._parameters["StoreSupplementaryCalculations"]:
-                self.StoredVariables["APosterioriCovariance"].store( covarianceXa )
-        #
-        if "BMA" in self._parameters["StoreSupplementaryCalculations"]:
-            self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
+            self.StoredVariables["Analysis"].store( XaMin )
+            if self._toStore("APosterioriCovariance"):
+                self.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
+            if self._toStore("BMA"):
+                self.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
         #
         self._post_run(HO)
         return 0
 
 # ==============================================================================
 if __name__ == "__main__":
-    print('\n AUTODIAGNOSTIC \n')
+    print('\n AUTODIAGNOSTIC\n')
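
The hunks above replace direct membership tests on "StoreSupplementaryCalculations" with the self._toStore(...) helper, compute the cost terms Jb = 0.5 (Xa - Xb)^T B^-1 (Xa - Xb) and Jo = 0.5 d^T R^-1 d only when they are requested, and track the best parameter estimate in XaMin / previousJMinimum. The short sketch below illustrates these two calculations outside of ADAO; the function names (cost_terms, project_on_bounds) and the toy numbers are illustrative only and are not part of the ADAO API.

    import numpy

    def cost_terms(Xa, Xb, d, BI, RI):
        # Background misfit Jb, observation misfit Jo and total J = Jb + Jo,
        # mirroring the CostFunctionJb / CostFunctionJo / CostFunctionJ values
        # stored in the loop above (BI = B^-1, RI = R^-1, d = innovation).
        Xa, Xb, d = map(numpy.ravel, (Xa, Xb, d))
        Jb = 0.5 * float((Xa - Xb) @ BI @ (Xa - Xb))
        Jo = 0.5 * float(d @ RI @ d)
        return Jb, Jo, Jb + Jo

    def project_on_bounds(Xn, bounds):
        # Componentwise clipping of the state to [lower, upper]; equivalent to
        # the hstack/max/min idiom used above when ConstrainedBy is
        # "EstimateProjection".
        bounds = numpy.asarray(bounds, dtype=float)   # shape (n, 2)
        return numpy.clip(numpy.ravel(Xn), bounds[:, 0], bounds[:, 1])

    if __name__ == "__main__":
        Xb = numpy.zeros(2)                            # background state
        Xa = numpy.array([0.4, 1.2])                   # analysis state
        d  = numpy.array([0.1])                        # innovation Y - H(Xa)
        BI = numpy.linalg.inv(numpy.eye(2))            # B = I
        RI = numpy.linalg.inv(numpy.array([[0.25]]))   # R = 0.25
        Jb, Jo, J = cost_terms(Xa, Xb, d, BI, RI)
        print("Jb =", Jb, " Jo =", Jo, " J =", J)
        print("projected state:", project_on_bounds(Xa, [[-1.0, 0.3], [-1.0, 2.0]]))

In the patched algorithm itself these quantities are only evaluated when StoreInternalVariables is set or one of the CostFunctionJ* outputs is requested via self._toStore(...), which is why the inversions BI and RI are also guarded by the same condition.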