From 506741362ba237989f4ebdec5d2592fa38ea779a Mon Sep 17 00:00:00 2001 From: Jean-Philippe ARGAUD Date: Sun, 13 Feb 2022 07:19:12 +0100 Subject: [PATCH] Code improvements, review and simplifications (3) --- src/daComposant/daAlgorithms/3DVAR.py | 5 +- src/daComposant/daAlgorithms/Atoms/c2ukf.py | 29 +-- src/daComposant/daAlgorithms/Atoms/cekf.py | 52 ++--- src/daComposant/daAlgorithms/Atoms/ecwblue.py | 44 +++- .../daAlgorithms/Atoms/ecwexblue.py | 46 ++-- src/daComposant/daAlgorithms/Atoms/ecwlls.py | 12 +- src/daComposant/daAlgorithms/Atoms/ecwnlls.py | 5 +- .../daAlgorithms/Atoms/ecwstdkf.py | 151 ++++++++++++ src/daComposant/daAlgorithms/Atoms/exkf.py | 49 ++-- .../daAlgorithms/Atoms/incr3dvar.py | 5 +- .../daAlgorithms/Atoms/psas3dvar.py | 14 +- .../daAlgorithms/Atoms/std3dvar.py | 7 +- .../daAlgorithms/Atoms/std4dvar.py | 2 +- src/daComposant/daAlgorithms/Atoms/stdkf.py | 219 ------------------ src/daComposant/daAlgorithms/Atoms/uskf.py | 26 +-- .../daAlgorithms/Atoms/van3dvar.py | 5 +- src/daComposant/daAlgorithms/Blue.py | 4 +- src/daComposant/daAlgorithms/ExtendedBlue.py | 3 +- src/daComposant/daAlgorithms/KalmanFilter.py | 28 ++- .../daAlgorithms/LinearLeastSquares.py | 3 +- .../daAlgorithms/NonLinearLeastSquares.py | 3 +- .../daAlgorithms/UnscentedKalmanFilter.py | 1 - src/daComposant/daCore/NumericObjects.py | 62 +++-- 23 files changed, 389 insertions(+), 386 deletions(-) create mode 100644 src/daComposant/daAlgorithms/Atoms/ecwstdkf.py delete mode 100644 src/daComposant/daAlgorithms/Atoms/stdkf.py diff --git a/src/daComposant/daAlgorithms/3DVAR.py b/src/daComposant/daAlgorithms/3DVAR.py index 1500028..50f993e 100644 --- a/src/daComposant/daAlgorithms/3DVAR.py +++ b/src/daComposant/daAlgorithms/3DVAR.py @@ -120,6 +120,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): "CurrentIterationNumber", "CurrentOptimum", "CurrentState", + "CurrentStepNumber", "ForecastState", "IndexOfOptimum", "Innovation", @@ -207,8 +208,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): NumericObjects.multiXOsteps(self, Xb, Y, U, HO, EM, CM, R, B, Q, psas3dvar.psas3dvar) # #-------------------------- - elif self._parameters["Variant"] in ["OneCorrection", "OneCorrection3DVAR-Std"]: - std3dvar.std3dvar(self, Xb, Y, HO, R, B) + elif self._parameters["Variant"] == "OneCorrection": + std3dvar.std3dvar(self, Xb, Y, U, HO, CM, R, B) # #-------------------------- else: diff --git a/src/daComposant/daAlgorithms/Atoms/c2ukf.py b/src/daComposant/daAlgorithms/Atoms/c2ukf.py index cd75087..bc02f5b 100644 --- a/src/daComposant/daAlgorithms/Atoms/c2ukf.py +++ b/src/daComposant/daAlgorithms/Atoms/c2ukf.py @@ -26,8 +26,7 @@ __doc__ = """ __author__ = "Jean-Philippe ARGAUD" import math, numpy, scipy -from daCore.NumericObjects import ForceNumericBounds -from daCore.NumericObjects import ApplyBounds +from daCore.NumericObjects import ApplyBounds, ForceNumericBounds from daCore.PlatformInfo import PlatformInfo mpr = PlatformInfo().MachinePrecision() mfp = PlatformInfo().MaximumPrecision() @@ -64,17 +63,6 @@ def c2ukf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q): Wc = numpy.array( Ww ) Wc[0] = Lambda / (L + Lambda) + (1. 
- Alpha**2 + Beta) # - # Opérateurs - Hm = HO["Direct"].appliedControledFormTo - # - if selfA._parameters["EstimationOf"] == "State": - Mm = EM["Direct"].appliedControledFormTo - # - if CM is not None and "Tangent" in CM and U is not None: - Cm = CM["Tangent"].asMatrix(Xb) - else: - Cm = None - # # Durée d'observation et tailles if hasattr(Y,"stepnumber"): duration = Y.stepnumber() @@ -115,10 +103,6 @@ def c2ukf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q): previousJMinimum = numpy.finfo(float).max # for step in range(duration-1): - if hasattr(Y,"store"): - Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1)) - else: - Ynpu = numpy.ravel( Y ).reshape((__p,1)) # if U is not None: if hasattr(U,"store") and len(U)>1: @@ -130,6 +114,11 @@ def c2ukf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q): else: Un = None # + if CM is not None and "Tangent" in CM and U is not None: + Cm = CM["Tangent"].asMatrix(Xn) + else: + Cm = None + # Pndemi = numpy.real(scipy.linalg.sqrtm(Pn)) Xnp = numpy.hstack([Xn, Xn+Gamma*Pndemi, Xn-Gamma*Pndemi]) nbSpts = 2*Xn.size+1 @@ -141,6 +130,7 @@ def c2ukf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q): XEtnnp = [] for point in range(nbSpts): if selfA._parameters["EstimationOf"] == "State": + Mm = EM["Direct"].appliedControledFormTo XEtnnpi = numpy.asarray( Mm( (Xnp[:,point], Un) ) ).reshape((-1,1)) if Cm is not None and Un is not None: # Attention : si Cm est aussi dans M, doublon ! Cm = Cm.reshape(Xn.size,Un.size) # ADAO & check shape @@ -174,6 +164,7 @@ def c2ukf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q): for point in range(nbSpts): Xnnp[:,point] = ApplyBounds( Xnnp[:,point], selfA._parameters["Bounds"] ) # + Hm = HO["Direct"].appliedControledFormTo Ynnp = [] for point in range(nbSpts): if selfA._parameters["EstimationOf"] == "State": @@ -191,6 +182,10 @@ def c2ukf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q): Pyyn += Wc[i] * ((Ynnp[:,point]-Yncm).reshape((-1,1)) * (Ynnp[:,point]-Yncm)) Pxyn += Wc[i] * ((Xnnp[:,point]-Xncm).reshape((-1,1)) * (Ynnp[:,point]-Yncm)) # + if hasattr(Y,"store"): + Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1)) + else: + Ynpu = numpy.ravel( Y ).reshape((__p,1)) _Innovation = Ynpu - Yncm.reshape((-1,1)) if selfA._parameters["EstimationOf"] == "Parameters": if Cm is not None and Un is not None: # Attention : si Cm est aussi dans H, doublon ! 
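Editorial note: the scaled sigma-point weights set up at the top of c2ukf.py here (and of uskf.py further down in this patch) follow the classical unscented transform. A minimal standalone sketch, assuming generic tuning values Alpha, Beta, Kappa (the names mirror the patch; everything else is local to the example):

import numpy

def unscented_weights(L, Alpha=1., Beta=2., Kappa=0.):
    "Scaled unscented weights for 2*L+1 sigma points, as in c2ukf/uskf."
    Lambda = Alpha**2 * (L + Kappa) - L
    Gamma  = numpy.sqrt(L + Lambda)                    # sigma-point spread factor
    Wm = numpy.full(2*L + 1, 1. / (2.*(L + Lambda)))   # weights for the mean
    Wm[0] = Lambda / (L + Lambda)
    Wc = Wm.copy()                                     # weights for the covariance
    Wc[0] = Lambda / (L + Lambda) + (1. - Alpha**2 + Beta)
    return Wm, Wc, Gamma

Wm, Wc, Gamma = unscented_weights(L=3)
assert abs(Wm.sum() - 1.) < 1.e-12                     # mean weights sum to one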
diff --git a/src/daComposant/daAlgorithms/Atoms/cekf.py b/src/daComposant/daAlgorithms/Atoms/cekf.py index f4bc1f8..c668b5c 100644 --- a/src/daComposant/daAlgorithms/Atoms/cekf.py +++ b/src/daComposant/daAlgorithms/Atoms/cekf.py @@ -26,8 +26,7 @@ __doc__ = """ __author__ = "Jean-Philippe ARGAUD" import numpy -from daCore.NumericObjects import ForceNumericBounds -from daCore.NumericObjects import ApplyBounds +from daCore.NumericObjects import ApplyBounds, ForceNumericBounds from daCore.PlatformInfo import PlatformInfo mpr = PlatformInfo().MachinePrecision() mfp = PlatformInfo().MaximumPrecision() @@ -41,17 +40,6 @@ def cekf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q): selfA._parameters["StoreInternalVariables"] = True selfA._parameters["Bounds"] = ForceNumericBounds( selfA._parameters["Bounds"] ) # - # Opérateurs - H = HO["Direct"].appliedControledFormTo - # - if selfA._parameters["EstimationOf"] == "State": - M = EM["Direct"].appliedControledFormTo - # - if CM is not None and "Tangent" in CM and U is not None: - Cm = CM["Tangent"].asMatrix(Xb) - else: - Cm = None - # # Durée d'observation et tailles if hasattr(Y,"stepnumber"): duration = Y.stepnumber() @@ -93,21 +81,6 @@ def cekf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q): previousJMinimum = numpy.finfo(float).max # for step in range(duration-1): - if hasattr(Y,"store"): - Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1)) - else: - Ynpu = numpy.ravel( Y ).reshape((__p,1)) - # - Ht = HO["Tangent"].asMatrix(ValueForMethodForm = Xn) - Ht = Ht.reshape(Ynpu.size,Xn.size) # ADAO & check shape - Ha = HO["Adjoint"].asMatrix(ValueForMethodForm = Xn) - Ha = Ha.reshape(Xn.size,Ynpu.size) # ADAO & check shape - # - if selfA._parameters["EstimationOf"] == "State": - Mt = EM["Tangent"].asMatrix(ValueForMethodForm = Xn) - Mt = Mt.reshape(Xn.size,Xn.size) # ADAO & check shape - Ma = EM["Adjoint"].asMatrix(ValueForMethodForm = Xn) - Ma = Ma.reshape(Xn.size,Xn.size) # ADAO & check shape # if U is not None: if hasattr(U,"store") and len(U)>1: @@ -123,8 +96,14 @@ def cekf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q): Xn = ApplyBounds( Xn, selfA._parameters["Bounds"] ) # if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast + Mt = EM["Tangent"].asMatrix(Xn) + Mt = Mt.reshape(Xn.size,Xn.size) # ADAO & check shape + Ma = EM["Adjoint"].asMatrix(Xn) + Ma = Ma.reshape(Xn.size,Xn.size) # ADAO & check shape + M = EM["Direct"].appliedControledFormTo Xn_predicted = numpy.ravel( M( (Xn, Un) ) ).reshape((__n,1)) - if Cm is not None and Un is not None: # Attention : si Cm est aussi dans M, doublon ! + if CM is not None and "Tangent" in CM and Un is not None: # Attention : si Cm est aussi dans M, doublon ! 
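# ----------------------------------------------------------------------------
# Editorial sketch, not part of the patch: the change above moves the Tangent
# and Adjoint evaluations of EM inside the time loop, so the extended filter
# relinearizes around the current Xn at every step (and, further down, HO
# around Xn_predicted) instead of once around the background Xb. A toy 1D
# illustration of that per-step relinearization, with a hand-coded Jacobian:
import numpy
def M(x):  return numpy.sin(x)           # toy nonlinear evolution model
def Mt(x): return numpy.cos(x)           # its tangent (1x1 Jacobian) at x
Q, Pn, Xn = 0.01, 0.5, 1.2
for _ in range(3):
    Mtn = Mt(Xn)                         # re-evaluated at the current Xn
    Xn  = M(Xn)                          # forecast of the state
    Pn  = Q + Mtn * Pn * Mtn             # i.e. Pn_predicted = Q + Mt @ (Pn @ Ma)
# ----------------------------------------------------------------------------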
+ Cm = CM["Tangent"].asMatrix(Xn_predicted) Cm = Cm.reshape(__n,Un.size) # ADAO & check shape Xn_predicted = Xn_predicted + Cm @ Un Pn_predicted = Q + Mt * (Pn * Ma) @@ -136,13 +115,26 @@ def cekf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q): if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection": Xn_predicted = ApplyBounds( Xn_predicted, selfA._parameters["Bounds"] ) # + if hasattr(Y,"store"): + Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1)) + else: + Ynpu = numpy.ravel( Y ).reshape((__p,1)) + # + Ht = HO["Tangent"].asMatrix(Xn_predicted) + Ht = Ht.reshape(Ynpu.size,Xn.size) # ADAO & check shape + Ha = HO["Adjoint"].asMatrix(Xn_predicted) + Ha = Ha.reshape(Xn.size,Ynpu.size) # ADAO & check shape + H = HO["Direct"].appliedControledFormTo + # if selfA._parameters["EstimationOf"] == "State": HX_predicted = numpy.ravel( H( (Xn_predicted, None) ) ).reshape((__p,1)) _Innovation = Ynpu - HX_predicted elif selfA._parameters["EstimationOf"] == "Parameters": HX_predicted = numpy.ravel( H( (Xn_predicted, Un) ) ).reshape((__p,1)) _Innovation = Ynpu - HX_predicted - if Cm is not None and Un is not None: # Attention : si Cm est aussi dans H, doublon ! + if CM is not None and "Tangent" in CM and Un is not None: # Attention : si Cm est aussi dans H, doublon ! + Cm = CM["Tangent"].asMatrix(Xn_predicted) + Cm = Cm.reshape(__n,Un.size) # ADAO & check shape _Innovation = _Innovation - Cm @ Un # Kn = Pn_predicted * Ha * numpy.linalg.inv(R + numpy.dot(Ht, Pn_predicted * Ha)) diff --git a/src/daComposant/daAlgorithms/Atoms/ecwblue.py b/src/daComposant/daAlgorithms/Atoms/ecwblue.py index e21dcf0..0e62f6a 100644 --- a/src/daComposant/daAlgorithms/Atoms/ecwblue.py +++ b/src/daComposant/daAlgorithms/Atoms/ecwblue.py @@ -31,9 +31,9 @@ from daCore.PlatformInfo import PlatformInfo mpr = PlatformInfo().MachinePrecision() # ============================================================================== -def ecwblue(selfA, Xb, Y, HO, R, B): +def ecwblue(selfA, Xb, Y, U, HO, CM, R, B, __storeState = False): """ - BLUE + Correction """ # # Initialisations @@ -53,21 +53,41 @@ def ecwblue(selfA, Xb, Y, HO, R, B): if max(Y.shape) != max(HXb.shape): raise ValueError("The shapes %s of observations Y and %s of observed calculation H(X) are different, they have to be identical."%(Y.shape,HXb.shape)) # - BI = B.getI() - RI = R.getI() + if selfA._parameters["StoreInternalVariables"] or \ + selfA._toStore("CostFunctionJ") or selfA._toStore("CostFunctionJAtCurrentOptimum") or \ + selfA._toStore("CostFunctionJb") or selfA._toStore("CostFunctionJbAtCurrentOptimum") or \ + selfA._toStore("CostFunctionJo") or selfA._toStore("CostFunctionJoAtCurrentOptimum") or \ + selfA._toStore("MahalanobisConsistency") or \ + (Y.size > Xb.size): + if isinstance(B,numpy.ndarray): + BI = numpy.linalg.inv(B) + else: + BI = B.getI() + RI = R.getI() # Innovation = Y - HXb + if selfA._parameters["EstimationOf"] == "Parameters": + if CM is not None and "Tangent" in CM and U is not None: # Attention : si Cm est aussi dans H, doublon ! 
+ Cm = CM["Tangent"].asMatrix(Xb) + Cm = Cm.reshape(Xb.size,U.size) # ADAO & check shape + Innovation = Innovation - (Cm @ U).reshape((-1,1)) # - # Calcul de la matrice de gain et de l'analyse - # -------------------------------------------- + # Calcul de l'analyse + # ------------------- if Y.size <= Xb.size: - _A = R + numpy.dot(Hm, B * Ha) + _HNHt = numpy.dot(Hm, B @ Ha) + _A = R + _HNHt _u = numpy.linalg.solve( _A , Innovation ) - Xa = Xb + B * Ha * _u + Xa = Xb + (B @ (Ha @ _u)).reshape((-1,1)) else: - _A = BI + numpy.dot(Ha, RI * Hm) - _u = numpy.linalg.solve( _A , numpy.dot(Ha, RI * Innovation) ) - Xa = Xb + _u + _HtRH = numpy.dot(Ha, RI @ Hm) + _A = BI + _HtRH + _u = numpy.linalg.solve( _A , numpy.dot(Ha, RI @ Innovation) ) + Xa = Xb + _u.reshape((-1,1)) + # + if __storeState: selfA._setInternalState("Xn", Xa) + #-------------------------- + # selfA.StoredVariables["Analysis"].store( Xa ) # # Calcul de la fonction coût @@ -91,7 +111,7 @@ def ecwblue(selfA, Xb, Y, HO, R, B): selfA._toStore("CostFunctionJb") or selfA._toStore("CostFunctionJbAtCurrentOptimum") or \ selfA._toStore("CostFunctionJo") or selfA._toStore("CostFunctionJoAtCurrentOptimum") or \ selfA._toStore("MahalanobisConsistency"): - Jb = float( 0.5 * (Xa - Xb).T * (BI * (Xa - Xb)) ) + Jb = float( 0.5 * (Xa - Xb).T @ (BI @ (Xa - Xb)) ) Jo = float( 0.5 * oma.T * (RI * oma) ) J = Jb + Jo selfA.StoredVariables["CostFunctionJb"].store( Jb ) diff --git a/src/daComposant/daAlgorithms/Atoms/ecwexblue.py b/src/daComposant/daAlgorithms/Atoms/ecwexblue.py index b94c4aa..8ddefc9 100644 --- a/src/daComposant/daAlgorithms/Atoms/ecwexblue.py +++ b/src/daComposant/daAlgorithms/Atoms/ecwexblue.py @@ -31,9 +31,9 @@ from daCore.PlatformInfo import PlatformInfo mpr = PlatformInfo().MachinePrecision() # ============================================================================== -def ecwexblue(selfA, Xb, Y, HO, R, B): +def ecwexblue(selfA, Xb, Y, U, HO, CM, R, B, __storeState = False): """ - Extended BLUE + Correction """ # # Initialisations @@ -54,21 +54,41 @@ def ecwexblue(selfA, Xb, Y, HO, R, B): if max(Y.shape) != max(HXb.shape): raise ValueError("The shapes %s of observations Y and %s of observed calculation H(X) are different, they have to be identical."%(Y.shape,HXb.shape)) # - BI = B.getI() - RI = R.getI() + if selfA._parameters["StoreInternalVariables"] or \ + selfA._toStore("CostFunctionJ") or selfA._toStore("CostFunctionJAtCurrentOptimum") or \ + selfA._toStore("CostFunctionJb") or selfA._toStore("CostFunctionJbAtCurrentOptimum") or \ + selfA._toStore("CostFunctionJo") or selfA._toStore("CostFunctionJoAtCurrentOptimum") or \ + selfA._toStore("MahalanobisConsistency") or \ + (Y.size > Xb.size): + if isinstance(B,numpy.ndarray): + BI = numpy.linalg.inv(B) + else: + BI = B.getI() + RI = R.getI() # Innovation = Y - HXb + if selfA._parameters["EstimationOf"] == "Parameters": + if CM is not None and "Tangent" in CM and U is not None: # Attention : si Cm est aussi dans H, doublon ! 
+ Cm = CM["Tangent"].asMatrix(Xb) + Cm = Cm.reshape(Xb.size,U.size) # ADAO & check shape + Innovation = Innovation - (Cm @ U).reshape((-1,1)) # - # Calcul de la matrice de gain et de l'analyse - # -------------------------------------------- + # Calcul de l'analyse + # ------------------- if Y.size <= Xb.size: - _A = R + numpy.dot(Hm, B * Ha) + _HNHt = numpy.dot(Hm, B @ Ha) + _A = R + _HNHt _u = numpy.linalg.solve( _A , Innovation ) - Xa = Xb + B * Ha * _u + Xa = Xb + (B @ (Ha @ _u)).reshape((-1,1)) else: - _A = BI + numpy.dot(Ha, RI * Hm) - _u = numpy.linalg.solve( _A , numpy.dot(Ha, RI * Innovation) ) - Xa = Xb + _u + _HtRH = numpy.dot(Ha, RI @ Hm) + _A = BI + _HtRH + _u = numpy.linalg.solve( _A , numpy.dot(Ha, RI @ Innovation) ) + Xa = Xb + _u.reshape((-1,1)) + # + if __storeState: selfA._setInternalState("Xn", Xa) + #-------------------------- + # selfA.StoredVariables["Analysis"].store( Xa ) # # Calcul de la fonction coût @@ -92,7 +112,7 @@ def ecwexblue(selfA, Xb, Y, HO, R, B): selfA._toStore("CostFunctionJb") or selfA._toStore("CostFunctionJbAtCurrentOptimum") or \ selfA._toStore("CostFunctionJo") or selfA._toStore("CostFunctionJoAtCurrentOptimum") or \ selfA._toStore("MahalanobisConsistency"): - Jb = float( 0.5 * (Xa - Xb).T * (BI * (Xa - Xb)) ) + Jb = float( 0.5 * (Xa - Xb).T @ (BI @ (Xa - Xb)) ) Jo = float( 0.5 * oma.T * (RI * oma) ) J = Jb + Jo selfA.StoredVariables["CostFunctionJb"].store( Jb ) @@ -146,7 +166,7 @@ def ecwexblue(selfA, Xb, Y, HO, R, B): if selfA._toStore("MahalanobisConsistency"): selfA.StoredVariables["MahalanobisConsistency"].store( float( 2.*J/Innovation.size ) ) if selfA._toStore("SimulationQuantiles"): - HtM = HO["Tangent"].asMatrix(ValueForMethodForm = Xa) + HtM = HO["Tangent"].asMatrix(Xa) HtM = HtM.reshape(Y.size,Xa.size) # ADAO & check shape QuantilesEstimations(selfA, A, Xa, HXa, H, HtM) if selfA._toStore("SimulatedObservationAtBackground"): diff --git a/src/daComposant/daAlgorithms/Atoms/ecwlls.py b/src/daComposant/daAlgorithms/Atoms/ecwlls.py index 9971ac8..437135a 100644 --- a/src/daComposant/daAlgorithms/Atoms/ecwlls.py +++ b/src/daComposant/daAlgorithms/Atoms/ecwlls.py @@ -26,9 +26,9 @@ __doc__ = """ __author__ = "Jean-Philippe ARGAUD" # ============================================================================== -def ecwlls(selfA, Xb, Y, HO, R, B): +def ecwlls(selfA, Xb, Y, U, HO, CM, R, B, __storeState = False): """ - Linear Least Squares + Correction """ # # Initialisations @@ -43,10 +43,14 @@ def ecwlls(selfA, Xb, Y, HO, R, B): else: RI = R.getI() # - # Calcul de la matrice de gain et de l'analyse - # -------------------------------------------- + # Calcul de l'analyse + # ------------------- K = (Ha * (RI * Hm)).I * Ha * RI Xa = K * Y + # + if __storeState: selfA._setInternalState("Xn", Xa) + #-------------------------- + # selfA.StoredVariables["Analysis"].store( Xa ) # # Calcul de la fonction coût diff --git a/src/daComposant/daAlgorithms/Atoms/ecwnlls.py b/src/daComposant/daAlgorithms/Atoms/ecwnlls.py index 013745d..ec3c4cb 100644 --- a/src/daComposant/daAlgorithms/Atoms/ecwnlls.py +++ b/src/daComposant/daAlgorithms/Atoms/ecwnlls.py @@ -28,9 +28,9 @@ __author__ = "Jean-Philippe ARGAUD" import numpy, scipy, scipy.optimize, scipy.version # ============================================================================== -def ecwnlls(selfA, Xb, Y, HO, R, B): +def ecwnlls(selfA, Xb, Y, U, HO, CM, R, B, __storeState = False): """ - Non Linear Least Squares + Correction """ # # Initialisations @@ -217,6 +217,7 @@ def ecwnlls(selfA, Xb, Y, HO, R, 
B): Minimum = selfA.StoredVariables["CurrentState"][IndexMin] # Xa = Minimum + if __storeState: selfA._setInternalState("Xn", Xa) #-------------------------- # selfA.StoredVariables["Analysis"].store( Xa ) diff --git a/src/daComposant/daAlgorithms/Atoms/ecwstdkf.py b/src/daComposant/daAlgorithms/Atoms/ecwstdkf.py new file mode 100644 index 0000000..26adfb5 --- /dev/null +++ b/src/daComposant/daAlgorithms/Atoms/ecwstdkf.py @@ -0,0 +1,151 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2008-2022 EDF R&D +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +# +# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com +# +# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D + +__doc__ = """ + Standard Kalman Filter +""" +__author__ = "Jean-Philippe ARGAUD" + +import numpy +from daCore.PlatformInfo import PlatformInfo +mpr = PlatformInfo().MachinePrecision() + +# ============================================================================== +def ecwstdkf(selfA, Xb, Y, U, HO, CM, R, B, __storeState = False): + """ + Correction + """ + if selfA._parameters["EstimationOf"] == "Parameters": + selfA._parameters["StoreInternalVariables"] = True + # + # Initialisations + # --------------- + Hm = HO["Tangent"].asMatrix(Xb) + Hm = Hm.reshape(Y.size,Xb.size) # ADAO & check shape + Ha = HO["Adjoint"].asMatrix(Xb) + Ha = Ha.reshape(Xb.size,Y.size) # ADAO & check shape + # + HXb = Hm @ Xb + HXb = HXb.reshape((-1,1)) + if Y.size != HXb.size: + raise ValueError("The size %i of observations Y and %i of observed calculation H(X) are different, they have to be identical."%(Y.size,HXb.size)) + if max(Y.shape) != max(HXb.shape): + raise ValueError("The shapes %s of observations Y and %s of observed calculation H(X) are different, they have to be identical."%(Y.shape,HXb.shape)) + # + if selfA._parameters["StoreInternalVariables"] or \ + selfA._toStore("CostFunctionJ") or selfA._toStore("CostFunctionJAtCurrentOptimum") or \ + selfA._toStore("CostFunctionJb") or selfA._toStore("CostFunctionJbAtCurrentOptimum") or \ + selfA._toStore("CostFunctionJo") or selfA._toStore("CostFunctionJoAtCurrentOptimum") or \ + selfA._toStore("CurrentOptimum") or selfA._toStore("APosterioriCovariance") or \ + (Y.size > Xb.size): + if isinstance(B,numpy.ndarray): + BI = numpy.linalg.inv(B) + else: + BI = B.getI() + RI = R.getI() + # + Innovation = Y - HXb + if selfA._parameters["EstimationOf"] == "Parameters": + if CM is not None and "Tangent" in CM and U is not None: # Attention : si Cm est aussi dans H, doublon ! 
+ Cm = CM["Tangent"].asMatrix(Xb) + Cm = Cm.reshape(Xb.size,U.size) # ADAO & check shape + Innovation = Innovation - (Cm @ U).reshape((-1,1)) + # + # Calcul de l'analyse + # ------------------- + if Y.size <= Xb.size: + _HNHt = numpy.dot(Hm, B @ Ha) + _A = R + _HNHt + _u = numpy.linalg.solve( _A , Innovation ) + Xa = Xb + (B @ (Ha @ _u)).reshape((-1,1)) + K = B @ (Ha @ numpy.linalg.inv(_A)) + else: + _HtRH = numpy.dot(Ha, RI @ Hm) + _A = BI + _HtRH + _u = numpy.linalg.solve( _A , numpy.dot(Ha, RI @ Innovation) ) + Xa = Xb + _u.reshape((-1,1)) + K = numpy.linalg.inv(_A) @ (Ha @ RI.asfullmatrix(Y.size)) + # + Pa = B - K @ (Hm @ B) + Pa = (Pa + Pa.T) * 0.5 # Symétrie + Pa = Pa + mpr*numpy.trace( Pa ) * numpy.identity(Xa.size) # Positivité + # + if __storeState: selfA._setInternalState("Xn", Xa) + if __storeState: selfA._setInternalState("Pn", Pa) + #-------------------------- + # + selfA.StoredVariables["Analysis"].store( Xa ) + if selfA._toStore("SimulatedObservationAtCurrentAnalysis"): + selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( Hm @ Xa ) + if selfA._toStore("InnovationAtCurrentAnalysis"): + selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( Innovation ) + # ---> avec current state + if selfA._parameters["StoreInternalVariables"] \ + or selfA._toStore("CurrentState"): + selfA.StoredVariables["CurrentState"].store( Xa ) + if selfA._toStore("BMA"): + selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) ) + if selfA._toStore("InnovationAtCurrentState"): + selfA.StoredVariables["InnovationAtCurrentState"].store( Innovation ) + if selfA._toStore("SimulatedObservationAtCurrentState") \ + or selfA._toStore("SimulatedObservationAtCurrentOptimum"): + selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HXb ) + # ---> autres + if selfA._parameters["StoreInternalVariables"] \ + or selfA._toStore("CostFunctionJ") \ + or selfA._toStore("CostFunctionJb") \ + or selfA._toStore("CostFunctionJo") \ + or selfA._toStore("CurrentOptimum") or selfA._toStore("APosterioriCovariance"): + Jb = float( 0.5 * (Xa - Xb).T @ (BI @ (Xa - Xb)) ) + Jo = float( 0.5 * Innovation.T @ (RI @ Innovation) ) + J = Jb + Jo + selfA.StoredVariables["CostFunctionJb"].store( Jb ) + selfA.StoredVariables["CostFunctionJo"].store( Jo ) + selfA.StoredVariables["CostFunctionJ" ].store( J ) + # + if selfA._toStore("IndexOfOptimum") \ + or selfA._toStore("CurrentOptimum") \ + or selfA._toStore("CostFunctionJAtCurrentOptimum") \ + or selfA._toStore("CostFunctionJbAtCurrentOptimum") \ + or selfA._toStore("CostFunctionJoAtCurrentOptimum") \ + or selfA._toStore("SimulatedObservationAtCurrentOptimum"): + IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][:] ) + if selfA._toStore("IndexOfOptimum"): + selfA.StoredVariables["IndexOfOptimum"].store( IndexMin ) + if selfA._toStore("CurrentOptimum"): + selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] ) + if selfA._toStore("SimulatedObservationAtCurrentOptimum"): + selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] ) + if selfA._toStore("CostFunctionJbAtCurrentOptimum"): + selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] ) + if selfA._toStore("CostFunctionJoAtCurrentOptimum"): + selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] ) + if 
selfA._toStore("CostFunctionJAtCurrentOptimum"): + selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] ) + if selfA._toStore("APosterioriCovariance"): + selfA.StoredVariables["APosterioriCovariance"].store( Pa ) + # + return 0 + +# ============================================================================== +if __name__ == "__main__": + print('\n AUTODIAGNOSTIC\n') diff --git a/src/daComposant/daAlgorithms/Atoms/exkf.py b/src/daComposant/daAlgorithms/Atoms/exkf.py index 19e87c6..6936fc9 100644 --- a/src/daComposant/daAlgorithms/Atoms/exkf.py +++ b/src/daComposant/daAlgorithms/Atoms/exkf.py @@ -38,17 +38,6 @@ def exkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q): if selfA._parameters["EstimationOf"] == "Parameters": selfA._parameters["StoreInternalVariables"] = True # - # Opérateurs - H = HO["Direct"].appliedControledFormTo - # - if selfA._parameters["EstimationOf"] == "State": - M = EM["Direct"].appliedControledFormTo - # - if CM is not None and "Tangent" in CM and U is not None: - Cm = CM["Tangent"].asMatrix(Xb) - else: - Cm = None - # # Durée d'observation et tailles if hasattr(Y,"stepnumber"): duration = Y.stepnumber() @@ -90,21 +79,6 @@ def exkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q): previousJMinimum = numpy.finfo(float).max # for step in range(duration-1): - if hasattr(Y,"store"): - Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1)) - else: - Ynpu = numpy.ravel( Y ).reshape((__p,1)) - # - Ht = HO["Tangent"].asMatrix(ValueForMethodForm = Xn) - Ht = Ht.reshape(Ynpu.size,Xn.size) # ADAO & check shape - Ha = HO["Adjoint"].asMatrix(ValueForMethodForm = Xn) - Ha = Ha.reshape(Xn.size,Ynpu.size) # ADAO & check shape - # - if selfA._parameters["EstimationOf"] == "State": - Mt = EM["Tangent"].asMatrix(ValueForMethodForm = Xn) - Mt = Mt.reshape(Xn.size,Xn.size) # ADAO & check shape - Ma = EM["Adjoint"].asMatrix(ValueForMethodForm = Xn) - Ma = Ma.reshape(Xn.size,Xn.size) # ADAO & check shape # if U is not None: if hasattr(U,"store") and len(U)>1: @@ -117,8 +91,14 @@ def exkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q): Un = None # if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast + Mt = EM["Tangent"].asMatrix(Xn) + Mt = Mt.reshape(Xn.size,Xn.size) # ADAO & check shape + Ma = EM["Adjoint"].asMatrix(Xn) + Ma = Ma.reshape(Xn.size,Xn.size) # ADAO & check shape + M = EM["Direct"].appliedControledFormTo Xn_predicted = numpy.ravel( M( (Xn, Un) ) ).reshape((__n,1)) - if Cm is not None and Un is not None: # Attention : si Cm est aussi dans M, doublon ! + if CM is not None and "Tangent" in CM and Un is not None: # Attention : si Cm est aussi dans M, doublon ! 
+ Cm = CM["Tangent"].asMatrix(Xn_predicted) Cm = Cm.reshape(__n,Un.size) # ADAO & check shape Xn_predicted = Xn_predicted + Cm @ Un Pn_predicted = Q + Mt * (Pn * Ma) @@ -127,13 +107,26 @@ def exkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q): Xn_predicted = Xn Pn_predicted = Pn # + if hasattr(Y,"store"): + Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1)) + else: + Ynpu = numpy.ravel( Y ).reshape((__p,1)) + # + Ht = HO["Tangent"].asMatrix(Xn_predicted) + Ht = Ht.reshape(Ynpu.size,Xn.size) # ADAO & check shape + Ha = HO["Adjoint"].asMatrix(Xn_predicted) + Ha = Ha.reshape(Xn.size,Ynpu.size) # ADAO & check shape + H = HO["Direct"].appliedControledFormTo + # if selfA._parameters["EstimationOf"] == "State": HX_predicted = numpy.ravel( H( (Xn_predicted, None) ) ).reshape((__p,1)) _Innovation = Ynpu - HX_predicted elif selfA._parameters["EstimationOf"] == "Parameters": HX_predicted = numpy.ravel( H( (Xn_predicted, Un) ) ).reshape((__p,1)) _Innovation = Ynpu - HX_predicted - if Cm is not None and Un is not None: # Attention : si Cm est aussi dans H, doublon ! + if CM is not None and "Tangent" in CM and Un is not None: # Attention : si Cm est aussi dans H, doublon ! + Cm = CM["Tangent"].asMatrix(Xn_predicted) + Cm = Cm.reshape(__n,Un.size) # ADAO & check shape _Innovation = _Innovation - Cm @ Un # Kn = Pn_predicted * Ha * numpy.linalg.inv(R + numpy.dot(Ht, Pn_predicted * Ha)) diff --git a/src/daComposant/daAlgorithms/Atoms/incr3dvar.py b/src/daComposant/daAlgorithms/Atoms/incr3dvar.py index d936901..10b903c 100644 --- a/src/daComposant/daAlgorithms/Atoms/incr3dvar.py +++ b/src/daComposant/daAlgorithms/Atoms/incr3dvar.py @@ -32,9 +32,9 @@ from daCore.PlatformInfo import PlatformInfo mpr = PlatformInfo().MachinePrecision() # ============================================================================== -def incr3dvar(selfA, Xb, Y, HO, R, B): +def incr3dvar(selfA, Xb, Y, U, HO, CM, R, B, __storeState = False): """ - 3DVAR incrémental + Correction """ # # Initialisations @@ -212,6 +212,7 @@ def incr3dvar(selfA, Xb, Y, HO, R, B): iOuter = selfA.StoredVariables["CurrentIterationNumber"][-1] # Xa = Xr + if __storeState: selfA._setInternalState("Xn", Xa) #-------------------------- # selfA.StoredVariables["Analysis"].store( Xa ) diff --git a/src/daComposant/daAlgorithms/Atoms/psas3dvar.py b/src/daComposant/daAlgorithms/Atoms/psas3dvar.py index 4e9b471..8f69ece 100644 --- a/src/daComposant/daAlgorithms/Atoms/psas3dvar.py +++ b/src/daComposant/daAlgorithms/Atoms/psas3dvar.py @@ -29,9 +29,9 @@ import numpy, scipy, scipy.optimize, scipy.version from daCore.NumericObjects import HessienneEstimation, QuantilesEstimations # ============================================================================== -def psas3dvar(selfA, Xb, Y, HO, R, B): +def psas3dvar(selfA, Xb, Y, U, HO, CM, R, B, __storeState = False): """ - 3DVAR PSAS + Correction """ # # Initialisations @@ -48,16 +48,15 @@ def psas3dvar(selfA, Xb, Y, HO, R, B): if max(Y.shape) != max(HXb.shape): raise ValueError("The shapes %s of observations Y and %s of observed calculation H(X) are different, they have to be identical."%(Y.shape,HXb.shape)) # - if selfA._toStore("JacobianMatrixAtBackground"): - HtMb = HO["Tangent"].asMatrix(ValueForMethodForm = Xb) - HtMb = HtMb.reshape(Y.size,Xb.size) # ADAO & check shape - selfA.StoredVariables["JacobianMatrixAtBackground"].store( HtMb ) - # Ht = HO["Tangent"].asMatrix(Xb) + Ht = Ht.reshape(Y.size,Xb.size) # ADAO & check shape BHT = B * Ht.T HBHTpR = R + Ht * BHT Innovation = Y - HXb # + if 
selfA._toStore("JacobianMatrixAtBackground"): + selfA.StoredVariables["JacobianMatrixAtBackground"].store( Ht ) + # Xini = numpy.zeros(Y.size) # # Définition de la fonction-coût @@ -189,6 +188,7 @@ def psas3dvar(selfA, Xb, Y, HO, R, B): Minimum = Xb + BHT @ Minimum.reshape((-1,1)) # Xa = Minimum + if __storeState: selfA._setInternalState("Xn", Xa) #-------------------------- # selfA.StoredVariables["Analysis"].store( Xa ) diff --git a/src/daComposant/daAlgorithms/Atoms/std3dvar.py b/src/daComposant/daAlgorithms/Atoms/std3dvar.py index e6fb211..5ee2976 100644 --- a/src/daComposant/daAlgorithms/Atoms/std3dvar.py +++ b/src/daComposant/daAlgorithms/Atoms/std3dvar.py @@ -29,9 +29,9 @@ import numpy, scipy, scipy.optimize, scipy.version from daCore.NumericObjects import HessienneEstimation, QuantilesEstimations # ============================================================================== -def std3dvar(selfA, Xb, Y, HO, R, B): +def std3dvar(selfA, Xb, Y, U, HO, CM, R, B, __storeState = False): """ - 3DVAR + Correction """ # # Initialisations @@ -50,7 +50,7 @@ def std3dvar(selfA, Xb, Y, HO, R, B): raise ValueError("The shapes %s of observations Y and %s of observed calculation H(X) are different, they have to be identical."%(Y.shape,HXb.shape)) # if selfA._toStore("JacobianMatrixAtBackground"): - HtMb = HO["Tangent"].asMatrix(ValueForMethodForm = Xb) + HtMb = HO["Tangent"].asMatrix(Xb) HtMb = HtMb.reshape(Y.size,Xb.size) # ADAO & check shape selfA.StoredVariables["JacobianMatrixAtBackground"].store( HtMb ) # @@ -191,6 +191,7 @@ def std3dvar(selfA, Xb, Y, HO, R, B): Minimum = selfA.StoredVariables["CurrentState"][IndexMin] # Xa = Minimum + if __storeState: selfA._setInternalState("Xn", Xa) #-------------------------- # selfA.StoredVariables["Analysis"].store( Xa ) diff --git a/src/daComposant/daAlgorithms/Atoms/std4dvar.py b/src/daComposant/daAlgorithms/Atoms/std4dvar.py index fbcebc1..1eca826 100644 --- a/src/daComposant/daAlgorithms/Atoms/std4dvar.py +++ b/src/daComposant/daAlgorithms/Atoms/std4dvar.py @@ -34,7 +34,7 @@ mfp = PlatformInfo().MaximumPrecision() # ============================================================================== def std4dvar(selfA, Xb, Y, U, HO, EM, CM, R, B, Q): """ - 4DVAR + Correction """ # # Initialisations diff --git a/src/daComposant/daAlgorithms/Atoms/stdkf.py b/src/daComposant/daAlgorithms/Atoms/stdkf.py deleted file mode 100644 index b383d90..0000000 --- a/src/daComposant/daAlgorithms/Atoms/stdkf.py +++ /dev/null @@ -1,219 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2008-2022 EDF R&D -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -# -# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com -# -# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D - -__doc__ = """ - Standard Kalman Filter -""" -__author__ = "Jean-Philippe ARGAUD" - -import numpy -from daCore.PlatformInfo import PlatformInfo -mpr = PlatformInfo().MachinePrecision() -mfp = PlatformInfo().MaximumPrecision() - -# ============================================================================== -def stdkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q): - """ - Standard Kalman Filter - """ - if selfA._parameters["EstimationOf"] == "Parameters": - selfA._parameters["StoreInternalVariables"] = True - # - # Opérateurs - # ---------- - Ht = HO["Tangent"].asMatrix(Xb) - Ha = HO["Adjoint"].asMatrix(Xb) - # - if selfA._parameters["EstimationOf"] == "State": - Mt = EM["Tangent"].asMatrix(Xb) - Ma = EM["Adjoint"].asMatrix(Xb) - # - if CM is not None and "Tangent" in CM and U is not None: - Cm = CM["Tangent"].asMatrix(Xb) - else: - Cm = None - # - # Durée d'observation et tailles - if hasattr(Y,"stepnumber"): - duration = Y.stepnumber() - __p = numpy.cumprod(Y.shape())[-1] - else: - duration = 2 - __p = numpy.array(Y).size - # - # Précalcul des inversions de B et R - if selfA._parameters["StoreInternalVariables"] \ - or selfA._toStore("CostFunctionJ") \ - or selfA._toStore("CostFunctionJb") \ - or selfA._toStore("CostFunctionJo") \ - or selfA._toStore("CurrentOptimum") \ - or selfA._toStore("APosterioriCovariance"): - BI = B.getI() - RI = R.getI() - # - __n = Xb.size - nbPreviousSteps = len(selfA.StoredVariables["Analysis"]) - # - if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]: - Xn = Xb - Pn = B - selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) ) - selfA.StoredVariables["Analysis"].store( Xb ) - if selfA._toStore("APosterioriCovariance"): - if hasattr(B,"asfullmatrix"): - selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(__n) ) - else: - selfA.StoredVariables["APosterioriCovariance"].store( B ) - selfA._setInternalState("seed", numpy.random.get_state()) - elif selfA._parameters["nextStep"]: - Xn = selfA._getInternalState("Xn") - Pn = selfA._getInternalState("Pn") - # - if selfA._parameters["EstimationOf"] == "Parameters": - XaMin = Xn - previousJMinimum = numpy.finfo(float).max - # - for step in range(duration-1): - if hasattr(Y,"store"): - Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1)) - else: - Ynpu = numpy.ravel( Y ).reshape((__p,1)) - # - if U is not None: - if hasattr(U,"store") and len(U)>1: - Un = numpy.ravel( U[step] ).reshape((-1,1)) - elif hasattr(U,"store") and len(U)==1: - Un = numpy.ravel( U[0] ).reshape((-1,1)) - else: - Un = numpy.ravel( U ).reshape((-1,1)) - else: - Un = None - # - if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast - Xn_predicted = Mt @ Xn - if Cm is not None and Un is not None: # Attention : si Cm est aussi dans M, doublon ! 
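# ----------------------------------------------------------------------------
# Editorial sketch, not part of the patch: the stdkf.py deleted here ran the
# textbook linear predict/correct pair in a single loop; the patch splits it
# into the generic multiXOsteps driver (predict) plus the new ecwstdkf atom
# (correct). One step of the same recursion in plain NumPy:
import numpy
rng = numpy.random.default_rng(2)
n = 3
Mt, Ht = numpy.eye(n), numpy.eye(n)                   # toy linear M and H
Q, R = 0.01 * numpy.eye(n), 0.1 * numpy.eye(n)
Xn, Pn = numpy.zeros((n, 1)), numpy.eye(n)
Ynpu = rng.standard_normal((n, 1))                    # observation at step n+1
Xf = Mt @ Xn; Pf = Q + Mt @ (Pn @ Mt.T)               # predict (time update)
Kn = Pf @ Ht.T @ numpy.linalg.inv(R + Ht @ (Pf @ Ht.T))
Xn = Xf + Kn @ (Ynpu - Ht @ Xf)                       # correct (measurement
Pn = Pf - Kn @ (Ht @ Pf)                              # update)
# ----------------------------------------------------------------------------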
- Cm = Cm.reshape(__n,Un.size) # ADAO & check shape - Xn_predicted = Xn_predicted + Cm @ Un - Pn_predicted = Q + Mt * (Pn * Ma) - elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast - # --- > Par principe, M = Id, Q = 0 - Xn_predicted = Xn - Pn_predicted = Pn - # - if selfA._parameters["EstimationOf"] == "State": - HX_predicted = Ht @ Xn_predicted - _Innovation = Ynpu - HX_predicted - elif selfA._parameters["EstimationOf"] == "Parameters": - HX_predicted = Ht @ Xn_predicted - _Innovation = Ynpu - HX_predicted - if Cm is not None and Un is not None: # Attention : si Cm est aussi dans H, doublon ! - _Innovation = _Innovation - Cm @ Un - # - Kn = Pn_predicted * Ha * numpy.linalg.inv(R + numpy.dot(Ht, Pn_predicted * Ha)) - Xn = Xn_predicted + Kn * _Innovation - Pn = Pn_predicted - Kn * Ht * Pn_predicted - # - Xa = Xn # Pointeurs - #-------------------------- - selfA._setInternalState("Xn", Xn) - selfA._setInternalState("Pn", Pn) - #-------------------------- - # - selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) ) - # ---> avec analysis - selfA.StoredVariables["Analysis"].store( Xa ) - if selfA._toStore("SimulatedObservationAtCurrentAnalysis"): - selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( Ht * Xa ) - if selfA._toStore("InnovationAtCurrentAnalysis"): - selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation ) - # ---> avec current state - if selfA._parameters["StoreInternalVariables"] \ - or selfA._toStore("CurrentState"): - selfA.StoredVariables["CurrentState"].store( Xn ) - if selfA._toStore("ForecastState"): - selfA.StoredVariables["ForecastState"].store( Xn_predicted ) - if selfA._toStore("ForecastCovariance"): - selfA.StoredVariables["ForecastCovariance"].store( Pn_predicted ) - if selfA._toStore("BMA"): - selfA.StoredVariables["BMA"].store( Xn_predicted - Xa ) - if selfA._toStore("InnovationAtCurrentState"): - selfA.StoredVariables["InnovationAtCurrentState"].store( _Innovation ) - if selfA._toStore("SimulatedObservationAtCurrentState") \ - or selfA._toStore("SimulatedObservationAtCurrentOptimum"): - selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HX_predicted ) - # ---> autres - if selfA._parameters["StoreInternalVariables"] \ - or selfA._toStore("CostFunctionJ") \ - or selfA._toStore("CostFunctionJb") \ - or selfA._toStore("CostFunctionJo") \ - or selfA._toStore("CurrentOptimum") \ - or selfA._toStore("APosterioriCovariance"): - Jb = float( 0.5 * (Xa - Xb).T * (BI * (Xa - Xb)) ) - Jo = float( 0.5 * _Innovation.T * (RI * _Innovation) ) - J = Jb + Jo - selfA.StoredVariables["CostFunctionJb"].store( Jb ) - selfA.StoredVariables["CostFunctionJo"].store( Jo ) - selfA.StoredVariables["CostFunctionJ" ].store( J ) - # - if selfA._toStore("IndexOfOptimum") \ - or selfA._toStore("CurrentOptimum") \ - or selfA._toStore("CostFunctionJAtCurrentOptimum") \ - or selfA._toStore("CostFunctionJbAtCurrentOptimum") \ - or selfA._toStore("CostFunctionJoAtCurrentOptimum") \ - or selfA._toStore("SimulatedObservationAtCurrentOptimum"): - IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps - if selfA._toStore("IndexOfOptimum"): - selfA.StoredVariables["IndexOfOptimum"].store( IndexMin ) - if selfA._toStore("CurrentOptimum"): - selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] ) - if selfA._toStore("SimulatedObservationAtCurrentOptimum"): - 
selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] ) - if selfA._toStore("CostFunctionJbAtCurrentOptimum"): - selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] ) - if selfA._toStore("CostFunctionJoAtCurrentOptimum"): - selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] ) - if selfA._toStore("CostFunctionJAtCurrentOptimum"): - selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] ) - if selfA._toStore("APosterioriCovariance"): - selfA.StoredVariables["APosterioriCovariance"].store( Pn ) - if selfA._parameters["EstimationOf"] == "Parameters" \ - and J < previousJMinimum: - previousJMinimum = J - XaMin = Xa - if selfA._toStore("APosterioriCovariance"): - covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1] - # - # Stockage final supplémentaire de l'optimum en estimation de paramètres - # ---------------------------------------------------------------------- - if selfA._parameters["EstimationOf"] == "Parameters": - selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) ) - selfA.StoredVariables["Analysis"].store( XaMin ) - if selfA._toStore("APosterioriCovariance"): - selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin ) - if selfA._toStore("BMA"): - selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) ) - # - return 0 - -# ============================================================================== -if __name__ == "__main__": - print('\n AUTODIAGNOSTIC\n') diff --git a/src/daComposant/daAlgorithms/Atoms/uskf.py b/src/daComposant/daAlgorithms/Atoms/uskf.py index 50b98ce..a019769 100644 --- a/src/daComposant/daAlgorithms/Atoms/uskf.py +++ b/src/daComposant/daAlgorithms/Atoms/uskf.py @@ -61,17 +61,6 @@ def uskf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q): Wc = numpy.array( Ww ) Wc[0] = Lambda / (L + Lambda) + (1. - Alpha**2 + Beta) # - # Opérateurs - Hm = HO["Direct"].appliedControledFormTo - # - if selfA._parameters["EstimationOf"] == "State": - Mm = EM["Direct"].appliedControledFormTo - # - if CM is not None and "Tangent" in CM and U is not None: - Cm = CM["Tangent"].asMatrix(Xb) - else: - Cm = None - # # Durée d'observation et tailles if hasattr(Y,"stepnumber"): duration = Y.stepnumber() @@ -112,10 +101,6 @@ def uskf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q): previousJMinimum = numpy.finfo(float).max # for step in range(duration-1): - if hasattr(Y,"store"): - Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1)) - else: - Ynpu = numpy.ravel( Y ).reshape((__p,1)) # if U is not None: if hasattr(U,"store") and len(U)>1: @@ -127,6 +112,11 @@ def uskf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q): else: Un = None # + if CM is not None and "Tangent" in CM and U is not None: + Cm = CM["Tangent"].asMatrix(Xn) + else: + Cm = None + # Pndemi = numpy.real(scipy.linalg.sqrtm(Pn)) Xnp = numpy.hstack([Xn, Xn+Gamma*Pndemi, Xn-Gamma*Pndemi]) nbSpts = 2*Xn.size+1 @@ -134,6 +124,7 @@ def uskf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q): XEtnnp = [] for point in range(nbSpts): if selfA._parameters["EstimationOf"] == "State": + Mm = EM["Direct"].appliedControledFormTo XEtnnpi = numpy.asarray( Mm( (Xnp[:,point], Un) ) ).reshape((-1,1)) if Cm is not None and Un is not None: # Attention : si Cm est aussi dans M, doublon ! 
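# ----------------------------------------------------------------------------
# Editorial sketch, not part of the patch: the loop above pushes the 2n+1
# sigma points through the model; the following lines of uskf rebuild the
# weighted mean and covariance (the unscented transform). Standalone toy with
# example tuning Alpha=1, Beta=2, Kappa=0:
import numpy
n = 2
Alpha, Beta, Kappa = 1., 2., 0.
Lambda = Alpha**2 * (n + Kappa) - n
Gamma = numpy.sqrt(n + Lambda)
Wm = numpy.full(2*n + 1, 1. / (2.*(n + Lambda))); Wm[0] = Lambda / (n + Lambda)
Wc = Wm.copy(); Wc[0] += 1. - Alpha**2 + Beta
Xn, Pn = numpy.ones((n, 1)), 0.2 * numpy.eye(n)
Pndemi = numpy.linalg.cholesky(Pn)
Xnp = numpy.hstack([Xn, Xn + Gamma*Pndemi, Xn - Gamma*Pndemi])  # sigma points
XE = numpy.tanh(Xnp)                                  # toy nonlinear model
Xncm = XE @ Wm                                        # weighted mean
Pnm = sum(Wc[i] * numpy.outer(XE[:, i] - Xncm, XE[:, i] - Xncm)
          for i in range(2*n + 1))                    # weighted covariance
# ----------------------------------------------------------------------------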
Cm = Cm.reshape(Xn.size,Un.size) # ADAO & check shape @@ -155,6 +146,7 @@ def uskf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q): # Xnnp = numpy.hstack([Xncm.reshape((-1,1)), Xncm.reshape((-1,1))+Gamma*Pnmdemi, Xncm.reshape((-1,1))-Gamma*Pnmdemi]) # + Hm = HO["Direct"].appliedControledFormTo Ynnp = [] for point in range(nbSpts): if selfA._parameters["EstimationOf"] == "State": @@ -172,6 +164,10 @@ def uskf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q): Pyyn += Wc[i] * ((Ynnp[:,point]-Yncm).reshape((-1,1)) * (Ynnp[:,point]-Yncm)) Pxyn += Wc[i] * ((Xnnp[:,point]-Xncm).reshape((-1,1)) * (Ynnp[:,point]-Yncm)) # + if hasattr(Y,"store"): + Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1)) + else: + Ynpu = numpy.ravel( Y ).reshape((__p,1)) _Innovation = Ynpu - Yncm.reshape((-1,1)) if selfA._parameters["EstimationOf"] == "Parameters": if Cm is not None and Un is not None: # Attention : si Cm est aussi dans H, doublon ! diff --git a/src/daComposant/daAlgorithms/Atoms/van3dvar.py b/src/daComposant/daAlgorithms/Atoms/van3dvar.py index 93d608f..0e96130 100644 --- a/src/daComposant/daAlgorithms/Atoms/van3dvar.py +++ b/src/daComposant/daAlgorithms/Atoms/van3dvar.py @@ -32,9 +32,9 @@ from daCore.PlatformInfo import PlatformInfo mpr = PlatformInfo().MachinePrecision() # ============================================================================== -def van3dvar(selfA, Xb, Y, HO, R, B): +def van3dvar(selfA, Xb, Y, U, HO, CM, R, B, __storeState = False): """ - 3DVAR variational analysis with no inversion of B + Correction """ # # Initialisations @@ -202,6 +202,7 @@ def van3dvar(selfA, Xb, Y, HO, R, B): Minimum = Xb + B * Minimum.reshape((-1,1)) # Pas @ # Xa = Minimum + if __storeState: selfA._setInternalState("Xn", Xa) #-------------------------- # selfA.StoredVariables["Analysis"].store( Xa ) diff --git a/src/daComposant/daAlgorithms/Blue.py b/src/daComposant/daAlgorithms/Blue.py index ccba80c..3eb6638 100644 --- a/src/daComposant/daAlgorithms/Blue.py +++ b/src/daComposant/daAlgorithms/Blue.py @@ -69,9 +69,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): "CostFunctionJbAtCurrentOptimum", "CostFunctionJo", "CostFunctionJoAtCurrentOptimum", - "CurrentIterationNumber", "CurrentOptimum", "CurrentState", + "CurrentStepNumber", "ForecastState", "Innovation", "InnovationAtCurrentAnalysis", @@ -138,7 +138,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # #-------------------------- elif self._parameters["Variant"] == "OneCorrection": - ecwblue.ecwblue(self, Xb, Y, HO, R, B) + ecwblue.ecwblue(self, Xb, Y, U, HO, CM, R, B) # #-------------------------- else: diff --git a/src/daComposant/daAlgorithms/ExtendedBlue.py b/src/daComposant/daAlgorithms/ExtendedBlue.py index cc7b569..4ec2a13 100644 --- a/src/daComposant/daAlgorithms/ExtendedBlue.py +++ b/src/daComposant/daAlgorithms/ExtendedBlue.py @@ -71,6 +71,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): "CostFunctionJoAtCurrentOptimum", "CurrentOptimum", "CurrentState", + "CurrentStepNumber", "ForecastState", "Innovation", "InnovationAtCurrentAnalysis", @@ -137,7 +138,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # #-------------------------- elif self._parameters["Variant"] == "OneCorrection": - ecwexblue.ecwexblue(self, Xb, Y, HO, R, B) + ecwexblue.ecwexblue(self, Xb, Y, U, HO, CM, R, B) # #-------------------------- else: diff --git a/src/daComposant/daAlgorithms/KalmanFilter.py b/src/daComposant/daAlgorithms/KalmanFilter.py index c06764c..99b2c4f 100644 --- a/src/daComposant/daAlgorithms/KalmanFilter.py +++ 
b/src/daComposant/daAlgorithms/KalmanFilter.py @@ -20,18 +20,28 @@ # # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D -from daCore import BasicObjects -from daAlgorithms.Atoms import stdkf +from daCore import BasicObjects, NumericObjects +from daAlgorithms.Atoms import ecwstdkf # ============================================================================== class ElementaryAlgorithm(BasicObjects.Algorithm): def __init__(self): BasicObjects.Algorithm.__init__(self, "KALMANFILTER") + self.defineRequiredParameter( + name = "Variant", + default = "KalmanFilter", + typecast = str, + message = "Variant ou formulation de la méthode", + listval = [ + "KalmanFilter", + "OneCorrection", + ], + ) self.defineRequiredParameter( name = "EstimationOf", default = "State", typecast = str, - message = "Estimation d'etat ou de parametres", + message = "Estimation d'état ou de paramètres", listval = ["State", "Parameters"], ) self.defineRequiredParameter( @@ -58,9 +68,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): "CostFunctionJbAtCurrentOptimum", "CostFunctionJo", "CostFunctionJoAtCurrentOptimum", - "CurrentIterationNumber", "CurrentOptimum", "CurrentState", + "CurrentStepNumber", "ForecastCovariance", "ForecastState", "IndexOfOptimum", @@ -86,8 +96,16 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q) # #-------------------------- - stdkf.stdkf(self, Xb, Y, U, HO, EM, CM, R, B, Q) + if self._parameters["Variant"] == "KalmanFilter": + NumericObjects.multiXOsteps(self, Xb, Y, U, HO, EM, CM, R, B, Q, ecwstdkf.ecwstdkf, True, True) + # + #-------------------------- + elif self._parameters["Variant"] == "OneCorrection": + ecwstdkf.ecwstdkf(self, Xb, Y, U, HO, CM, R, B) + # #-------------------------- + else: + raise ValueError("Error in Variant name: %s"%self._parameters["Variant"]) # self._post_run(HO) return 0 diff --git a/src/daComposant/daAlgorithms/LinearLeastSquares.py b/src/daComposant/daAlgorithms/LinearLeastSquares.py index e5baedc..e4a3f0b 100644 --- a/src/daComposant/daAlgorithms/LinearLeastSquares.py +++ b/src/daComposant/daAlgorithms/LinearLeastSquares.py @@ -65,6 +65,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): "CostFunctionJoAtCurrentOptimum", "CurrentOptimum", "CurrentState", + "CurrentStepNumber", "ForecastState", "InnovationAtCurrentAnalysis", "OMA", @@ -92,7 +93,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # #-------------------------- elif self._parameters["Variant"] == "OneCorrection": - ecwlls.ecwlls(self, Xb, Y, HO, R, B) + ecwlls.ecwlls(self, Xb, Y, U, HO, CM, R, B) # #-------------------------- else: diff --git a/src/daComposant/daAlgorithms/NonLinearLeastSquares.py b/src/daComposant/daAlgorithms/NonLinearLeastSquares.py index 00e8940..19e5aea 100644 --- a/src/daComposant/daAlgorithms/NonLinearLeastSquares.py +++ b/src/daComposant/daAlgorithms/NonLinearLeastSquares.py @@ -110,6 +110,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): "CurrentIterationNumber", "CurrentOptimum", "CurrentState", + "CurrentStepNumber", "ForecastState", "IndexOfOptimum", "Innovation", @@ -151,7 +152,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # #-------------------------- elif self._parameters["Variant"] == "OneCorrection": - ecwnlls.ecwnlls(self, Xb, Y, HO, R, B) + ecwnlls.ecwnlls(self, Xb, Y, U, HO, CM, R, B) # #-------------------------- else: diff --git a/src/daComposant/daAlgorithms/UnscentedKalmanFilter.py b/src/daComposant/daAlgorithms/UnscentedKalmanFilter.py index 
f852864..c14407b 100644 --- a/src/daComposant/daAlgorithms/UnscentedKalmanFilter.py +++ b/src/daComposant/daAlgorithms/UnscentedKalmanFilter.py @@ -104,7 +104,6 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): "CostFunctionJbAtCurrentOptimum", "CostFunctionJo", "CostFunctionJoAtCurrentOptimum", - "CurrentIterationNumber", "CurrentOptimum", "CurrentState", "ForecastCovariance", diff --git a/src/daComposant/daCore/NumericObjects.py b/src/daComposant/daCore/NumericObjects.py index d1fbf96..52c90b8 100644 --- a/src/daComposant/daCore/NumericObjects.py +++ b/src/daComposant/daCore/NumericObjects.py @@ -850,14 +850,16 @@ def Apply3DVarRecentringOnEnsemble(__EnXn, __EnXf, __Ynpu, __HO, __R, __B, __Bet selfB._parameters["Bounds"] = None selfB._parameters["InitializationPoint"] = Xf from daAlgorithms.Atoms import std3dvar - std3dvar.std3dvar(selfB, Xf, __Ynpu, __HO, __R, Pf) + std3dvar.std3dvar(selfB, Xf, __Ynpu, None, __HO, None, __R, Pf) Xa = selfB.get("Analysis")[-1].reshape((-1,1)) del selfB # return Xa + EnsembleOfAnomalies( __EnXn ) # ============================================================================== -def multiXOsteps(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, oneCycle): +def multiXOsteps(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, oneCycle, + __CovForecast = False, __LinEvolution = False, + ): """ Prévision multi-pas avec une correction par pas (multi-méthodes) """ @@ -867,19 +869,20 @@ def multiXOsteps(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, oneCycle): if selfA._parameters["EstimationOf"] == "State": if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]: Xn = numpy.asarray(Xb) + if __CovForecast: Pn = B selfA.StoredVariables["Analysis"].store( Xn ) if selfA._toStore("APosterioriCovariance"): if hasattr(B,"asfullmatrix"): selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(Xn.size) ) else: selfA.StoredVariables["APosterioriCovariance"].store( B ) - if selfA._toStore("ForecastState"): - selfA.StoredVariables["ForecastState"].store( Xn ) selfA._setInternalState("seed", numpy.random.get_state()) elif selfA._parameters["nextStep"]: Xn = selfA._getInternalState("Xn") + if __CovForecast: Pn = selfA._getInternalState("Pn") else: Xn = numpy.asarray(Xb) + if __CovForecast: Pn = B # if hasattr(Y,"stepnumber"): duration = Y.stepnumber() @@ -889,6 +892,8 @@ def multiXOsteps(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, oneCycle): # Multi-steps # ----------- for step in range(duration-1): + selfA.StoredVariables["CurrentStepNumber"].store( len(selfA.StoredVariables["Analysis"]) ) + # if hasattr(Y,"store"): Ynpu = numpy.asarray( Y[step+1] ).reshape((-1,1)) else: @@ -904,29 +909,50 @@ def multiXOsteps(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, oneCycle): else: Un = None # - if selfA._parameters["EstimationOf"] == "State": # Forecast - M = EM["Direct"].appliedControledFormTo - if CM is not None and "Tangent" in CM and Un is not None: - Cm = CM["Tangent"].asMatrix(Xn) + # Predict (Time Update) + # --------------------- + if selfA._parameters["EstimationOf"] == "State": + if __CovForecast or __LinEvolution: + Mt = EM["Tangent"].asMatrix(Xn) + Mt = Mt.reshape(Xn.size,Xn.size) # ADAO & check shape + if __CovForecast: + Ma = EM["Adjoint"].asMatrix(Xn) + Ma = Ma.reshape(Xn.size,Xn.size) # ADAO & check shape + Pn_predicted = Q + Mt @ (Pn @ Ma) + if __LinEvolution: + Xn_predicted = Mt @ Xn else: - Cm = None - # - Xn_predicted = M( (Xn, Un) ) - if selfA._toStore("ForecastState"): - selfA.StoredVariables["ForecastState"].store( Xn_predicted ) - if Cm is not None and Un is not None: 
# Attention : si Cm est aussi dans M, doublon ! + M = EM["Direct"].appliedControledFormTo + Xn_predicted = M( (Xn, Un) ) + if CM is not None and "Tangent" in CM and Un is not None: # Attention : si Cm est aussi dans M, doublon ! + Cm = CM["Tangent"].asMatrix(Xn_predicted) Cm = Cm.reshape(Xn.size,Un.size) # ADAO & check shape - Xn_predicted = Xn_predicted + Cm @ Un + Xn_predicted = Xn_predicted + (Cm @ Un).reshape((-1,1)) elif selfA._parameters["EstimationOf"] == "Parameters": # No forecast # --- > Par principe, M = Id, Q = 0 Xn_predicted = Xn + if __CovForecast: Pn_predicted = Pn Xn_predicted = numpy.asarray(Xn_predicted).reshape((-1,1)) + if selfA._toStore("ForecastState"): + selfA.StoredVariables["ForecastState"].store( Xn_predicted ) + if __CovForecast: + if hasattr(Pn_predicted,"asfullmatrix"): + Pn_predicted = Pn_predicted.asfullmatrix(Xn.size) + else: + Pn_predicted = numpy.asarray(Pn_predicted).reshape((Xn.size,Xn.size)) + if selfA._toStore("ForecastCovariance"): + selfA.StoredVariables["ForecastCovariance"].store( Pn_predicted ) # - oneCycle(selfA, Xn_predicted, Ynpu, HO, R, B) # Correct + # Correct (Measurement Update) + # ---------------------------- + if __CovForecast: + oneCycle(selfA, Xn_predicted, Ynpu, Un, HO, CM, R, Pn_predicted, True) + else: + oneCycle(selfA, Xn_predicted, Ynpu, Un, HO, CM, R, B, True) # - Xn = selfA.StoredVariables["Analysis"][-1] #-------------------------- - selfA._setInternalState("Xn", Xn) + Xn = selfA._getInternalState("Xn") + if __CovForecast: Pn = selfA._getInternalState("Pn") # return 0 -- 2.39.2
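Usage note: with this patch, KalmanFilter gains the same "Variant" switch as the variational algorithms, "KalmanFilter" running the multi-step multiXOsteps driver and "OneCorrection" applying a single ecwstdkf correction. A schematic sketch of driving it through the adaoBuilder textual API (assumed from the standard ADAO documentation, not tested against this commit):

from adao import adaoBuilder
case = adaoBuilder.New()
case.set('AlgorithmParameters', Algorithm='KalmanFilter',
         Parameters={'Variant':'OneCorrection', 'EstimationOf':'Parameters'})
case.set('Background',          Vector=[0., 1.])
case.set('BackgroundError',     ScalarSparseMatrix=1.)
case.set('Observation',         Vector=[0.5, 1.5])
case.set('ObservationError',    ScalarSparseMatrix=0.1)
case.set('ObservationOperator', Matrix=[[1., 0.], [0., 1.]])
case.execute()
print(case.get('Analysis')[-1])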