"CurrentIterationNumber",
"CurrentOptimum",
"CurrentState",
+ "CurrentStepNumber",
"ForecastState",
"IndexOfOptimum",
"Innovation",
NumericObjects.multiXOsteps(self, Xb, Y, U, HO, EM, CM, R, B, Q, psas3dvar.psas3dvar)
#
#--------------------------
- elif self._parameters["Variant"] in ["OneCorrection", "OneCorrection3DVAR-Std"]:
- std3dvar.std3dvar(self, Xb, Y, HO, R, B)
+ elif self._parameters["Variant"] == "OneCorrection":
+ std3dvar.std3dvar(self, Xb, Y, U, HO, CM, R, B)
#
#--------------------------
else:
__author__ = "Jean-Philippe ARGAUD"
import math, numpy, scipy
-from daCore.NumericObjects import ForceNumericBounds
-from daCore.NumericObjects import ApplyBounds
+from daCore.NumericObjects import ApplyBounds, ForceNumericBounds
from daCore.PlatformInfo import PlatformInfo
mpr = PlatformInfo().MachinePrecision()
mfp = PlatformInfo().MaximumPrecision()
Wc = numpy.array( Ww )
Wc[0] = Lambda / (L + Lambda) + (1. - Alpha**2 + Beta)
#
- # Opérateurs
- Hm = HO["Direct"].appliedControledFormTo
- #
- if selfA._parameters["EstimationOf"] == "State":
- Mm = EM["Direct"].appliedControledFormTo
- #
- if CM is not None and "Tangent" in CM and U is not None:
- Cm = CM["Tangent"].asMatrix(Xb)
- else:
- Cm = None
- #
# Durée d'observation et tailles
if hasattr(Y,"stepnumber"):
duration = Y.stepnumber()
previousJMinimum = numpy.finfo(float).max
#
for step in range(duration-1):
- if hasattr(Y,"store"):
- Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
- else:
- Ynpu = numpy.ravel( Y ).reshape((__p,1))
#
if U is not None:
if hasattr(U,"store") and len(U)>1:
else:
Un = None
#
+ if CM is not None and "Tangent" in CM and U is not None:
+ Cm = CM["Tangent"].asMatrix(Xn)
+ else:
+ Cm = None
+ #
Pndemi = numpy.real(scipy.linalg.sqrtm(Pn))
Xnp = numpy.hstack([Xn, Xn+Gamma*Pndemi, Xn-Gamma*Pndemi])
nbSpts = 2*Xn.size+1
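+        # Unscented transform: 2n+1 sigma points, i.e. the current mean plus
+        # and minus Gamma times each column of the matrix square root of Pn.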
XEtnnp = []
for point in range(nbSpts):
if selfA._parameters["EstimationOf"] == "State":
+ Mm = EM["Direct"].appliedControledFormTo
XEtnnpi = numpy.asarray( Mm( (Xnp[:,point], Un) ) ).reshape((-1,1))
if Cm is not None and Un is not None: # Attention : si Cm est aussi dans M, doublon !
Cm = Cm.reshape(Xn.size,Un.size) # ADAO & check shape
for point in range(nbSpts):
Xnnp[:,point] = ApplyBounds( Xnnp[:,point], selfA._parameters["Bounds"] )
#
+ Hm = HO["Direct"].appliedControledFormTo
Ynnp = []
for point in range(nbSpts):
if selfA._parameters["EstimationOf"] == "State":
-            Pyyn += Wc[i] * ((Ynnp[:,point]-Yncm).reshape((-1,1)) * (Ynnp[:,point]-Yncm))
-            Pxyn += Wc[i] * ((Xnnp[:,point]-Xncm).reshape((-1,1)) * (Ynnp[:,point]-Yncm))
+            Pyyn += Wc[point] * ((Ynnp[:,point]-Yncm).reshape((-1,1)) * (Ynnp[:,point]-Yncm))
+            Pxyn += Wc[point] * ((Xnnp[:,point]-Xncm).reshape((-1,1)) * (Ynnp[:,point]-Yncm))
#
+ if hasattr(Y,"store"):
+ Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
+ else:
+ Ynpu = numpy.ravel( Y ).reshape((__p,1))
_Innovation = Ynpu - Yncm.reshape((-1,1))
if selfA._parameters["EstimationOf"] == "Parameters":
if Cm is not None and Un is not None: # Attention : si Cm est aussi dans H, doublon !
__author__ = "Jean-Philippe ARGAUD"
import numpy
-from daCore.NumericObjects import ForceNumericBounds
-from daCore.NumericObjects import ApplyBounds
+from daCore.NumericObjects import ApplyBounds, ForceNumericBounds
from daCore.PlatformInfo import PlatformInfo
mpr = PlatformInfo().MachinePrecision()
mfp = PlatformInfo().MaximumPrecision()
selfA._parameters["StoreInternalVariables"] = True
selfA._parameters["Bounds"] = ForceNumericBounds( selfA._parameters["Bounds"] )
#
- # Opérateurs
- H = HO["Direct"].appliedControledFormTo
- #
- if selfA._parameters["EstimationOf"] == "State":
- M = EM["Direct"].appliedControledFormTo
- #
- if CM is not None and "Tangent" in CM and U is not None:
- Cm = CM["Tangent"].asMatrix(Xb)
- else:
- Cm = None
- #
# Durée d'observation et tailles
if hasattr(Y,"stepnumber"):
duration = Y.stepnumber()
previousJMinimum = numpy.finfo(float).max
#
for step in range(duration-1):
- if hasattr(Y,"store"):
- Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
- else:
- Ynpu = numpy.ravel( Y ).reshape((__p,1))
- #
- Ht = HO["Tangent"].asMatrix(ValueForMethodForm = Xn)
- Ht = Ht.reshape(Ynpu.size,Xn.size) # ADAO & check shape
- Ha = HO["Adjoint"].asMatrix(ValueForMethodForm = Xn)
- Ha = Ha.reshape(Xn.size,Ynpu.size) # ADAO & check shape
- #
- if selfA._parameters["EstimationOf"] == "State":
- Mt = EM["Tangent"].asMatrix(ValueForMethodForm = Xn)
- Mt = Mt.reshape(Xn.size,Xn.size) # ADAO & check shape
- Ma = EM["Adjoint"].asMatrix(ValueForMethodForm = Xn)
- Ma = Ma.reshape(Xn.size,Xn.size) # ADAO & check shape
#
if U is not None:
if hasattr(U,"store") and len(U)>1:
Xn = ApplyBounds( Xn, selfA._parameters["Bounds"] )
#
if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast
+ Mt = EM["Tangent"].asMatrix(Xn)
+ Mt = Mt.reshape(Xn.size,Xn.size) # ADAO & check shape
+ Ma = EM["Adjoint"].asMatrix(Xn)
+ Ma = Ma.reshape(Xn.size,Xn.size) # ADAO & check shape
+ M = EM["Direct"].appliedControledFormTo
Xn_predicted = numpy.ravel( M( (Xn, Un) ) ).reshape((__n,1))
- if Cm is not None and Un is not None: # Attention : si Cm est aussi dans M, doublon !
+ if CM is not None and "Tangent" in CM and Un is not None: # Attention : si Cm est aussi dans M, doublon !
+ Cm = CM["Tangent"].asMatrix(Xn_predicted)
Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
Xn_predicted = Xn_predicted + Cm @ Un
Pn_predicted = Q + Mt * (Pn * Ma)
if selfA._parameters["Bounds"] is not None and selfA._parameters["ConstrainedBy"] == "EstimateProjection":
Xn_predicted = ApplyBounds( Xn_predicted, selfA._parameters["Bounds"] )
#
+ if hasattr(Y,"store"):
+ Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
+ else:
+ Ynpu = numpy.ravel( Y ).reshape((__p,1))
+ #
+ Ht = HO["Tangent"].asMatrix(Xn_predicted)
+ Ht = Ht.reshape(Ynpu.size,Xn.size) # ADAO & check shape
+ Ha = HO["Adjoint"].asMatrix(Xn_predicted)
+ Ha = Ha.reshape(Xn.size,Ynpu.size) # ADAO & check shape
+ H = HO["Direct"].appliedControledFormTo
+ #
if selfA._parameters["EstimationOf"] == "State":
HX_predicted = numpy.ravel( H( (Xn_predicted, None) ) ).reshape((__p,1))
_Innovation = Ynpu - HX_predicted
elif selfA._parameters["EstimationOf"] == "Parameters":
HX_predicted = numpy.ravel( H( (Xn_predicted, Un) ) ).reshape((__p,1))
_Innovation = Ynpu - HX_predicted
- if Cm is not None and Un is not None: # Attention : si Cm est aussi dans H, doublon !
+ if CM is not None and "Tangent" in CM and Un is not None: # Attention : si Cm est aussi dans H, doublon !
+ Cm = CM["Tangent"].asMatrix(Xn_predicted)
+ Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
_Innovation = _Innovation - Cm @ Un
#
Kn = Pn_predicted * Ha * numpy.linalg.inv(R + numpy.dot(Ht, Pn_predicted * Ha))
mpr = PlatformInfo().MachinePrecision()
# ==============================================================================
-def ecwblue(selfA, Xb, Y, HO, R, B):
+def ecwblue(selfA, Xb, Y, U, HO, CM, R, B, __storeState = False):
"""
- BLUE
+ Correction
"""
#
# Initialisations
if max(Y.shape) != max(HXb.shape):
raise ValueError("The shapes %s of observations Y and %s of observed calculation H(X) are different, they have to be identical."%(Y.shape,HXb.shape))
#
- BI = B.getI()
- RI = R.getI()
+ if selfA._parameters["StoreInternalVariables"] or \
+ selfA._toStore("CostFunctionJ") or selfA._toStore("CostFunctionJAtCurrentOptimum") or \
+ selfA._toStore("CostFunctionJb") or selfA._toStore("CostFunctionJbAtCurrentOptimum") or \
+ selfA._toStore("CostFunctionJo") or selfA._toStore("CostFunctionJoAtCurrentOptimum") or \
+ selfA._toStore("MahalanobisConsistency") or \
+ (Y.size > Xb.size):
+ if isinstance(B,numpy.ndarray):
+ BI = numpy.linalg.inv(B)
+ else:
+ BI = B.getI()
+ RI = R.getI()
#
Innovation = Y - HXb
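+    # In parameter estimation, the linearized control contribution Cm @ U is
+    # part of the observed forecast, so it is subtracted from the innovation
+    # below unless it is already accounted for inside H (see inline warning).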
+ if selfA._parameters["EstimationOf"] == "Parameters":
+ if CM is not None and "Tangent" in CM and U is not None: # Attention : si Cm est aussi dans H, doublon !
+ Cm = CM["Tangent"].asMatrix(Xb)
+ Cm = Cm.reshape(Xb.size,U.size) # ADAO & check shape
+ Innovation = Innovation - (Cm @ U).reshape((-1,1))
#
- # Calcul de la matrice de gain et de l'analyse
- # --------------------------------------------
+ # Calcul de l'analyse
+ # -------------------
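+    # Both branches compute the same BLUE analysis,
+    #   Xa = Xb + B Ha (Hm B Ha + R)^-1 d = Xb + (BI + Ha RI Hm)^-1 Ha RI d,
+    # with d the innovation (Sherman-Morrison-Woodbury identity); the size
+    # test just factors the smaller matrix, p x p in observation space when
+    # p <= n, and n x n in state space otherwise.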
if Y.size <= Xb.size:
- _A = R + numpy.dot(Hm, B * Ha)
+ _HNHt = numpy.dot(Hm, B @ Ha)
+ _A = R + _HNHt
_u = numpy.linalg.solve( _A , Innovation )
- Xa = Xb + B * Ha * _u
+ Xa = Xb + (B @ (Ha @ _u)).reshape((-1,1))
else:
- _A = BI + numpy.dot(Ha, RI * Hm)
- _u = numpy.linalg.solve( _A , numpy.dot(Ha, RI * Innovation) )
- Xa = Xb + _u
+ _HtRH = numpy.dot(Ha, RI @ Hm)
+ _A = BI + _HtRH
+ _u = numpy.linalg.solve( _A , numpy.dot(Ha, RI @ Innovation) )
+ Xa = Xb + _u.reshape((-1,1))
+ #
+ if __storeState: selfA._setInternalState("Xn", Xa)
+ #--------------------------
+ #
selfA.StoredVariables["Analysis"].store( Xa )
#
# Calcul de la fonction coût
selfA._toStore("CostFunctionJb") or selfA._toStore("CostFunctionJbAtCurrentOptimum") or \
selfA._toStore("CostFunctionJo") or selfA._toStore("CostFunctionJoAtCurrentOptimum") or \
selfA._toStore("MahalanobisConsistency"):
- Jb = float( 0.5 * (Xa - Xb).T * (BI * (Xa - Xb)) )
+ Jb = float( 0.5 * (Xa - Xb).T @ (BI @ (Xa - Xb)) )
-        Jo = float( 0.5 * oma.T * (RI * oma) )
+        Jo = float( 0.5 * oma.T @ (RI @ oma) )
J = Jb + Jo
selfA.StoredVariables["CostFunctionJb"].store( Jb )
mpr = PlatformInfo().MachinePrecision()
# ==============================================================================
-def ecwexblue(selfA, Xb, Y, HO, R, B):
+def ecwexblue(selfA, Xb, Y, U, HO, CM, R, B, __storeState = False):
"""
- Extended BLUE
+ Correction
"""
#
# Initialisations
if max(Y.shape) != max(HXb.shape):
raise ValueError("The shapes %s of observations Y and %s of observed calculation H(X) are different, they have to be identical."%(Y.shape,HXb.shape))
#
- BI = B.getI()
- RI = R.getI()
+ if selfA._parameters["StoreInternalVariables"] or \
+ selfA._toStore("CostFunctionJ") or selfA._toStore("CostFunctionJAtCurrentOptimum") or \
+ selfA._toStore("CostFunctionJb") or selfA._toStore("CostFunctionJbAtCurrentOptimum") or \
+ selfA._toStore("CostFunctionJo") or selfA._toStore("CostFunctionJoAtCurrentOptimum") or \
+ selfA._toStore("MahalanobisConsistency") or \
+ (Y.size > Xb.size):
+ if isinstance(B,numpy.ndarray):
+ BI = numpy.linalg.inv(B)
+ else:
+ BI = B.getI()
+ RI = R.getI()
#
Innovation = Y - HXb
+ if selfA._parameters["EstimationOf"] == "Parameters":
+ if CM is not None and "Tangent" in CM and U is not None: # Attention : si Cm est aussi dans H, doublon !
+ Cm = CM["Tangent"].asMatrix(Xb)
+ Cm = Cm.reshape(Xb.size,U.size) # ADAO & check shape
+ Innovation = Innovation - (Cm @ U).reshape((-1,1))
#
- # Calcul de la matrice de gain et de l'analyse
- # --------------------------------------------
+ # Calcul de l'analyse
+ # -------------------
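+    # Same primal/dual choice of the (extended) BLUE gain as in ecwblue; the
+    # two forms are related by the Sherman-Morrison-Woodbury identity.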
if Y.size <= Xb.size:
- _A = R + numpy.dot(Hm, B * Ha)
+ _HNHt = numpy.dot(Hm, B @ Ha)
+ _A = R + _HNHt
_u = numpy.linalg.solve( _A , Innovation )
- Xa = Xb + B * Ha * _u
+ Xa = Xb + (B @ (Ha @ _u)).reshape((-1,1))
else:
- _A = BI + numpy.dot(Ha, RI * Hm)
- _u = numpy.linalg.solve( _A , numpy.dot(Ha, RI * Innovation) )
- Xa = Xb + _u
+ _HtRH = numpy.dot(Ha, RI @ Hm)
+ _A = BI + _HtRH
+ _u = numpy.linalg.solve( _A , numpy.dot(Ha, RI @ Innovation) )
+ Xa = Xb + _u.reshape((-1,1))
+ #
+ if __storeState: selfA._setInternalState("Xn", Xa)
+ #--------------------------
+ #
selfA.StoredVariables["Analysis"].store( Xa )
#
# Calcul de la fonction coût
selfA._toStore("CostFunctionJb") or selfA._toStore("CostFunctionJbAtCurrentOptimum") or \
selfA._toStore("CostFunctionJo") or selfA._toStore("CostFunctionJoAtCurrentOptimum") or \
selfA._toStore("MahalanobisConsistency"):
- Jb = float( 0.5 * (Xa - Xb).T * (BI * (Xa - Xb)) )
+ Jb = float( 0.5 * (Xa - Xb).T @ (BI @ (Xa - Xb)) )
-        Jo = float( 0.5 * oma.T * (RI * oma) )
+        Jo = float( 0.5 * oma.T @ (RI @ oma) )
J = Jb + Jo
selfA.StoredVariables["CostFunctionJb"].store( Jb )
if selfA._toStore("MahalanobisConsistency"):
selfA.StoredVariables["MahalanobisConsistency"].store( float( 2.*J/Innovation.size ) )
if selfA._toStore("SimulationQuantiles"):
- HtM = HO["Tangent"].asMatrix(ValueForMethodForm = Xa)
+ HtM = HO["Tangent"].asMatrix(Xa)
HtM = HtM.reshape(Y.size,Xa.size) # ADAO & check shape
QuantilesEstimations(selfA, A, Xa, HXa, H, HtM)
if selfA._toStore("SimulatedObservationAtBackground"):
__author__ = "Jean-Philippe ARGAUD"
# ==============================================================================
-def ecwlls(selfA, Xb, Y, HO, R, B):
+def ecwlls(selfA, Xb, Y, U, HO, CM, R, B, __storeState = False):
"""
- Linear Least Squares
+ Correction
"""
#
# Initialisations
else:
RI = R.getI()
#
- # Calcul de la matrice de gain et de l'analyse
- # --------------------------------------------
+ # Calcul de l'analyse
+ # -------------------
K = (Ha * (RI * Hm)).I * Ha * RI
Xa = K * Y
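+    # Weighted least-squares normal equations: Xa = (Ha RI Hm)^-1 Ha RI Y,
+    # i.e. the Gauss-Markov estimate with observation weights RI.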
+ #
+ if __storeState: selfA._setInternalState("Xn", Xa)
+ #--------------------------
+ #
selfA.StoredVariables["Analysis"].store( Xa )
#
# Calcul de la fonction coût
import numpy, scipy, scipy.optimize, scipy.version
# ==============================================================================
-def ecwnlls(selfA, Xb, Y, HO, R, B):
+def ecwnlls(selfA, Xb, Y, U, HO, CM, R, B, __storeState = False):
"""
- Non Linear Least Squares
+ Correction
"""
#
# Initialisations
Minimum = selfA.StoredVariables["CurrentState"][IndexMin]
#
Xa = Minimum
+ if __storeState: selfA._setInternalState("Xn", Xa)
#--------------------------
#
selfA.StoredVariables["Analysis"].store( Xa )
--- /dev/null
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2008-2022 EDF R&D
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
+
+__doc__ = """
+ Standard Kalman Filter
+"""
+__author__ = "Jean-Philippe ARGAUD"
+
+import numpy
+from daCore.PlatformInfo import PlatformInfo
+mpr = PlatformInfo().MachinePrecision()
+
+# ==============================================================================
+def ecwstdkf(selfA, Xb, Y, U, HO, CM, R, B, __storeState = False):
+ """
+ Correction
+ """
+ if selfA._parameters["EstimationOf"] == "Parameters":
+ selfA._parameters["StoreInternalVariables"] = True
+ #
+ # Initialisations
+ # ---------------
+ Hm = HO["Tangent"].asMatrix(Xb)
+ Hm = Hm.reshape(Y.size,Xb.size) # ADAO & check shape
+ Ha = HO["Adjoint"].asMatrix(Xb)
+ Ha = Ha.reshape(Xb.size,Y.size) # ADAO & check shape
+ #
+ HXb = Hm @ Xb
+ HXb = HXb.reshape((-1,1))
+ if Y.size != HXb.size:
+ raise ValueError("The size %i of observations Y and %i of observed calculation H(X) are different, they have to be identical."%(Y.size,HXb.size))
+ if max(Y.shape) != max(HXb.shape):
+ raise ValueError("The shapes %s of observations Y and %s of observed calculation H(X) are different, they have to be identical."%(Y.shape,HXb.shape))
+ #
+ if selfA._parameters["StoreInternalVariables"] or \
+ selfA._toStore("CostFunctionJ") or selfA._toStore("CostFunctionJAtCurrentOptimum") or \
+ selfA._toStore("CostFunctionJb") or selfA._toStore("CostFunctionJbAtCurrentOptimum") or \
+ selfA._toStore("CostFunctionJo") or selfA._toStore("CostFunctionJoAtCurrentOptimum") or \
+ selfA._toStore("CurrentOptimum") or selfA._toStore("APosterioriCovariance") or \
+ (Y.size > Xb.size):
+ if isinstance(B,numpy.ndarray):
+ BI = numpy.linalg.inv(B)
+ else:
+ BI = B.getI()
+ RI = R.getI()
+ #
+ Innovation = Y - HXb
+ if selfA._parameters["EstimationOf"] == "Parameters":
+ if CM is not None and "Tangent" in CM and U is not None: # Attention : si Cm est aussi dans H, doublon !
+ Cm = CM["Tangent"].asMatrix(Xb)
+ Cm = Cm.reshape(Xb.size,U.size) # ADAO & check shape
+ Innovation = Innovation - (Cm @ U).reshape((-1,1))
+ #
+ # Calcul de l'analyse
+ # -------------------
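+    # Primal/dual BLUE analysis as in ecwblue, except that the explicit gain
+    # K is kept in both branches: the a posteriori covariance Pa needs it.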
+ if Y.size <= Xb.size:
+ _HNHt = numpy.dot(Hm, B @ Ha)
+ _A = R + _HNHt
+ _u = numpy.linalg.solve( _A , Innovation )
+ Xa = Xb + (B @ (Ha @ _u)).reshape((-1,1))
+ K = B @ (Ha @ numpy.linalg.inv(_A))
+ else:
+ _HtRH = numpy.dot(Ha, RI @ Hm)
+ _A = BI + _HtRH
+ _u = numpy.linalg.solve( _A , numpy.dot(Ha, RI @ Innovation) )
+ Xa = Xb + _u.reshape((-1,1))
+ K = numpy.linalg.inv(_A) @ (Ha @ RI.asfullmatrix(Y.size))
+ #
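+    # A posteriori covariance Pa = (I - K Hm) B, followed by an explicit
+    # symmetrization and a small diagonal inflation scaled by the machine
+    # precision mpr, keeping Pa numerically symmetric positive definite.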
+ Pa = B - K @ (Hm @ B)
+ Pa = (Pa + Pa.T) * 0.5 # Symétrie
+ Pa = Pa + mpr*numpy.trace( Pa ) * numpy.identity(Xa.size) # Positivité
+ #
+ if __storeState: selfA._setInternalState("Xn", Xa)
+ if __storeState: selfA._setInternalState("Pn", Pa)
+ #--------------------------
+ #
+ selfA.StoredVariables["Analysis"].store( Xa )
+ if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
+ selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( Hm @ Xa )
+ if selfA._toStore("InnovationAtCurrentAnalysis"):
+ selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( Innovation )
+ # ---> avec current state
+ if selfA._parameters["StoreInternalVariables"] \
+ or selfA._toStore("CurrentState"):
+ selfA.StoredVariables["CurrentState"].store( Xa )
+ if selfA._toStore("BMA"):
+ selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(Xa) )
+ if selfA._toStore("InnovationAtCurrentState"):
+ selfA.StoredVariables["InnovationAtCurrentState"].store( Innovation )
+ if selfA._toStore("SimulatedObservationAtCurrentState") \
+ or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
+ selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HXb )
+ # ---> autres
+ if selfA._parameters["StoreInternalVariables"] \
+ or selfA._toStore("CostFunctionJ") \
+ or selfA._toStore("CostFunctionJb") \
+ or selfA._toStore("CostFunctionJo") \
+ or selfA._toStore("CurrentOptimum") or selfA._toStore("APosterioriCovariance"):
+ Jb = float( 0.5 * (Xa - Xb).T @ (BI @ (Xa - Xb)) )
+ Jo = float( 0.5 * Innovation.T @ (RI @ Innovation) )
+ J = Jb + Jo
+ selfA.StoredVariables["CostFunctionJb"].store( Jb )
+ selfA.StoredVariables["CostFunctionJo"].store( Jo )
+ selfA.StoredVariables["CostFunctionJ" ].store( J )
+ #
+ if selfA._toStore("IndexOfOptimum") \
+ or selfA._toStore("CurrentOptimum") \
+ or selfA._toStore("CostFunctionJAtCurrentOptimum") \
+ or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
+ or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
+ or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
+ IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][:] )
+ if selfA._toStore("IndexOfOptimum"):
+ selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
+ if selfA._toStore("CurrentOptimum"):
+ selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
+ if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
+ selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
+ if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
+ selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
+ if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
+ selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
+ if selfA._toStore("CostFunctionJAtCurrentOptimum"):
+ selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
+ if selfA._toStore("APosterioriCovariance"):
+ selfA.StoredVariables["APosterioriCovariance"].store( Pa )
+ #
+ return 0
+
+# ==============================================================================
+if __name__ == "__main__":
+ print('\n AUTODIAGNOSTIC\n')
if selfA._parameters["EstimationOf"] == "Parameters":
selfA._parameters["StoreInternalVariables"] = True
#
- # Opérateurs
- H = HO["Direct"].appliedControledFormTo
- #
- if selfA._parameters["EstimationOf"] == "State":
- M = EM["Direct"].appliedControledFormTo
- #
- if CM is not None and "Tangent" in CM and U is not None:
- Cm = CM["Tangent"].asMatrix(Xb)
- else:
- Cm = None
- #
# Durée d'observation et tailles
if hasattr(Y,"stepnumber"):
duration = Y.stepnumber()
previousJMinimum = numpy.finfo(float).max
#
for step in range(duration-1):
- if hasattr(Y,"store"):
- Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
- else:
- Ynpu = numpy.ravel( Y ).reshape((__p,1))
- #
- Ht = HO["Tangent"].asMatrix(ValueForMethodForm = Xn)
- Ht = Ht.reshape(Ynpu.size,Xn.size) # ADAO & check shape
- Ha = HO["Adjoint"].asMatrix(ValueForMethodForm = Xn)
- Ha = Ha.reshape(Xn.size,Ynpu.size) # ADAO & check shape
- #
- if selfA._parameters["EstimationOf"] == "State":
- Mt = EM["Tangent"].asMatrix(ValueForMethodForm = Xn)
- Mt = Mt.reshape(Xn.size,Xn.size) # ADAO & check shape
- Ma = EM["Adjoint"].asMatrix(ValueForMethodForm = Xn)
- Ma = Ma.reshape(Xn.size,Xn.size) # ADAO & check shape
#
if U is not None:
if hasattr(U,"store") and len(U)>1:
Un = None
#
if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast
+ Mt = EM["Tangent"].asMatrix(Xn)
+ Mt = Mt.reshape(Xn.size,Xn.size) # ADAO & check shape
+ Ma = EM["Adjoint"].asMatrix(Xn)
+ Ma = Ma.reshape(Xn.size,Xn.size) # ADAO & check shape
+ M = EM["Direct"].appliedControledFormTo
Xn_predicted = numpy.ravel( M( (Xn, Un) ) ).reshape((__n,1))
- if Cm is not None and Un is not None: # Attention : si Cm est aussi dans M, doublon !
+ if CM is not None and "Tangent" in CM and Un is not None: # Attention : si Cm est aussi dans M, doublon !
+ Cm = CM["Tangent"].asMatrix(Xn_predicted)
Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
Xn_predicted = Xn_predicted + Cm @ Un
Pn_predicted = Q + Mt * (Pn * Ma)
Xn_predicted = Xn
Pn_predicted = Pn
#
+ if hasattr(Y,"store"):
+ Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
+ else:
+ Ynpu = numpy.ravel( Y ).reshape((__p,1))
+ #
+ Ht = HO["Tangent"].asMatrix(Xn_predicted)
+ Ht = Ht.reshape(Ynpu.size,Xn.size) # ADAO & check shape
+ Ha = HO["Adjoint"].asMatrix(Xn_predicted)
+ Ha = Ha.reshape(Xn.size,Ynpu.size) # ADAO & check shape
+ H = HO["Direct"].appliedControledFormTo
+ #
if selfA._parameters["EstimationOf"] == "State":
HX_predicted = numpy.ravel( H( (Xn_predicted, None) ) ).reshape((__p,1))
_Innovation = Ynpu - HX_predicted
elif selfA._parameters["EstimationOf"] == "Parameters":
HX_predicted = numpy.ravel( H( (Xn_predicted, Un) ) ).reshape((__p,1))
_Innovation = Ynpu - HX_predicted
- if Cm is not None and Un is not None: # Attention : si Cm est aussi dans H, doublon !
+ if CM is not None and "Tangent" in CM and Un is not None: # Attention : si Cm est aussi dans H, doublon !
+ Cm = CM["Tangent"].asMatrix(Xn_predicted)
+ Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
_Innovation = _Innovation - Cm @ Un
#
Kn = Pn_predicted * Ha * numpy.linalg.inv(R + numpy.dot(Ht, Pn_predicted * Ha))
mpr = PlatformInfo().MachinePrecision()
# ==============================================================================
-def incr3dvar(selfA, Xb, Y, HO, R, B):
+def incr3dvar(selfA, Xb, Y, U, HO, CM, R, B, __storeState = False):
"""
- 3DVAR incrémental
+ Correction
"""
#
# Initialisations
iOuter = selfA.StoredVariables["CurrentIterationNumber"][-1]
#
Xa = Xr
+ if __storeState: selfA._setInternalState("Xn", Xa)
#--------------------------
#
selfA.StoredVariables["Analysis"].store( Xa )
from daCore.NumericObjects import HessienneEstimation, QuantilesEstimations
# ==============================================================================
-def psas3dvar(selfA, Xb, Y, HO, R, B):
+def psas3dvar(selfA, Xb, Y, U, HO, CM, R, B, __storeState = False):
"""
- 3DVAR PSAS
+ Correction
"""
#
# Initialisations
if max(Y.shape) != max(HXb.shape):
raise ValueError("The shapes %s of observations Y and %s of observed calculation H(X) are different, they have to be identical."%(Y.shape,HXb.shape))
#
- if selfA._toStore("JacobianMatrixAtBackground"):
- HtMb = HO["Tangent"].asMatrix(ValueForMethodForm = Xb)
- HtMb = HtMb.reshape(Y.size,Xb.size) # ADAO & check shape
- selfA.StoredVariables["JacobianMatrixAtBackground"].store( HtMb )
- #
Ht = HO["Tangent"].asMatrix(Xb)
+ Ht = Ht.reshape(Y.size,Xb.size) # ADAO & check shape
BHT = B * Ht.T
HBHTpR = R + Ht * BHT
Innovation = Y - HXb
#
+ if selfA._toStore("JacobianMatrixAtBackground"):
+ selfA.StoredVariables["JacobianMatrixAtBackground"].store( Ht )
+ #
Xini = numpy.zeros(Y.size)
#
# Définition de la fonction-coût
Minimum = Xb + BHT @ Minimum.reshape((-1,1))
#
Xa = Minimum
+ if __storeState: selfA._setInternalState("Xn", Xa)
#--------------------------
#
selfA.StoredVariables["Analysis"].store( Xa )
from daCore.NumericObjects import HessienneEstimation, QuantilesEstimations
# ==============================================================================
-def std3dvar(selfA, Xb, Y, HO, R, B):
+def std3dvar(selfA, Xb, Y, U, HO, CM, R, B, __storeState = False):
"""
- 3DVAR
+ Correction
"""
#
# Initialisations
raise ValueError("The shapes %s of observations Y and %s of observed calculation H(X) are different, they have to be identical."%(Y.shape,HXb.shape))
#
if selfA._toStore("JacobianMatrixAtBackground"):
- HtMb = HO["Tangent"].asMatrix(ValueForMethodForm = Xb)
+ HtMb = HO["Tangent"].asMatrix(Xb)
HtMb = HtMb.reshape(Y.size,Xb.size) # ADAO & check shape
selfA.StoredVariables["JacobianMatrixAtBackground"].store( HtMb )
#
Minimum = selfA.StoredVariables["CurrentState"][IndexMin]
#
Xa = Minimum
+ if __storeState: selfA._setInternalState("Xn", Xa)
#--------------------------
#
selfA.StoredVariables["Analysis"].store( Xa )
# ==============================================================================
def std4dvar(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
"""
- 4DVAR
+ Correction
"""
#
# Initialisations
+++ /dev/null
-# -*- coding: utf-8 -*-
-#
-# Copyright (C) 2008-2022 EDF R&D
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-#
-# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
-#
-# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
-
-__doc__ = """
- Standard Kalman Filter
-"""
-__author__ = "Jean-Philippe ARGAUD"
-
-import numpy
-from daCore.PlatformInfo import PlatformInfo
-mpr = PlatformInfo().MachinePrecision()
-mfp = PlatformInfo().MaximumPrecision()
-
-# ==============================================================================
-def stdkf(selfA, Xb, Y, U, HO, EM, CM, R, B, Q):
- """
- Standard Kalman Filter
- """
- if selfA._parameters["EstimationOf"] == "Parameters":
- selfA._parameters["StoreInternalVariables"] = True
- #
- # Opérateurs
- # ----------
- Ht = HO["Tangent"].asMatrix(Xb)
- Ha = HO["Adjoint"].asMatrix(Xb)
- #
- if selfA._parameters["EstimationOf"] == "State":
- Mt = EM["Tangent"].asMatrix(Xb)
- Ma = EM["Adjoint"].asMatrix(Xb)
- #
- if CM is not None and "Tangent" in CM and U is not None:
- Cm = CM["Tangent"].asMatrix(Xb)
- else:
- Cm = None
- #
- # Durée d'observation et tailles
- if hasattr(Y,"stepnumber"):
- duration = Y.stepnumber()
- __p = numpy.cumprod(Y.shape())[-1]
- else:
- duration = 2
- __p = numpy.array(Y).size
- #
- # Précalcul des inversions de B et R
- if selfA._parameters["StoreInternalVariables"] \
- or selfA._toStore("CostFunctionJ") \
- or selfA._toStore("CostFunctionJb") \
- or selfA._toStore("CostFunctionJo") \
- or selfA._toStore("CurrentOptimum") \
- or selfA._toStore("APosterioriCovariance"):
- BI = B.getI()
- RI = R.getI()
- #
- __n = Xb.size
- nbPreviousSteps = len(selfA.StoredVariables["Analysis"])
- #
- if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
- Xn = Xb
- Pn = B
- selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
- selfA.StoredVariables["Analysis"].store( Xb )
- if selfA._toStore("APosterioriCovariance"):
- if hasattr(B,"asfullmatrix"):
- selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(__n) )
- else:
- selfA.StoredVariables["APosterioriCovariance"].store( B )
- selfA._setInternalState("seed", numpy.random.get_state())
- elif selfA._parameters["nextStep"]:
- Xn = selfA._getInternalState("Xn")
- Pn = selfA._getInternalState("Pn")
- #
- if selfA._parameters["EstimationOf"] == "Parameters":
- XaMin = Xn
- previousJMinimum = numpy.finfo(float).max
- #
- for step in range(duration-1):
- if hasattr(Y,"store"):
- Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
- else:
- Ynpu = numpy.ravel( Y ).reshape((__p,1))
- #
- if U is not None:
- if hasattr(U,"store") and len(U)>1:
- Un = numpy.ravel( U[step] ).reshape((-1,1))
- elif hasattr(U,"store") and len(U)==1:
- Un = numpy.ravel( U[0] ).reshape((-1,1))
- else:
- Un = numpy.ravel( U ).reshape((-1,1))
- else:
- Un = None
- #
- if selfA._parameters["EstimationOf"] == "State": # Forecast + Q and observation of forecast
- Xn_predicted = Mt @ Xn
- if Cm is not None and Un is not None: # Attention : si Cm est aussi dans M, doublon !
- Cm = Cm.reshape(__n,Un.size) # ADAO & check shape
- Xn_predicted = Xn_predicted + Cm @ Un
- Pn_predicted = Q + Mt * (Pn * Ma)
- elif selfA._parameters["EstimationOf"] == "Parameters": # Observation of forecast
- # --- > Par principe, M = Id, Q = 0
- Xn_predicted = Xn
- Pn_predicted = Pn
- #
- if selfA._parameters["EstimationOf"] == "State":
- HX_predicted = Ht @ Xn_predicted
- _Innovation = Ynpu - HX_predicted
- elif selfA._parameters["EstimationOf"] == "Parameters":
- HX_predicted = Ht @ Xn_predicted
- _Innovation = Ynpu - HX_predicted
- if Cm is not None and Un is not None: # Attention : si Cm est aussi dans H, doublon !
- _Innovation = _Innovation - Cm @ Un
- #
- Kn = Pn_predicted * Ha * numpy.linalg.inv(R + numpy.dot(Ht, Pn_predicted * Ha))
- Xn = Xn_predicted + Kn * _Innovation
- Pn = Pn_predicted - Kn * Ht * Pn_predicted
- #
- Xa = Xn # Pointeurs
- #--------------------------
- selfA._setInternalState("Xn", Xn)
- selfA._setInternalState("Pn", Pn)
- #--------------------------
- #
- selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
- # ---> avec analysis
- selfA.StoredVariables["Analysis"].store( Xa )
- if selfA._toStore("SimulatedObservationAtCurrentAnalysis"):
- selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"].store( Ht * Xa )
- if selfA._toStore("InnovationAtCurrentAnalysis"):
- selfA.StoredVariables["InnovationAtCurrentAnalysis"].store( _Innovation )
- # ---> avec current state
- if selfA._parameters["StoreInternalVariables"] \
- or selfA._toStore("CurrentState"):
- selfA.StoredVariables["CurrentState"].store( Xn )
- if selfA._toStore("ForecastState"):
- selfA.StoredVariables["ForecastState"].store( Xn_predicted )
- if selfA._toStore("ForecastCovariance"):
- selfA.StoredVariables["ForecastCovariance"].store( Pn_predicted )
- if selfA._toStore("BMA"):
- selfA.StoredVariables["BMA"].store( Xn_predicted - Xa )
- if selfA._toStore("InnovationAtCurrentState"):
- selfA.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
- if selfA._toStore("SimulatedObservationAtCurrentState") \
- or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
- selfA.StoredVariables["SimulatedObservationAtCurrentState"].store( HX_predicted )
- # ---> autres
- if selfA._parameters["StoreInternalVariables"] \
- or selfA._toStore("CostFunctionJ") \
- or selfA._toStore("CostFunctionJb") \
- or selfA._toStore("CostFunctionJo") \
- or selfA._toStore("CurrentOptimum") \
- or selfA._toStore("APosterioriCovariance"):
- Jb = float( 0.5 * (Xa - Xb).T * (BI * (Xa - Xb)) )
- Jo = float( 0.5 * _Innovation.T * (RI * _Innovation) )
- J = Jb + Jo
- selfA.StoredVariables["CostFunctionJb"].store( Jb )
- selfA.StoredVariables["CostFunctionJo"].store( Jo )
- selfA.StoredVariables["CostFunctionJ" ].store( J )
- #
- if selfA._toStore("IndexOfOptimum") \
- or selfA._toStore("CurrentOptimum") \
- or selfA._toStore("CostFunctionJAtCurrentOptimum") \
- or selfA._toStore("CostFunctionJbAtCurrentOptimum") \
- or selfA._toStore("CostFunctionJoAtCurrentOptimum") \
- or selfA._toStore("SimulatedObservationAtCurrentOptimum"):
- IndexMin = numpy.argmin( selfA.StoredVariables["CostFunctionJ"][nbPreviousSteps:] ) + nbPreviousSteps
- if selfA._toStore("IndexOfOptimum"):
- selfA.StoredVariables["IndexOfOptimum"].store( IndexMin )
- if selfA._toStore("CurrentOptimum"):
- selfA.StoredVariables["CurrentOptimum"].store( selfA.StoredVariables["Analysis"][IndexMin] )
- if selfA._toStore("SimulatedObservationAtCurrentOptimum"):
- selfA.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( selfA.StoredVariables["SimulatedObservationAtCurrentAnalysis"][IndexMin] )
- if selfA._toStore("CostFunctionJbAtCurrentOptimum"):
- selfA.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJb"][IndexMin] )
- if selfA._toStore("CostFunctionJoAtCurrentOptimum"):
- selfA.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( selfA.StoredVariables["CostFunctionJo"][IndexMin] )
- if selfA._toStore("CostFunctionJAtCurrentOptimum"):
- selfA.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( selfA.StoredVariables["CostFunctionJ" ][IndexMin] )
- if selfA._toStore("APosterioriCovariance"):
- selfA.StoredVariables["APosterioriCovariance"].store( Pn )
- if selfA._parameters["EstimationOf"] == "Parameters" \
- and J < previousJMinimum:
- previousJMinimum = J
- XaMin = Xa
- if selfA._toStore("APosterioriCovariance"):
- covarianceXaMin = selfA.StoredVariables["APosterioriCovariance"][-1]
- #
- # Stockage final supplémentaire de l'optimum en estimation de paramètres
- # ----------------------------------------------------------------------
- if selfA._parameters["EstimationOf"] == "Parameters":
- selfA.StoredVariables["CurrentIterationNumber"].store( len(selfA.StoredVariables["Analysis"]) )
- selfA.StoredVariables["Analysis"].store( XaMin )
- if selfA._toStore("APosterioriCovariance"):
- selfA.StoredVariables["APosterioriCovariance"].store( covarianceXaMin )
- if selfA._toStore("BMA"):
- selfA.StoredVariables["BMA"].store( numpy.ravel(Xb) - numpy.ravel(XaMin) )
- #
- return 0
-
-# ==============================================================================
-if __name__ == "__main__":
- print('\n AUTODIAGNOSTIC\n')
Wc = numpy.array( Ww )
Wc[0] = Lambda / (L + Lambda) + (1. - Alpha**2 + Beta)
#
- # Opérateurs
- Hm = HO["Direct"].appliedControledFormTo
- #
- if selfA._parameters["EstimationOf"] == "State":
- Mm = EM["Direct"].appliedControledFormTo
- #
- if CM is not None and "Tangent" in CM and U is not None:
- Cm = CM["Tangent"].asMatrix(Xb)
- else:
- Cm = None
- #
# Durée d'observation et tailles
if hasattr(Y,"stepnumber"):
duration = Y.stepnumber()
previousJMinimum = numpy.finfo(float).max
#
for step in range(duration-1):
- if hasattr(Y,"store"):
- Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
- else:
- Ynpu = numpy.ravel( Y ).reshape((__p,1))
#
if U is not None:
if hasattr(U,"store") and len(U)>1:
else:
Un = None
#
+ if CM is not None and "Tangent" in CM and U is not None:
+ Cm = CM["Tangent"].asMatrix(Xn)
+ else:
+ Cm = None
+ #
Pndemi = numpy.real(scipy.linalg.sqrtm(Pn))
Xnp = numpy.hstack([Xn, Xn+Gamma*Pndemi, Xn-Gamma*Pndemi])
nbSpts = 2*Xn.size+1
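+        # Same 2n+1 sigma-point construction as in the unscented variant above.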
XEtnnp = []
for point in range(nbSpts):
if selfA._parameters["EstimationOf"] == "State":
+ Mm = EM["Direct"].appliedControledFormTo
XEtnnpi = numpy.asarray( Mm( (Xnp[:,point], Un) ) ).reshape((-1,1))
if Cm is not None and Un is not None: # Attention : si Cm est aussi dans M, doublon !
Cm = Cm.reshape(Xn.size,Un.size) # ADAO & check shape
#
Xnnp = numpy.hstack([Xncm.reshape((-1,1)), Xncm.reshape((-1,1))+Gamma*Pnmdemi, Xncm.reshape((-1,1))-Gamma*Pnmdemi])
#
+ Hm = HO["Direct"].appliedControledFormTo
Ynnp = []
for point in range(nbSpts):
if selfA._parameters["EstimationOf"] == "State":
-            Pyyn += Wc[i] * ((Ynnp[:,point]-Yncm).reshape((-1,1)) * (Ynnp[:,point]-Yncm))
-            Pxyn += Wc[i] * ((Xnnp[:,point]-Xncm).reshape((-1,1)) * (Ynnp[:,point]-Yncm))
+            Pyyn += Wc[point] * ((Ynnp[:,point]-Yncm).reshape((-1,1)) * (Ynnp[:,point]-Yncm))
+            Pxyn += Wc[point] * ((Xnnp[:,point]-Xncm).reshape((-1,1)) * (Ynnp[:,point]-Yncm))
#
+ if hasattr(Y,"store"):
+ Ynpu = numpy.ravel( Y[step+1] ).reshape((__p,1))
+ else:
+ Ynpu = numpy.ravel( Y ).reshape((__p,1))
_Innovation = Ynpu - Yncm.reshape((-1,1))
if selfA._parameters["EstimationOf"] == "Parameters":
if Cm is not None and Un is not None: # Attention : si Cm est aussi dans H, doublon !
mpr = PlatformInfo().MachinePrecision()
# ==============================================================================
-def van3dvar(selfA, Xb, Y, HO, R, B):
+def van3dvar(selfA, Xb, Y, U, HO, CM, R, B, __storeState = False):
"""
- 3DVAR variational analysis with no inversion of B
+ Correction
"""
#
# Initialisations
Minimum = Xb + B * Minimum.reshape((-1,1)) # Pas @
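+    # The increment is preconditioned by B (change of variable dX = B v), so
+    # this variant never inverts B. The "# Pas @" note keeps the "*" product,
+    # as B may be an ADAO Covariance object rather than a plain ndarray here.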
#
Xa = Minimum
+ if __storeState: selfA._setInternalState("Xn", Xa)
#--------------------------
#
selfA.StoredVariables["Analysis"].store( Xa )
"CostFunctionJbAtCurrentOptimum",
"CostFunctionJo",
"CostFunctionJoAtCurrentOptimum",
- "CurrentIterationNumber",
"CurrentOptimum",
"CurrentState",
+ "CurrentStepNumber",
"ForecastState",
"Innovation",
"InnovationAtCurrentAnalysis",
#
#--------------------------
elif self._parameters["Variant"] == "OneCorrection":
- ecwblue.ecwblue(self, Xb, Y, HO, R, B)
+ ecwblue.ecwblue(self, Xb, Y, U, HO, CM, R, B)
#
#--------------------------
else:
"CostFunctionJoAtCurrentOptimum",
"CurrentOptimum",
"CurrentState",
+ "CurrentStepNumber",
"ForecastState",
"Innovation",
"InnovationAtCurrentAnalysis",
#
#--------------------------
elif self._parameters["Variant"] == "OneCorrection":
- ecwexblue.ecwexblue(self, Xb, Y, HO, R, B)
+ ecwexblue.ecwexblue(self, Xb, Y, U, HO, CM, R, B)
#
#--------------------------
else:
#
# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
-from daCore import BasicObjects
-from daAlgorithms.Atoms import stdkf
+from daCore import BasicObjects, NumericObjects
+from daAlgorithms.Atoms import ecwstdkf
# ==============================================================================
class ElementaryAlgorithm(BasicObjects.Algorithm):
def __init__(self):
BasicObjects.Algorithm.__init__(self, "KALMANFILTER")
+ self.defineRequiredParameter(
+ name = "Variant",
+ default = "KalmanFilter",
+ typecast = str,
+ message = "Variant ou formulation de la méthode",
+ listval = [
+ "KalmanFilter",
+ "OneCorrection",
+ ],
+ )
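+        # "KalmanFilter" runs the full multi-step predict/correct recursion
+        # through NumericObjects.multiXOsteps, while "OneCorrection" applies a
+        # single analysis update to the given background. Typical driver call
+        # (sketch, using the adaoBuilder API):
+        #   case.setAlgorithmParameters( Algorithm="KalmanFilter",
+        #       Parameters={"Variant":"KalmanFilter", "EstimationOf":"State"} )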
self.defineRequiredParameter(
name = "EstimationOf",
default = "State",
typecast = str,
- message = "Estimation d'etat ou de parametres",
+ message = "Estimation d'état ou de paramètres",
listval = ["State", "Parameters"],
)
self.defineRequiredParameter(
"CostFunctionJbAtCurrentOptimum",
"CostFunctionJo",
"CostFunctionJoAtCurrentOptimum",
- "CurrentIterationNumber",
"CurrentOptimum",
"CurrentState",
+ "CurrentStepNumber",
"ForecastCovariance",
"ForecastState",
"IndexOfOptimum",
self._pre_run(Parameters, Xb, Y, U, HO, EM, CM, R, B, Q)
#
#--------------------------
- stdkf.stdkf(self, Xb, Y, U, HO, EM, CM, R, B, Q)
+ if self._parameters["Variant"] == "KalmanFilter":
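+            # The two trailing booleans enable the covariance forecast and the
+            # linearized evolution model in multiXOsteps, which together form
+            # the genuine Kalman predict step before each ecwstdkf correction.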
+ NumericObjects.multiXOsteps(self, Xb, Y, U, HO, EM, CM, R, B, Q, ecwstdkf.ecwstdkf, True, True)
+ #
+ #--------------------------
+ elif self._parameters["Variant"] == "OneCorrection":
+ ecwstdkf.ecwstdkf(self, Xb, Y, U, HO, CM, R, B)
+ #
#--------------------------
+ else:
+ raise ValueError("Error in Variant name: %s"%self._parameters["Variant"])
#
self._post_run(HO)
return 0
"CostFunctionJoAtCurrentOptimum",
"CurrentOptimum",
"CurrentState",
+ "CurrentStepNumber",
"ForecastState",
"InnovationAtCurrentAnalysis",
"OMA",
#
#--------------------------
elif self._parameters["Variant"] == "OneCorrection":
- ecwlls.ecwlls(self, Xb, Y, HO, R, B)
+ ecwlls.ecwlls(self, Xb, Y, U, HO, CM, R, B)
#
#--------------------------
else:
"CurrentIterationNumber",
"CurrentOptimum",
"CurrentState",
+ "CurrentStepNumber",
"ForecastState",
"IndexOfOptimum",
"Innovation",
#
#--------------------------
elif self._parameters["Variant"] == "OneCorrection":
- ecwnlls.ecwnlls(self, Xb, Y, HO, R, B)
+ ecwnlls.ecwnlls(self, Xb, Y, U, HO, CM, R, B)
#
#--------------------------
else:
"CostFunctionJbAtCurrentOptimum",
"CostFunctionJo",
"CostFunctionJoAtCurrentOptimum",
- "CurrentIterationNumber",
"CurrentOptimum",
"CurrentState",
"ForecastCovariance",
selfB._parameters["Bounds"] = None
selfB._parameters["InitializationPoint"] = Xf
from daAlgorithms.Atoms import std3dvar
- std3dvar.std3dvar(selfB, Xf, __Ynpu, __HO, __R, Pf)
+ std3dvar.std3dvar(selfB, Xf, __Ynpu, None, __HO, None, __R, Pf)
Xa = selfB.get("Analysis")[-1].reshape((-1,1))
del selfB
#
return Xa + EnsembleOfAnomalies( __EnXn )
# ==============================================================================
-def multiXOsteps(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, oneCycle):
+def multiXOsteps(selfA, Xb, Y, U, HO, EM, CM, R, B, Q, oneCycle,
+ __CovForecast = False, __LinEvolution = False,
+ ):
"""
Prévision multi-pas avec une correction par pas (multi-méthodes)
"""
if selfA._parameters["EstimationOf"] == "State":
if len(selfA.StoredVariables["Analysis"])==0 or not selfA._parameters["nextStep"]:
Xn = numpy.asarray(Xb)
+ if __CovForecast: Pn = B
selfA.StoredVariables["Analysis"].store( Xn )
if selfA._toStore("APosterioriCovariance"):
if hasattr(B,"asfullmatrix"):
selfA.StoredVariables["APosterioriCovariance"].store( B.asfullmatrix(Xn.size) )
else:
selfA.StoredVariables["APosterioriCovariance"].store( B )
- if selfA._toStore("ForecastState"):
- selfA.StoredVariables["ForecastState"].store( Xn )
selfA._setInternalState("seed", numpy.random.get_state())
elif selfA._parameters["nextStep"]:
Xn = selfA._getInternalState("Xn")
+ if __CovForecast: Pn = selfA._getInternalState("Pn")
else:
Xn = numpy.asarray(Xb)
+ if __CovForecast: Pn = B
#
if hasattr(Y,"stepnumber"):
duration = Y.stepnumber()
# Multi-steps
# -----------
for step in range(duration-1):
+ selfA.StoredVariables["CurrentStepNumber"].store( len(selfA.StoredVariables["Analysis"]) )
+ #
if hasattr(Y,"store"):
Ynpu = numpy.asarray( Y[step+1] ).reshape((-1,1))
else:
else:
Un = None
#
- if selfA._parameters["EstimationOf"] == "State": # Forecast
- M = EM["Direct"].appliedControledFormTo
- if CM is not None and "Tangent" in CM and Un is not None:
- Cm = CM["Tangent"].asMatrix(Xn)
+ # Predict (Time Update)
+ # ---------------------
+ if selfA._parameters["EstimationOf"] == "State":
+ if __CovForecast or __LinEvolution:
+ Mt = EM["Tangent"].asMatrix(Xn)
+ Mt = Mt.reshape(Xn.size,Xn.size) # ADAO & check shape
+ if __CovForecast:
+ Ma = EM["Adjoint"].asMatrix(Xn)
+ Ma = Ma.reshape(Xn.size,Xn.size) # ADAO & check shape
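+                # Kalman time update of the covariance, P_f = Mt Pn Mt^T + Q,
+                # written with the adjoint Ma, i.e. the transpose of Mt.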
+ Pn_predicted = Q + Mt @ (Pn @ Ma)
+ if __LinEvolution:
+ Xn_predicted = Mt @ Xn
else:
- Cm = None
- #
- Xn_predicted = M( (Xn, Un) )
- if selfA._toStore("ForecastState"):
- selfA.StoredVariables["ForecastState"].store( Xn_predicted )
- if Cm is not None and Un is not None: # Attention : si Cm est aussi dans M, doublon !
+ M = EM["Direct"].appliedControledFormTo
+                Xn_predicted = numpy.ravel( M( (Xn, Un) ) ).reshape((-1,1))
+ if CM is not None and "Tangent" in CM and Un is not None: # Attention : si Cm est aussi dans M, doublon !
+ Cm = CM["Tangent"].asMatrix(Xn_predicted)
Cm = Cm.reshape(Xn.size,Un.size) # ADAO & check shape
- Xn_predicted = Xn_predicted + Cm @ Un
+ Xn_predicted = Xn_predicted + (Cm @ Un).reshape((-1,1))
elif selfA._parameters["EstimationOf"] == "Parameters": # No forecast
# --- > Par principe, M = Id, Q = 0
Xn_predicted = Xn
+ if __CovForecast: Pn_predicted = Pn
Xn_predicted = numpy.asarray(Xn_predicted).reshape((-1,1))
+ if selfA._toStore("ForecastState"):
+ selfA.StoredVariables["ForecastState"].store( Xn_predicted )
+ if __CovForecast:
+ if hasattr(Pn_predicted,"asfullmatrix"):
+ Pn_predicted = Pn_predicted.asfullmatrix(Xn.size)
+ else:
+ Pn_predicted = numpy.asarray(Pn_predicted).reshape((Xn.size,Xn.size))
+ if selfA._toStore("ForecastCovariance"):
+ selfA.StoredVariables["ForecastCovariance"].store( Pn_predicted )
#
- oneCycle(selfA, Xn_predicted, Ynpu, HO, R, B) # Correct
+ # Correct (Measurement Update)
+ # ----------------------------
+ if __CovForecast:
+ oneCycle(selfA, Xn_predicted, Ynpu, Un, HO, CM, R, Pn_predicted, True)
+ else:
+ oneCycle(selfA, Xn_predicted, Ynpu, Un, HO, CM, R, B, True)
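+        # The trailing True is __storeState: the correction atom records its
+        # analysis in the internal state ("Xn", plus "Pn" when it updates a
+        # covariance), which is read back just below to seed the next step.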
#
- Xn = selfA.StoredVariables["Analysis"][-1]
#--------------------------
- selfA._setInternalState("Xn", Xn)
+ Xn = selfA._getInternalState("Xn")
+ if __CovForecast: Pn = selfA._getInternalState("Pn")
#
return 0