Salome HOME
projects
/
modules
/
adao.git
/ commitdiff
commit
grep
author
committer
pickaxe
?
search:
re
summary
|
shortlog
|
log
|
commit
| commitdiff |
tree
raw
|
patch
|
inline
| side by side (parent:
2e6fd5a
)
Improving KF and EKF performances for parameter estimation
author
Jean-Philippe ARGAUD
<jean-philippe.argaud@edf.fr>
Thu, 7 Feb 2013 20:15:37 +0000
(21:15 +0100)
committer
Jean-Philippe ARGAUD
<jean-philippe.argaud@edf.fr>
Thu, 7 Feb 2013 20:15:37 +0000
(21:15 +0100)
src/daComposant/daAlgorithms/Blue.py
patch
|
blob
|
history
src/daComposant/daAlgorithms/ExtendedKalmanFilter.py
patch
|
blob
|
history
src/daComposant/daAlgorithms/KalmanFilter.py
patch
|
blob
|
history
diff --git
a/src/daComposant/daAlgorithms/Blue.py
b/src/daComposant/daAlgorithms/Blue.py
index 46130cfb969a385962aace279220dfc8441e5441..99619a219a3d2aa10c5e1de716ad6eb20ed41c80 100644
(file)
--- a/src/daComposant/daAlgorithms/Blue.py
+++ b/src/daComposant/daAlgorithms/Blue.py
@@ -48,8 +48,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
#
# Opérateur d'observation
# -----------------------
#
# Opérateur d'observation
# -----------------------
-        Hm = HO["Tangent"].asMatrix(None)
-        Ha = HO["Adjoint"].asMatrix(None)
+        Hm = HO["Tangent"].asMatrix(Xb)
+        Ha = HO["Adjoint"].asMatrix(Xb)
#
# Utilisation éventuelle d'un vecteur H(Xb) précalculé
# ----------------------------------------------------
#
# Utilisation éventuelle d'un vecteur H(Xb) précalculé
# ----------------------------------------------------
diff --git
a/src/daComposant/daAlgorithms/ExtendedKalmanFilter.py
b/src/daComposant/daAlgorithms/ExtendedKalmanFilter.py
index f025924e7c1d6fa87e03993ec4bd8fbb9cd685ea..053a9fb7687a6c6daa8ba13e8579d32dbed2b487 100644
(file)
--- a/src/daComposant/daAlgorithms/ExtendedKalmanFilter.py
+++ b/src/daComposant/daAlgorithms/ExtendedKalmanFilter.py
@@ -61,7 +61,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
#
H = HO["Direct"].appliedControledFormTo
#
#
H = HO["Direct"].appliedControledFormTo
#
- M = EM["Direct"].appliedControledFormTo
+ if self._parameters["EstimationType"] == "State":
+ M = EM["Direct"].appliedControledFormTo
#
# Nombre de pas du Kalman identique au nombre de pas d'observations
# -----------------------------------------------------------------
#
# Nombre de pas du Kalman identique au nombre de pas d'observations
# -----------------------------------------------------------------
@@ -102,10 +103,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
Ha = HO["Adjoint"].asMatrix(ValueForMethodForm = Xn)
Ha = Ha.reshape(Xn.size,Ynpu.size) # ADAO & check shape
#
Ha = HO["Adjoint"].asMatrix(ValueForMethodForm = Xn)
Ha = Ha.reshape(Xn.size,Ynpu.size) # ADAO & check shape
#
- Mt = EM["Tangent"].asMatrix(ValueForMethodForm = Xn)
- Mt = Mt.reshape(Xn.size,Xn.size) # ADAO & check shape
- Ma = EM["Adjoint"].asMatrix(ValueForMethodForm = Xn)
- Ma = Ma.reshape(Xn.size,Xn.size) # ADAO & check shape
+ if self._parameters["EstimationType"] == "State":
+ Mt = EM["Tangent"].asMatrix(ValueForMethodForm = Xn)
+ Mt = Mt.reshape(Xn.size,Xn.size) # ADAO & check shape
+ Ma = EM["Adjoint"].asMatrix(ValueForMethodForm = Xn)
+ Ma = Ma.reshape(Xn.size,Xn.size) # ADAO & check shape
#
if U is not None:
if hasattr(U,"store") and len(U)>1:
#
if U is not None:
if hasattr(U,"store") and len(U)>1:
@@ -119,14 +121,19 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
#
if self._parameters["EstimationType"] == "State":
Xn_predicted = numpy.asmatrix(numpy.ravel( M( (Xn, Un) ) )).T
#
if self._parameters["EstimationType"] == "State":
Xn_predicted = numpy.asmatrix(numpy.ravel( M( (Xn, Un) ) )).T
- else:
- Xn_predicted = numpy.asmatrix(numpy.ravel( M( (Xn, None) ) )).T
- Pn_predicted = Mt * Pn * Ma + Q
+ Pn_predicted = Mt * Pn * Ma + Q
+ elif self._parameters["EstimationType"] == "Parameters":
+ # Xn_predicted = numpy.asmatrix(numpy.ravel( M( (Xn, None) ) )).T
+ # Pn_predicted = Mt * Pn * Ma + Q
+ # --- > Par principe, M = Id, Q = 0
+ Xn_predicted = Xn
+ Pn_predicted = Pn
#
if self._parameters["EstimationType"] == "Parameters":
d = Ynpu - numpy.asmatrix(numpy.ravel( H( (Xn_predicted, Un) ) )).T
#
if self._parameters["EstimationType"] == "Parameters":
d = Ynpu - numpy.asmatrix(numpy.ravel( H( (Xn_predicted, Un) ) )).T
-            else:
+            elif self._parameters["EstimationType"] == "State":
d = Ynpu - numpy.asmatrix(numpy.ravel( H( (Xn_predicted, None) ) )).T
d = Ynpu - numpy.asmatrix(numpy.ravel( H( (Xn_predicted, None) ) )).T
+ #
K = Pn_predicted * Ha * (Ht * Pn_predicted * Ha + R).I
Xn = Xn_predicted + K * d
Pn = Pn_predicted - K * Ht * Pn_predicted
K = Pn_predicted * Ha * (Ht * Pn_predicted * Ha + R).I
Xn = Xn_predicted + K * d
Pn = Pn_predicted - K * Ht * Pn_predicted
diff --git
a/src/daComposant/daAlgorithms/KalmanFilter.py
b/src/daComposant/daAlgorithms/KalmanFilter.py
index cd96ea1418a0bee800cb724dc9f0a21911ee8688..118fd9ec98d2215c219309980ffdccec505a85c8 100644
(file)
--- a/src/daComposant/daAlgorithms/KalmanFilter.py
+++ b/src/daComposant/daAlgorithms/KalmanFilter.py
@@ -59,14 +59,15 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
if R is None:
raise ValueError("Observation error covariance matrix has to be properly defined!")
#
if R is None:
raise ValueError("Observation error covariance matrix has to be properly defined!")
#
-        Ht = HO["Tangent"].asMatrix(None)
-        Ha = HO["Adjoint"].asMatrix(None)
+        Ht = HO["Tangent"].asMatrix(Xb)
+        Ha = HO["Adjoint"].asMatrix(Xb)
#
#
- Mt = EM["Tangent"].asMatrix(None)
- Ma = EM["Adjoint"].asMatrix(None)
+ if self._parameters["EstimationType"] == "State":
+ Mt = EM["Tangent"].asMatrix(Xb)
+ Ma = EM["Adjoint"].asMatrix(Xb)
#
if CM is not None and CM.has_key("Tangent") and U is not None:
#
if CM is not None and CM.has_key("Tangent") and U is not None:
-            Cm = CM["Tangent"].asMatrix(None)
+            Cm = CM["Tangent"].asMatrix(Xb)
else:
Cm = None
#
else:
Cm = None
#
@@ -113,14 +114,22 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
#
if self._parameters["EstimationType"] == "State" and Cm is not None and Un is not None:
Xn_predicted = Mt * Xn + Cm * Un
#
if self._parameters["EstimationType"] == "State" and Cm is not None and Un is not None:
Xn_predicted = Mt * Xn + Cm * Un
- else:
+ Pn_predicted = Mt * Pn * Ma + Q
+ elif self._parameters["EstimationType"] == "State" and (Cm is None or Un is None):
Xn_predicted = Mt * Xn
Xn_predicted = Mt * Xn
- Pn_predicted = Mt * Pn * Ma + Q
+ Pn_predicted = Mt * Pn * Ma + Q
+ elif self._parameters["EstimationType"] == "Parameters":
+ # Xn_predicted = Mt * Xn
+ # Pn_predicted = Mt * Pn * Ma + Q
+ # --- > Par principe, M = Id, Q = 0
+ Xn_predicted = Xn
+ Pn_predicted = Pn
#
if self._parameters["EstimationType"] == "Parameters" and Cm is not None and Un is not None:
d = Ynpu - Ht * Xn_predicted - Cm * Un
else:
d = Ynpu - Ht * Xn_predicted
#
if self._parameters["EstimationType"] == "Parameters" and Cm is not None and Un is not None:
d = Ynpu - Ht * Xn_predicted - Cm * Un
else:
d = Ynpu - Ht * Xn_predicted
+ #
K = Pn_predicted * Ha * (Ht * Pn_predicted * Ha + R).I
Xn = Xn_predicted + K * d
Pn = Pn_predicted - K * Ht * Pn_predicted
K = Pn_predicted * Ha * (Ht * Pn_predicted * Ha + R).I
Xn = Xn_predicted + K * d
Pn = Pn_predicted - K * Ht * Pn_predicted