From 41a17018c0720c4270debd7de5778088156fe935 Mon Sep 17 00:00:00 2001
From: Jean-Philippe ARGAUD
Date: Wed, 25 Jan 2017 21:59:13 +0100
Subject: [PATCH] Minor source and test corrections for precision control

---
 src/daComposant/daAlgorithms/3DVAR.py         |  6 +-
 src/daComposant/daAlgorithms/Blue.py          |  6 +-
 src/daComposant/daAlgorithms/ExtendedBlue.py  |  6 +-
 ...erification_des_Assimilation_Algorithms.py | 75 ++++++++++++++++++-
 4 files changed, 83 insertions(+), 10 deletions(-)

diff --git a/src/daComposant/daAlgorithms/3DVAR.py b/src/daComposant/daAlgorithms/3DVAR.py
index 2c80c6d..50193f7 100644
--- a/src/daComposant/daAlgorithms/3DVAR.py
+++ b/src/daComposant/daAlgorithms/3DVAR.py
@@ -168,9 +168,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             if "InnovationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]:
                 self.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
             #
-            Jb  = 0.5 * (_X - Xb).T * BI * (_X - Xb)
-            Jo  = 0.5 * _Innovation.T * RI * _Innovation
-            J   = float( Jb ) + float( Jo )
+            Jb  = float( 0.5 * (_X - Xb).T * BI * (_X - Xb) )
+            Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
+            J   = Jb + Jo
             #
             self.StoredVariables["CostFunctionJb"].store( Jb )
             self.StoredVariables["CostFunctionJo"].store( Jo )
diff --git a/src/daComposant/daAlgorithms/Blue.py b/src/daComposant/daAlgorithms/Blue.py
index f59dc38..2f6c2d8 100644
--- a/src/daComposant/daAlgorithms/Blue.py
+++ b/src/daComposant/daAlgorithms/Blue.py
@@ -132,9 +132,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
            "CostFunctionJ" in self._parameters["StoreSupplementaryCalculations"] or \
            "MahalanobisConsistency" in self._parameters["StoreSupplementaryCalculations"]:
             #
-            Jb  = 0.5 * (Xa - Xb).T * BI * (Xa - Xb)
-            Jo  = 0.5 * oma.T * RI * oma
-            J   = float( Jb ) + float( Jo )
+            Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
+            Jo  = float( 0.5 * oma.T * RI * oma )
+            J   = Jb + Jo
             #
             self.StoredVariables["CostFunctionJb"].store( Jb )
             self.StoredVariables["CostFunctionJo"].store( Jo )
diff --git a/src/daComposant/daAlgorithms/ExtendedBlue.py b/src/daComposant/daAlgorithms/ExtendedBlue.py
index ff57610..4ecda94 100644
--- a/src/daComposant/daAlgorithms/ExtendedBlue.py
+++ b/src/daComposant/daAlgorithms/ExtendedBlue.py
@@ -132,9 +132,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         if self._parameters["StoreInternalVariables"] or \
            "CostFunctionJ" in self._parameters["StoreSupplementaryCalculations"] or \
            "MahalanobisConsistency" in self._parameters["StoreSupplementaryCalculations"]:
-            Jb  = 0.5 * (Xa - Xb).T * BI * (Xa - Xb)
-            Jo  = 0.5 * oma.T * RI * oma
-            J   = float( Jb ) + float( Jo )
+            Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
+            Jo  = float( 0.5 * oma.T * RI * oma )
+            J   = Jb + Jo
             self.StoredVariables["CostFunctionJb"].store( Jb )
             self.StoredVariables["CostFunctionJo"].store( Jo )
             self.StoredVariables["CostFunctionJ" ].store( J )
diff --git a/test/test6901/Verification_des_Assimilation_Algorithms.py b/test/test6901/Verification_des_Assimilation_Algorithms.py
index ab34c37..e933b95 100644
--- a/test/test6901/Verification_des_Assimilation_Algorithms.py
+++ b/test/test6901/Verification_des_Assimilation_Algorithms.py
@@ -24,7 +24,7 @@
 # ==============================================================================
 import adaoBuilder, numpy
 def test1():
-    """Verification de la disponibilite de l'ensemble des algorithmes"""
+    """Verification de la disponibilite de l'ensemble des algorithmes\n(Utilisation d'un operateur matriciel)"""
     Xa = {}
     for algo in ("3DVAR", "Blue", "ExtendedBlue", "LinearLeastSquares", "NonLinearLeastSquares", "DerivativeFreeOptimization"):
         print
@@ -107,6 +107,78 @@ def test1():
     #
     return 0
 
+def test2():
+    """Verification de la disponibilite de l'ensemble des algorithmes\n(Utilisation d'un operateur fonctionnel)"""
+    Xa = {}
+    M = numpy.matrix("1 0 0;0 2 0;0 0 3")
+    def H(x): return M * numpy.asmatrix(numpy.ravel( x )).T
+    for algo in ("3DVAR", "Blue", "ExtendedBlue", "NonLinearLeastSquares", "DerivativeFreeOptimization"):
+        print
+        msg = "Algorithme en test : %s"%algo
+        print msg+"\n"+"-"*len(msg)
+        #
+        adaopy = adaoBuilder.New()
+        adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "Bounds":[[-1,10.],[-1,10.],[-1,10.]]})
+        adaopy.setBackground         (Vector = [0,1,2])
+        adaopy.setBackgroundError    (ScalarSparseMatrix = 1.)
+        adaopy.setObservation        (Vector = [0.5,1.5,2.5])
+        adaopy.setObservationError   (DiagonalSparseMatrix = "1 1 1")
+        adaopy.setObservationOperator(OneFunction = H)
+        adaopy.setObserver("Analysis",Template="ValuePrinter")
+        adaopy.execute()
+        Xa[algo] = adaopy.get("Analysis")[-1]
+        del adaopy
+    #
+    M = numpy.matrix("1 0 0;0 2 0;0 0 3")
+    def H(x): return M * numpy.asmatrix(numpy.ravel( x )).T
+    for algo in ("ExtendedKalmanFilter", "KalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
+        print
+        msg = "Algorithme en test : %s"%algo
+        print msg+"\n"+"-"*len(msg)
+        #
+        adaopy = adaoBuilder.New()
+        adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, })
+        adaopy.setBackground         (Vector = [0,1,2])
+        adaopy.setBackgroundError    (ScalarSparseMatrix = 1.)
+        adaopy.setObservation        (Vector = [0.5,1.5,2.5])
+        adaopy.setObservationError   (DiagonalSparseMatrix = "1 1 1")
+        adaopy.setObservationOperator(OneFunction = H)
+        adaopy.setEvolutionError     (ScalarSparseMatrix = 1.)
+        adaopy.setEvolutionModel     (Matrix = "1 0 0;0 1 0;0 0 1")
+        adaopy.setObserver("Analysis",Template="ValuePrinter")
+        adaopy.execute()
+        Xa[algo] = adaopy.get("Analysis")[-1]
+        del adaopy
+    #
+    M = numpy.matrix("1 0 0;0 1 0;0 0 1")
+    def H(x): return M * numpy.asmatrix(numpy.ravel( x )).T
+    for algo in ("ParticleSwarmOptimization", "QuantileRegression", ):
+        print
+        msg = "Algorithme en test : %s"%algo
+        print msg+"\n"+"-"*len(msg)
+        #
+        adaopy = adaoBuilder.New()
+        adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"BoxBounds":3*[[-1,3]], "SetSeed":1000, })
+        adaopy.setBackground         (Vector = [0,1,2])
+        adaopy.setBackgroundError    (ScalarSparseMatrix = 1.)
+        adaopy.setObservation        (Vector = [0.5,1.5,2.5])
+        adaopy.setObservationError   (DiagonalSparseMatrix = "1 2 3")
+        adaopy.setObservationOperator(OneFunction = H)
+        adaopy.setObserver("Analysis",Template="ValuePrinter")
+        adaopy.execute()
+        Xa[algo] = adaopy.get("Analysis")[-1]
+        del adaopy
+    #
+    print
+    msg = "Tests des ecarts attendus :"
+    print msg+"\n"+"="*len(msg)
+    verify_similarity_of_algo_results(("3DVAR", "Blue", "ExtendedBlue", "4DVAR", "DerivativeFreeOptimization"), Xa)
+    verify_similarity_of_algo_results(("ExtendedKalmanFilter", "KalmanFilter", "UnscentedKalmanFilter"), Xa)
+    print "  Les resultats obtenus sont corrects."
+    print
+    #
+    return 0
+
 def almost_equal_vectors(v1, v2, precision = 1.e-15, msg = ""):
     """Comparaison de deux vecteurs"""
     print "  Difference maximale %s: %.2e"%(msg, max(abs(v2 - v1)))
@@ -124,3 +196,4 @@ def verify_similarity_of_algo_results(serie = [], Xa = {}):
 if __name__ == "__main__":
     print '\n AUTODIAGNOSTIC \n'
     test1()
+    test2()
-- 
2.39.2