SALOME platform Git repositories - modules/adao.git/commitdiff
Minor source and test corrections for precision control
author Jean-Philippe ARGAUD <jean-philippe.argaud@edf.fr>
Wed, 25 Jan 2017 20:59:13 +0000 (21:59 +0100)
committer Jean-Philippe ARGAUD <jean-philippe.argaud@edf.fr>
Wed, 25 Jan 2017 20:59:13 +0000 (21:59 +0100)
src/daComposant/daAlgorithms/3DVAR.py
src/daComposant/daAlgorithms/Blue.py
src/daComposant/daAlgorithms/ExtendedBlue.py
test/test6901/Verification_des_Assimilation_Algorithms.py

src/daComposant/daAlgorithms/3DVAR.py
index 2c80c6d032b83ae8c8e9b9b377cc135952aff0e1..50193f79a0689761fbcb9a7b361d80e12e0e115d 100644 (file)
@@ -168,9 +168,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             if "InnovationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]:
                 self.StoredVariables["InnovationAtCurrentState"].store( _Innovation )
             #
-            Jb  = 0.5 * (_X - Xb).T * BI * (_X - Xb)
-            Jo  = 0.5 * _Innovation.T * RI * _Innovation
-            J   = float( Jb ) + float( Jo )
+            Jb  = float( 0.5 * (_X - Xb).T * BI * (_X - Xb) )
+            Jo  = float( 0.5 * _Innovation.T * RI * _Innovation )
+            J   = Jb + Jo
             #
             self.StoredVariables["CostFunctionJb"].store( Jb )
             self.StoredVariables["CostFunctionJo"].store( Jo )
src/daComposant/daAlgorithms/Blue.py
index f59dc3812bb7ac7d9efe31786528680c8749ef6c..2f6c2d87eebee8afc820a7c5f0255e0193f22e0e 100644 (file)
@@ -132,9 +132,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
            "CostFunctionJ"                 in self._parameters["StoreSupplementaryCalculations"] or \
            "MahalanobisConsistency"        in self._parameters["StoreSupplementaryCalculations"]:
             #
-            Jb  = 0.5 * (Xa - Xb).T * BI * (Xa - Xb)
-            Jo  = 0.5 * oma.T * RI * oma
-            J   = float( Jb ) + float( Jo )
+            Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
+            Jo  = float( 0.5 * oma.T * RI * oma )
+            J   = Jb + Jo
             #
             self.StoredVariables["CostFunctionJb"].store( Jb )
             self.StoredVariables["CostFunctionJo"].store( Jo )
src/daComposant/daAlgorithms/ExtendedBlue.py
index ff576103879fdee1ceb0433eff31ba7bbe9d77e8..4ecda94f7ad2ace49106b7976d37e8fbe9ca33fd 100644 (file)
@@ -132,9 +132,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         if self._parameters["StoreInternalVariables"] or \
            "CostFunctionJ"                 in self._parameters["StoreSupplementaryCalculations"] or \
            "MahalanobisConsistency"        in self._parameters["StoreSupplementaryCalculations"]:
-            Jb  = 0.5 * (Xa - Xb).T * BI * (Xa - Xb)
-            Jo  = 0.5 * oma.T * RI * oma
-            J   = float( Jb ) + float( Jo )
+            Jb  = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
+            Jo  = float( 0.5 * oma.T * RI * oma )
+            J   = Jb + Jo
             self.StoredVariables["CostFunctionJb"].store( Jb )
             self.StoredVariables["CostFunctionJo"].store( Jo )
             self.StoredVariables["CostFunctionJ" ].store( J )
test/test6901/Verification_des_Assimilation_Algorithms.py
index ab34c37b8f229a469783f67f3e70e6daa0f2726b..e933b9513c76c85313fd96ad3448ec5ca27a3874 100644 (file)
@@ -24,7 +24,7 @@
 # ==============================================================================
 import adaoBuilder, numpy
 def test1():
-    """Verification de la disponibilite de l'ensemble des algorithmes"""
+    """Verification de la disponibilite de l'ensemble des algorithmes\n(Utilisation d'un operateur matriciel)"""
     Xa = {}
     for algo in ("3DVAR", "Blue", "ExtendedBlue", "LinearLeastSquares", "NonLinearLeastSquares", "DerivativeFreeOptimization"):
         print
@@ -107,6 +107,78 @@ def test1():
     #
     return 0
 
+def test2():
+    """Verification de la disponibilite de l'ensemble des algorithmes\n(Utilisation d'un operateur fonctionnel)"""
+    Xa = {}
+    M = numpy.matrix("1 0 0;0 2 0;0 0 3")
+    def H(x): return M * numpy.asmatrix(numpy.ravel( x )).T
+    for algo in ("3DVAR", "Blue", "ExtendedBlue", "NonLinearLeastSquares", "DerivativeFreeOptimization"):
+        print
+        msg = "Algorithme en test : %s"%algo
+        print msg+"\n"+"-"*len(msg)
+        #
+        adaopy = adaoBuilder.New()
+        adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "Bounds":[[-1,10.],[-1,10.],[-1,10.]]})
+        adaopy.setBackground         (Vector = [0,1,2])
+        adaopy.setBackgroundError    (ScalarSparseMatrix = 1.)
+        adaopy.setObservation        (Vector = [0.5,1.5,2.5])
+        adaopy.setObservationError   (DiagonalSparseMatrix = "1 1 1")
+        adaopy.setObservationOperator(OneFunction = H)
+        adaopy.setObserver("Analysis",Template="ValuePrinter")
+        adaopy.execute()
+        Xa[algo] = adaopy.get("Analysis")[-1]
+        del adaopy
+    #
+    M = numpy.matrix("1 0 0;0 2 0;0 0 3")
+    def H(x): return M * numpy.asmatrix(numpy.ravel( x )).T
+    for algo in ("ExtendedKalmanFilter", "KalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
+        print
+        msg = "Algorithme en test : %s"%algo
+        print msg+"\n"+"-"*len(msg)
+        #
+        adaopy = adaoBuilder.New()
+        adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, })
+        adaopy.setBackground         (Vector = [0,1,2])
+        adaopy.setBackgroundError    (ScalarSparseMatrix = 1.)
+        adaopy.setObservation        (Vector = [0.5,1.5,2.5])
+        adaopy.setObservationError   (DiagonalSparseMatrix = "1 1 1")
+        adaopy.setObservationOperator(OneFunction = H)
+        adaopy.setEvolutionError     (ScalarSparseMatrix = 1.)
+        adaopy.setEvolutionModel     (Matrix = "1 0 0;0 1 0;0 0 1")
+        adaopy.setObserver("Analysis",Template="ValuePrinter")
+        adaopy.execute()
+        Xa[algo] = adaopy.get("Analysis")[-1]
+        del adaopy
+    #
+    M = numpy.matrix("1 0 0;0 1 0;0 0 1")
+    def H(x): return M * numpy.asmatrix(numpy.ravel( x )).T
+    for algo in ("ParticleSwarmOptimization", "QuantileRegression", ):
+        print
+        msg = "Algorithme en test : %s"%algo
+        print msg+"\n"+"-"*len(msg)
+        #
+        adaopy = adaoBuilder.New()
+        adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"BoxBounds":3*[[-1,3]], "SetSeed":1000, })
+        adaopy.setBackground         (Vector = [0,1,2])
+        adaopy.setBackgroundError    (ScalarSparseMatrix = 1.)
+        adaopy.setObservation        (Vector = [0.5,1.5,2.5])
+        adaopy.setObservationError   (DiagonalSparseMatrix = "1 2 3")
+        adaopy.setObservationOperator(OneFunction = H)
+        adaopy.setObserver("Analysis",Template="ValuePrinter")
+        adaopy.execute()
+        Xa[algo] = adaopy.get("Analysis")[-1]
+        del adaopy
+    #
+    print
+    msg = "Tests des ecarts attendus :"
+    print msg+"\n"+"="*len(msg)
+    verify_similarity_of_algo_results(("3DVAR", "Blue", "ExtendedBlue", "4DVAR", "DerivativeFreeOptimization"), Xa)
+    verify_similarity_of_algo_results(("ExtendedKalmanFilter", "KalmanFilter", "UnscentedKalmanFilter"), Xa)
+    print "  Les resultats obtenus sont corrects."
+    print
+    #
+    return 0
+
 def almost_equal_vectors(v1, v2, precision = 1.e-15, msg = ""):
     """Comparaison de deux vecteurs"""
     print "    Difference maximale %s: %.2e"%(msg, max(abs(v2 - v1)))
@@ -124,3 +196,4 @@ def verify_similarity_of_algo_results(serie = [], Xa = {}):
 if __name__ == "__main__":
     print '\n AUTODIAGNOSTIC \n'
     test1()
+    test2()