1 #-*-coding:iso-8859-1-*-
3 # Copyright (C) 2008-2017 EDF R&D
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 # Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 # See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
21 # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
22 "Verification de la disponibilite de l'ensemble des algorithmes"
24 # ==============================================================================
25 import adaoBuilder, numpy
27 """Verification de la disponibilite de l'ensemble des algorithmes"""
29 for algo in ("3DVAR", "Blue", "ExtendedBlue", "LinearLeastSquares", "NonLinearLeastSquares", "DerivativeFreeOptimization"):
31 msg = "Algorithme en test : %s"%algo
32 print msg+"\n"+"-"*len(msg)
34 adaopy = adaoBuilder.New()
35 adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "Bounds":[[-1,10.],[-1,10.],[-1,10.]]})
36 adaopy.setBackground (Vector = [0,1,2])
37 adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
38 adaopy.setObservation (Vector = [0.5,1.5,2.5])
39 adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1")
40 adaopy.setObservationOperator(Matrix = "1 0 0;0 2 0;0 0 3")
41 adaopy.setObserver("Analysis",Template="ValuePrinter")
43 Xa[algo] = adaopy.get("Analysis")[-1]
46 for algo in ("ExtendedKalmanFilter", "KalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
48 msg = "Algorithme en test : %s"%algo
49 print msg+"\n"+"-"*len(msg)
51 adaopy = adaoBuilder.New()
52 adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, })
53 adaopy.setBackground (Vector = [0,1,2])
54 adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
55 adaopy.setObservation (Vector = [0.5,1.5,2.5])
56 adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1")
57 adaopy.setObservationOperator(Matrix = "1 0 0;0 2 0;0 0 3")
58 adaopy.setEvolutionError (ScalarSparseMatrix = 1.)
59 adaopy.setEvolutionModel (Matrix = "1 0 0;0 1 0;0 0 1")
60 adaopy.setObserver("Analysis",Template="ValuePrinter")
62 Xa[algo] = adaopy.get("Analysis")[-1]
65 for algo in ("ParticleSwarmOptimization", "QuantileRegression", ):
67 msg = "Algorithme en test : %s"%algo
68 print msg+"\n"+"-"*len(msg)
70 adaopy = adaoBuilder.New()
71 adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"BoxBounds":3*[[-1,3]], "SetSeed":1000, })
72 adaopy.setBackground (Vector = [0,1,2])
73 adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
74 adaopy.setObservation (Vector = [0.5,1.5,2.5])
75 adaopy.setObservationError (DiagonalSparseMatrix = "1 2 3")
76 adaopy.setObservationOperator(Matrix = "1 0 0;0 1 0;0 0 1")
77 adaopy.setObserver("Analysis",Template="ValuePrinter")
79 Xa[algo] = adaopy.get("Analysis")[-1]
82 for algo in ("EnsembleBlue", ):
84 msg = "Algorithme en test : %s"%algo
85 print msg+"\n"+"-"*len(msg)
87 adaopy = adaoBuilder.New()
88 adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"SetSeed":1000, })
89 adaopy.setBackground (VectorSerie = 100*[[0,1,2]])
90 adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
91 adaopy.setObservation (Vector = [0.5,1.5,2.5])
92 adaopy.setObservationError (DiagonalSparseMatrix = "1 2 3")
93 adaopy.setObservationOperator(Matrix = "1 0 0;0 1 0;0 0 1")
94 adaopy.setObserver("Analysis",Template="ValuePrinter")
96 Xa[algo] = adaopy.get("Analysis")[-1]
100 msg = "Tests des ecarts attendus :"
101 print msg+"\n"+"="*len(msg)
102 verify_similarity_of_algo_results(("3DVAR", "Blue", "ExtendedBlue", "4DVAR", "DerivativeFreeOptimization"), Xa)
103 verify_similarity_of_algo_results(("LinearLeastSquares", "NonLinearLeastSquares"), Xa)
104 verify_similarity_of_algo_results(("ExtendedKalmanFilter", "KalmanFilter", "UnscentedKalmanFilter"), Xa)
105 print " Les resultats obtenus sont corrects."
def almost_equal_vectors(v1, v2, precision = 1.e-15, msg = ""):
    """Return True when the maximum componentwise gap between the two
    vectors *v1* and *v2* is strictly below *precision*.

    v1, v2    : array-like vectors of identical length (plain Python lists
                are accepted thanks to the asarray conversion)
    precision : strict upper bound accepted on the maximum absolute gap
    msg       : free label inserted in the printed report
    """
    # Compute the gap once (the original evaluated max(abs(v2 - v1)) twice)
    # and coerce inputs so plain sequences work, not only numpy arrays.
    ecart = max(abs(numpy.asarray(v2, dtype=float) - numpy.asarray(v1, dtype=float)))
    print("  Difference maximale %s: %.2e"%(msg, ecart))
    return ecart < precision
def verify_similarity_of_algo_results(serie = [], Xa = {}):
    """Assert that all the algorithms named in *serie* produced similar
    analyses, by pairwise comparison of the vectors stored in *Xa*.

    serie : sequence of algorithm names (read-only; mutable defaults are
            never modified here)
    Xa    : dict mapping each algorithm name to its last analysis vector
    """
    print("  Comparaisons :")
    # Fix: restore the pairwise comparison loops that the "break" below
    # belongs to. Each algorithm is compared with every one placed before
    # it in the series; the "break" skips the identical pair and the
    # symmetric pairs already tested.
    for algo1 in serie:
        for algo2 in serie:
            if algo1 is algo2: break
            assert almost_equal_vectors( Xa[algo1], Xa[algo2], 5.e-5, "entre %s et %s "%(algo1, algo2) )
    print("  Algorithmes dont les resultats sont similaires : %s\n"%(serie,))
123 #===============================================================================
124 if __name__ == "__main__":
125 print '\n AUTODIAGNOSTIC \n'