1 #-*-coding:iso-8859-1-*-
3 # Copyright (C) 2008-2017 EDF R&D
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 # Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
19 # See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
21 # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
22 "Verification de la disponibilite de l'ensemble des algorithmes"
24 # ==============================================================================
25 import adaoBuilder, numpy
27 """Verification de la disponibilite de l'ensemble des algorithmes\n(Utilisation d'un operateur matriciel)"""
29 for algo in ("3DVAR", "Blue", "ExtendedBlue", "LinearLeastSquares", "NonLinearLeastSquares", "DerivativeFreeOptimization"):
31 msg = "Algorithme en test : %s"%algo
32 print msg+"\n"+"-"*len(msg)
34 adaopy = adaoBuilder.New()
35 adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "Bounds":[[-1,10.],[-1,10.],[-1,10.]]})
36 adaopy.setBackground (Vector = [0,1,2])
37 adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
38 adaopy.setObservation (Vector = [0.5,1.5,2.5])
39 adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1")
40 adaopy.setObservationOperator(Matrix = "1 0 0;0 2 0;0 0 3")
41 adaopy.setObserver("Analysis",Template="ValuePrinter")
43 Xa[algo] = adaopy.get("Analysis")[-1]
46 for algo in ("ExtendedKalmanFilter", "KalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
48 msg = "Algorithme en test : %s"%algo
49 print msg+"\n"+"-"*len(msg)
51 adaopy = adaoBuilder.New()
52 adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, })
53 adaopy.setBackground (Vector = [0,1,2])
54 adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
55 adaopy.setObservation (Vector = [0.5,1.5,2.5])
56 adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1")
57 adaopy.setObservationOperator(Matrix = "1 0 0;0 2 0;0 0 3")
58 adaopy.setEvolutionError (ScalarSparseMatrix = 1.)
59 adaopy.setEvolutionModel (Matrix = "1 0 0;0 1 0;0 0 1")
60 adaopy.setObserver("Analysis",Template="ValuePrinter")
62 Xa[algo] = adaopy.get("Analysis")[-1]
65 for algo in ("ParticleSwarmOptimization", "QuantileRegression", ):
67 msg = "Algorithme en test : %s"%algo
68 print msg+"\n"+"-"*len(msg)
70 adaopy = adaoBuilder.New()
71 adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"BoxBounds":3*[[-1,3]], "SetSeed":1000, })
72 adaopy.setBackground (Vector = [0,1,2])
73 adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
74 adaopy.setObservation (Vector = [0.5,1.5,2.5])
75 adaopy.setObservationError (DiagonalSparseMatrix = "1 2 3")
76 adaopy.setObservationOperator(Matrix = "1 0 0;0 1 0;0 0 1")
77 adaopy.setObserver("Analysis",Template="ValuePrinter")
79 Xa[algo] = adaopy.get("Analysis")[-1]
82 for algo in ("EnsembleBlue", ):
84 msg = "Algorithme en test : %s"%algo
85 print msg+"\n"+"-"*len(msg)
87 adaopy = adaoBuilder.New()
88 adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"SetSeed":1000, })
89 adaopy.setBackground (VectorSerie = 100*[[0,1,2]])
90 adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
91 adaopy.setObservation (Vector = [0.5,1.5,2.5])
92 adaopy.setObservationError (DiagonalSparseMatrix = "1 2 3")
93 adaopy.setObservationOperator(Matrix = "1 0 0;0 1 0;0 0 1")
94 adaopy.setObserver("Analysis",Template="ValuePrinter")
96 Xa[algo] = adaopy.get("Analysis")[-1]
100 msg = "Tests des ecarts attendus :"
101 print msg+"\n"+"="*len(msg)
102 verify_similarity_of_algo_results(("3DVAR", "Blue", "ExtendedBlue", "4DVAR", "DerivativeFreeOptimization"), Xa)
103 verify_similarity_of_algo_results(("LinearLeastSquares", "NonLinearLeastSquares"), Xa)
104 verify_similarity_of_algo_results(("ExtendedKalmanFilter", "KalmanFilter", "UnscentedKalmanFilter"), Xa)
105 print " Les resultats obtenus sont corrects."
111 """Verification de la disponibilite de l'ensemble des algorithmes\n(Utilisation d'un operateur fonctionnel)"""
113 M = numpy.matrix("1 0 0;0 2 0;0 0 3")
114 def H(x): return M * numpy.asmatrix(numpy.ravel( x )).T
115 for algo in ("3DVAR", "Blue", "ExtendedBlue", "NonLinearLeastSquares", "DerivativeFreeOptimization"):
117 msg = "Algorithme en test : %s"%algo
118 print msg+"\n"+"-"*len(msg)
120 adaopy = adaoBuilder.New()
121 adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "Bounds":[[-1,10.],[-1,10.],[-1,10.]]})
122 adaopy.setBackground (Vector = [0,1,2])
123 adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
124 adaopy.setObservation (Vector = [0.5,1.5,2.5])
125 adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1")
126 adaopy.setObservationOperator(OneFunction = H)
127 adaopy.setObserver("Analysis",Template="ValuePrinter")
129 Xa[algo] = adaopy.get("Analysis")[-1]
132 M = numpy.matrix("1 0 0;0 2 0;0 0 3")
133 def H(x): return M * numpy.asmatrix(numpy.ravel( x )).T
134 for algo in ("ExtendedKalmanFilter", "KalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
136 msg = "Algorithme en test : %s"%algo
137 print msg+"\n"+"-"*len(msg)
139 adaopy = adaoBuilder.New()
140 adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, })
141 adaopy.setBackground (Vector = [0,1,2])
142 adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
143 adaopy.setObservation (Vector = [0.5,1.5,2.5])
144 adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1")
145 adaopy.setObservationOperator(OneFunction = H)
146 adaopy.setEvolutionError (ScalarSparseMatrix = 1.)
147 adaopy.setEvolutionModel (Matrix = "1 0 0;0 1 0;0 0 1")
148 adaopy.setObserver("Analysis",Template="ValuePrinter")
150 Xa[algo] = adaopy.get("Analysis")[-1]
153 M = numpy.matrix("1 0 0;0 1 0;0 0 1")
154 def H(x): return M * numpy.asmatrix(numpy.ravel( x )).T
155 for algo in ("ParticleSwarmOptimization", "QuantileRegression", ):
157 msg = "Algorithme en test : %s"%algo
158 print msg+"\n"+"-"*len(msg)
160 adaopy = adaoBuilder.New()
161 adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"BoxBounds":3*[[-1,3]], "SetSeed":1000, })
162 adaopy.setBackground (Vector = [0,1,2])
163 adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
164 adaopy.setObservation (Vector = [0.5,1.5,2.5])
165 adaopy.setObservationError (DiagonalSparseMatrix = "1 2 3")
166 adaopy.setObservationOperator(OneFunction = H)
167 adaopy.setObserver("Analysis",Template="ValuePrinter")
169 Xa[algo] = adaopy.get("Analysis")[-1]
173 msg = "Tests des ecarts attendus :"
174 print msg+"\n"+"="*len(msg)
175 verify_similarity_of_algo_results(("3DVAR", "Blue", "ExtendedBlue", "4DVAR", "DerivativeFreeOptimization"), Xa)
176 verify_similarity_of_algo_results(("ExtendedKalmanFilter", "KalmanFilter", "UnscentedKalmanFilter"), Xa)
177 print " Les resultats obtenus sont corrects."
def almost_equal_vectors(v1, v2, precision = 1.e-15, msg = ""):
    """Return True when the two vectors agree componentwise within
    ``precision``, printing the maximum absolute difference.

    v1, v2    : array-likes supporting elementwise subtraction (numpy arrays)
    precision : acceptance threshold on the maximum absolute difference
    msg       : label inserted into the printed report
    """
    # Compute the discrepancy once (the original evaluated it twice).
    ecart = max(abs(v2 - v1))
    print(" Difference maximale %s: %.2e"%(msg, ecart))
    return ecart < precision
def verify_similarity_of_algo_results(serie = [], Xa = {}):
    """Assert that every algorithm named in ``serie`` produced an analysis in
    ``Xa`` that is almost equal (5.e-5 tolerance) to the others'.

    serie : sequence of algorithm names to compare pairwise
    Xa    : dict mapping algorithm name -> analysis vector
    Raises AssertionError on the first pair that differs beyond tolerance.

    NOTE(review): the pair-iteration loops below were reconstructed -- the
    visible source referenced algo1/algo2 without binding them.
    """
    print(" Comparaisons :")
    for algo1 in serie:
        for algo2 in serie:
            # Stop the inner loop at the diagonal so each unordered pair is
            # compared exactly once.
            if algo1 is algo2: break
            assert almost_equal_vectors( Xa[algo1], Xa[algo2], 5.e-5, "entre %s et %s "%(algo1, algo2) )
    print(" Algorithmes dont les resultats sont similaires : %s\n"%(serie,))
195 #===============================================================================
196 if __name__ == "__main__":
197 print '\n AUTODIAGNOSTIC \n'