default = 1.e-7,
typecast = float,
message = "Diminution relative minimale du coût lors de l'arrêt",
+ minval = 0.,
)
self.defineRequiredParameter(
name = "ProjectedGradientTolerance",
default = 1.e-05,
typecast = float,
message = "Maximum des composantes du gradient lors de l'arrêt",
+ minval = 0.,
)
self.defineRequiredParameter(
name = "StoreInternalVariables",
L = numpy.linalg.cholesky( A )
except:
raise ValueError("The %s a posteriori covariance matrix A is not symmetric positive-definite. Please check your a priori covariances and your observation operator."%(self._name,))
+ if self._toStore("APosterioriCovariance"):
self.StoredVariables["APosterioriCovariance"].store( A )
#
# Calculs et/ou stockages supplémentaires
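As context for the hunk above: the try/except around numpy.linalg.cholesky serves as a positive-definiteness test of the a posteriori covariance A before it is stored. A minimal standalone sketch of that test (names here are illustrative, not part of the patch):

import numpy

def is_positive_definite(matrix):
    """Cholesky-based test: numpy.linalg.cholesky raises LinAlgError when the
    factorization fails, i.e. when the (assumed symmetric) matrix is not
    numerically positive-definite."""
    try:
        numpy.linalg.cholesky(matrix)
        return True
    except numpy.linalg.LinAlgError:
        return False

print(is_positive_definite(numpy.array([[2., 0.], [0., 3.]])))  # True
print(is_positive_definite(numpy.array([[1., 2.], [2., 1.]])))  # False: eigenvalues 3 and -1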
default = 1.e-7,
typecast = float,
message = "Diminution relative minimale du coût lors de l'arrêt",
+ minval = 0.,
)
self.defineRequiredParameter(
name = "ProjectedGradientTolerance",
default = 1.e-05,
typecast = float,
message = "Maximum des composantes du gradient lors de l'arrêt",
+ minval = 0.,
)
self.defineRequiredParameter(
name = "StoreInternalVariables",
message = "Liste de calculs supplémentaires à stocker et/ou effectuer",
listval = [
"BMA",
- "CurrentState",
"CostFunctionJ",
- "CostFunctionJb",
- "CostFunctionJo",
- "IndexOfOptimum",
- "CurrentOptimum",
"CostFunctionJAtCurrentOptimum",
+ "CostFunctionJb",
"CostFunctionJbAtCurrentOptimum",
+ "CostFunctionJo",
"CostFunctionJoAtCurrentOptimum",
+ "CurrentOptimum",
+ "CurrentState",
+ "IndexOfOptimum",
]
)
self.defineRequiredParameter( # Pas de type
default = 1.e-7,
typecast = float,
message = "Diminution relative minimale du coût lors de l'arrêt",
+ minval = 0.,
)
self.defineRequiredParameter(
name = "ProjectedGradientTolerance",
default = 1.e-05,
typecast = float,
message = "Maximum des composantes du gradient lors de l'arrêt",
+ minval = 0.,
)
self.defineRequiredParameter(
name = "StoreInternalVariables",
message = "Liste de calculs supplémentaires à stocker et/ou effectuer",
listval = [
"BMA",
- "OMA",
- "OMB",
"CostFunctionJ",
+ "CostFunctionJAtCurrentOptimum",
"CostFunctionJb",
+ "CostFunctionJbAtCurrentOptimum",
"CostFunctionJo",
- "CurrentState",
+ "CostFunctionJoAtCurrentOptimum",
"CurrentOptimum",
+ "CurrentState",
"IndexOfOptimum",
"Innovation",
"InnovationAtCurrentState",
- "CostFunctionJAtCurrentOptimum",
- "CostFunctionJbAtCurrentOptimum",
- "CostFunctionJoAtCurrentOptimum",
+ "OMA",
+ "OMB",
"SimulatedObservationAtBackground",
+ "SimulatedObservationAtCurrentOptimum",
"SimulatedObservationAtCurrentState",
"SimulatedObservationAtOptimum",
- "SimulatedObservationAtCurrentOptimum",
]
)
self.defineRequiredParameter( # Pas de type
- IndexOfOptimum : index de l'état optimal courant lors d'itérations
- Innovation : l'innovation : d = Y - H(X)
- InnovationAtCurrentState : l'innovation à l'état courant : dn = Y - H(Xn)
- - JacobianMatrixAtBackground : matrice jacobienne à l'ébauche
+ - JacobianMatrixAtCurrentState : matrice jacobienne à l'état courant
- JacobianMatrixAtOptimum : matrice jacobienne à l'optimum
+ - KalmanGainAtOptimum : gain de Kalman à l'optimum
- MahalanobisConsistency : indicateur de consistance des covariances
- OMA : Observation moins Analyse : Y - Xa
- OMB : Observation moins Background : Y - Xb
self.StoredVariables["IndexOfOptimum"] = Persistence.OneIndex(name = "IndexOfOptimum")
self.StoredVariables["Innovation"] = Persistence.OneVector(name = "Innovation")
self.StoredVariables["InnovationAtCurrentState"] = Persistence.OneVector(name = "InnovationAtCurrentState")
- self.StoredVariables["JacobianMatrixAtBackground"] = Persistence.OneMatrix(name = "JacobianMatrixAtBackground")
+ self.StoredVariables["JacobianMatrixAtCurrentState"] = Persistence.OneMatrix(name = "JacobianMatrixAtCurrentState")
self.StoredVariables["JacobianMatrixAtOptimum"] = Persistence.OneMatrix(name = "JacobianMatrixAtOptimum")
+ self.StoredVariables["KalmanGainAtOptimum"] = Persistence.OneMatrix(name = "KalmanGainAtOptimum")
self.StoredVariables["MahalanobisConsistency"] = Persistence.OneScalar(name = "MahalanobisConsistency")
self.StoredVariables["OMA"] = Persistence.OneVector(name = "OMA")
self.StoredVariables["OMB"] = Persistence.OneVector(name = "OMB")
test6901
test6902
test6903
+ test6904
)
cp -R $(DIR)test6901 $(SALOMETESTDIR)
cp -R $(DIR)test6902 $(SALOMETESTDIR)
cp -R $(DIR)test6903 $(SALOMETESTDIR)
+ cp -R $(DIR)test6904 $(SALOMETESTDIR)
cp $(DIR)CTestTestfileInstall.cmake.in $(SALOMETESTDIR)/CTestTestfile.cmake
uninstall-local:
rm -rf $(SALOMETESTDIR)/test6901
rm -rf $(SALOMETESTDIR)/test6902
rm -rf $(SALOMETESTDIR)/test6903
+ rm -rf $(SALOMETESTDIR)/test6904
rm $(SALOMETESTDIR)/CTestTestfile.cmake
+++ /dev/null
-# -*- coding: utf-8 -*-
-#
-# Copyright (C) 2008-2019 EDF R&D
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-#
-# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
-#
-# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
-
-"""
- Unittest extensions for Numpy objects
-"""
-__author__ = "Jean-Philippe ARGAUD"
-__all__ = ["assertAlmostEqualVector"]
-
-import numpy
-
-# ==============================================================================
-def assertAlmostEqualArrays(first, second, places=7, msg=None, delta=None):
- "Compare two vectors, like unittest.assertAlmostEqual"
- if msg is not None:
- print(msg)
- if delta is not None:
- if ( (numpy.asarray(first) - numpy.asarray(second)) > float(delta) ).any():
- raise AssertionError("%s != %s within %s places"%(first,second,delta))
- else:
- if ( (numpy.asarray(first) - numpy.asarray(second)) > 10**(-int(places)) ).any():
- raise AssertionError("%s != %s within %i places"%(first,second,places))
- return max(abs(numpy.asarray(first) - numpy.asarray(second)))
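The file removed above provided a hand-written absolute-tolerance comparison; once the tests move to unittest (as in the hunks that follow), an equivalent check is available from numpy itself. A hedged sketch of that equivalence (not part of the patch):

import numpy

first  = [0.49999999, 1.5, 2.5]
second = [0.5,        1.5, 2.5]

# Same intent as assertAlmostEqualArrays(first, second, delta=1.e-7):
# raise AssertionError if any component differs by more than the tolerance.
numpy.testing.assert_allclose(first, second, rtol=0., atol=1.e-7)
print(max(abs(numpy.asarray(first) - numpy.asarray(second))))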
# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
"Verification de la disponibilite de l'ensemble des algorithmes"
-# ==============================================================================
-import numpy, sys
+import sys
+import unittest
+import numpy
from adao import adaoBuilder
-def test1():
- """Verification de la disponibilite de l'ensemble des algorithmes\n(Utilisation d'un operateur matriciel)"""
- print(test1.__doc__)
- Xa = {}
- for algo in ("3DVAR", "Blue", "ExtendedBlue", "LinearLeastSquares", "NonLinearLeastSquares", "DerivativeFreeOptimization"):
- print("")
- msg = "Algorithme en test : %s"%algo
- print(msg+"\n"+"-"*len(msg))
+
+# ==============================================================================
+class InTest(unittest.TestCase):
+ def test1(self):
+ """Verification de la disponibilite de l'ensemble des algorithmes\n(Utilisation d'un operateur matriciel)"""
+ print(self.test1.__doc__)
+ Xa = {}
+ for algo in ("3DVAR", "Blue", "ExtendedBlue", "LinearLeastSquares", "NonLinearLeastSquares", "DerivativeFreeOptimization"):
+ print("")
+ msg = "Algorithme en test : %s"%algo
+ print(msg+"\n"+"-"*len(msg))
+ #
+ adaopy = adaoBuilder.New()
+ adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "Bounds":[[-1,10.],[-1,10.],[-1,10.]]})
+ adaopy.setBackground (Vector = [0,1,2])
+ adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
+ adaopy.setObservation (Vector = [0.5,1.5,2.5])
+ adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1")
+ adaopy.setObservationOperator(Matrix = "1 0 0;0 2 0;0 0 3")
+ adaopy.setObserver("Analysis",Template="ValuePrinter")
+ adaopy.execute()
+ Xa[algo] = adaopy.get("Analysis")[-1]
+ del adaopy
#
- adaopy = adaoBuilder.New()
- adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "Bounds":[[-1,10.],[-1,10.],[-1,10.]]})
- adaopy.setBackground (Vector = [0,1,2])
- adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
- adaopy.setObservation (Vector = [0.5,1.5,2.5])
- adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1")
- adaopy.setObservationOperator(Matrix = "1 0 0;0 2 0;0 0 3")
- adaopy.setObserver("Analysis",Template="ValuePrinter")
- adaopy.execute()
- Xa[algo] = adaopy.get("Analysis")[-1]
- del adaopy
- #
- for algo in ("ExtendedKalmanFilter", "KalmanFilter", "UnscentedKalmanFilter", "EnsembleKalmanFilter", "4DVAR"):
- print("")
- msg = "Algorithme en test : %s"%algo
- print(msg+"\n"+"-"*len(msg))
+ for algo in ("ExtendedKalmanFilter", "KalmanFilter", "UnscentedKalmanFilter", "EnsembleKalmanFilter", "4DVAR"):
+ print("")
+ msg = "Algorithme en test : %s"%algo
+ print(msg+"\n"+"-"*len(msg))
+ #
+ adaopy = adaoBuilder.New()
+ adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "SetSeed":1000})
+ adaopy.setBackground (Vector = [0,1,2])
+ adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
+ adaopy.setObservation (Vector = [0.5,1.5,2.5])
+ adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1")
+ adaopy.setObservationOperator(Matrix = "1 0 0;0 2 0;0 0 3")
+ adaopy.setEvolutionError (ScalarSparseMatrix = 1.)
+ adaopy.setEvolutionModel (Matrix = "1 0 0;0 1 0;0 0 1")
+ adaopy.setObserver("Analysis",Template="ValuePrinter")
+ adaopy.execute()
+ Xa[algo] = adaopy.get("Analysis")[-1]
+ del adaopy
#
- adaopy = adaoBuilder.New()
- adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "SetSeed":1000})
- adaopy.setBackground (Vector = [0,1,2])
- adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
- adaopy.setObservation (Vector = [0.5,1.5,2.5])
- adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1")
- adaopy.setObservationOperator(Matrix = "1 0 0;0 2 0;0 0 3")
- adaopy.setEvolutionError (ScalarSparseMatrix = 1.)
- adaopy.setEvolutionModel (Matrix = "1 0 0;0 1 0;0 0 1")
- adaopy.setObserver("Analysis",Template="ValuePrinter")
- adaopy.execute()
- Xa[algo] = adaopy.get("Analysis")[-1]
- del adaopy
- #
- for algo in ("ParticleSwarmOptimization", "QuantileRegression", ):
- print("")
- msg = "Algorithme en test : %s"%algo
- print(msg+"\n"+"-"*len(msg))
+ for algo in ("ParticleSwarmOptimization", "QuantileRegression", ):
+ print("")
+ msg = "Algorithme en test : %s"%algo
+ print(msg+"\n"+"-"*len(msg))
+ #
+ adaopy = adaoBuilder.New()
+ adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"BoxBounds":3*[[-1,3]], "SetSeed":1000})
+ adaopy.setBackground (Vector = [0,1,2])
+ adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
+ adaopy.setObservation (Vector = [0.5,1.5,2.5])
+ adaopy.setObservationError (DiagonalSparseMatrix = "1 2 3")
+ adaopy.setObservationOperator(Matrix = "1 0 0;0 1 0;0 0 1")
+ adaopy.setObserver("Analysis",Template="ValuePrinter")
+ adaopy.execute()
+ Xa[algo] = adaopy.get("Analysis")[-1]
+ del adaopy
#
- adaopy = adaoBuilder.New()
- adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"BoxBounds":3*[[-1,3]], "SetSeed":1000})
- adaopy.setBackground (Vector = [0,1,2])
- adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
- adaopy.setObservation (Vector = [0.5,1.5,2.5])
- adaopy.setObservationError (DiagonalSparseMatrix = "1 2 3")
- adaopy.setObservationOperator(Matrix = "1 0 0;0 1 0;0 0 1")
- adaopy.setObserver("Analysis",Template="ValuePrinter")
- adaopy.execute()
- Xa[algo] = adaopy.get("Analysis")[-1]
- del adaopy
- #
- for algo in ("EnsembleBlue", ):
+ for algo in ("EnsembleBlue", ):
+ print("")
+ msg = "Algorithme en test : %s"%algo
+ print(msg+"\n"+"-"*len(msg))
+ #
+ adaopy = adaoBuilder.New()
+ adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"SetSeed":1000, })
+ adaopy.setBackground (VectorSerie = 100*[[0,1,2]])
+ adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
+ adaopy.setObservation (Vector = [0.5,1.5,2.5])
+ adaopy.setObservationError (DiagonalSparseMatrix = "1 2 3")
+ adaopy.setObservationOperator(Matrix = "1 0 0;0 1 0;0 0 1")
+ adaopy.setObserver("Analysis",Template="ValuePrinter")
+ adaopy.execute()
+ Xa[algo] = adaopy.get("Analysis")[-1]
+ del adaopy
+ #
+ print("")
+ msg = "Tests des ecarts attendus :"
+ print(msg+"\n"+"="*len(msg))
+ verify_similarity_of_algo_results(("3DVAR", "Blue", "ExtendedBlue", "4DVAR", "DerivativeFreeOptimization"), Xa, 5.e-5)
+ verify_similarity_of_algo_results(("LinearLeastSquares", "NonLinearLeastSquares"), Xa, 5.e-7)
+ verify_similarity_of_algo_results(("KalmanFilter", "ExtendedKalmanFilter", "UnscentedKalmanFilter"), Xa, 1.e-14)
+ verify_similarity_of_algo_results(("KalmanFilter", "EnsembleKalmanFilter"), Xa, 5.e-2)
+ print(" Les resultats obtenus sont corrects.")
print("")
- msg = "Algorithme en test : %s"%algo
- print(msg+"\n"+"-"*len(msg))
#
- adaopy = adaoBuilder.New()
- adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"SetSeed":1000, })
- adaopy.setBackground (VectorSerie = 100*[[0,1,2]])
- adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
- adaopy.setObservation (Vector = [0.5,1.5,2.5])
- adaopy.setObservationError (DiagonalSparseMatrix = "1 2 3")
- adaopy.setObservationOperator(Matrix = "1 0 0;0 1 0;0 0 1")
- adaopy.setObserver("Analysis",Template="ValuePrinter")
- adaopy.execute()
- Xa[algo] = adaopy.get("Analysis")[-1]
- del adaopy
- #
- print("")
- msg = "Tests des ecarts attendus :"
- print(msg+"\n"+"="*len(msg))
- verify_similarity_of_algo_results(("3DVAR", "Blue", "ExtendedBlue", "4DVAR", "DerivativeFreeOptimization"), Xa, 5.e-5)
- verify_similarity_of_algo_results(("LinearLeastSquares", "NonLinearLeastSquares"), Xa, 5.e-7)
- verify_similarity_of_algo_results(("KalmanFilter", "ExtendedKalmanFilter", "UnscentedKalmanFilter"), Xa, 1.e-14)
- verify_similarity_of_algo_results(("KalmanFilter", "EnsembleKalmanFilter"), Xa, 5.e-2)
- print(" Les resultats obtenus sont corrects.")
- print("")
- #
- return 0
+ return 0
-def test2():
- """Verification de la disponibilite de l'ensemble des algorithmes\n(Utilisation d'un operateur fonctionnel)"""
- print(test2.__doc__)
- Xa = {}
- M = numpy.matrix("1 0 0;0 2 0;0 0 3")
- def H(x): return M * numpy.asmatrix(numpy.ravel( x )).T
- for algo in ("3DVAR", "Blue", "ExtendedBlue", "NonLinearLeastSquares", "DerivativeFreeOptimization"):
- print("")
- msg = "Algorithme en test : %s"%algo
- print(msg+"\n"+"-"*len(msg))
+ def test2(self):
+ """Verification de la disponibilite de l'ensemble des algorithmes\n(Utilisation d'un operateur fonctionnel)"""
+ print(self.test2.__doc__)
+ Xa = {}
+ M = numpy.matrix("1 0 0;0 2 0;0 0 3")
+ def H(x): return M * numpy.asmatrix(numpy.ravel( x )).T
+ for algo in ("3DVAR", "Blue", "ExtendedBlue", "NonLinearLeastSquares", "DerivativeFreeOptimization"):
+ print("")
+ msg = "Algorithme en test : %s"%algo
+ print(msg+"\n"+"-"*len(msg))
+ #
+ adaopy = adaoBuilder.New()
+ adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "Bounds":[[-1,10.],[-1,10.],[-1,10.]]})
+ adaopy.setBackground (Vector = [0,1,2])
+ adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
+ adaopy.setObservation (Vector = [0.5,1.5,2.5])
+ adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1")
+ adaopy.setObservationOperator(OneFunction = H)
+ adaopy.setObserver("Analysis",Template="ValuePrinter")
+ adaopy.execute()
+ Xa[algo] = adaopy.get("Analysis")[-1]
+ del adaopy
#
- adaopy = adaoBuilder.New()
- adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "Bounds":[[-1,10.],[-1,10.],[-1,10.]]})
- adaopy.setBackground (Vector = [0,1,2])
- adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
- adaopy.setObservation (Vector = [0.5,1.5,2.5])
- adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1")
- adaopy.setObservationOperator(OneFunction = H)
- adaopy.setObserver("Analysis",Template="ValuePrinter")
- adaopy.execute()
- Xa[algo] = adaopy.get("Analysis")[-1]
- del adaopy
- #
- M = numpy.matrix("1 0 0;0 2 0;0 0 3")
- def H(x): return M * numpy.asmatrix(numpy.ravel( x )).T
- for algo in ("ExtendedKalmanFilter", "KalmanFilter", "EnsembleKalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
- print("")
- msg = "Algorithme en test : %s"%algo
- print(msg+"\n"+"-"*len(msg))
+ M = numpy.matrix("1 0 0;0 2 0;0 0 3")
+ def H(x): return M * numpy.asmatrix(numpy.ravel( x )).T
+ for algo in ("ExtendedKalmanFilter", "KalmanFilter", "EnsembleKalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
+ print("")
+ msg = "Algorithme en test : %s"%algo
+ print(msg+"\n"+"-"*len(msg))
+ #
+ adaopy = adaoBuilder.New()
+ adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "SetSeed":1000})
+ adaopy.setBackground (Vector = [0,1,2])
+ adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
+ adaopy.setObservation (Vector = [0.5,1.5,2.5])
+ adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1")
+ adaopy.setObservationOperator(OneFunction = H)
+ adaopy.setEvolutionError (ScalarSparseMatrix = 1.)
+ adaopy.setEvolutionModel (Matrix = "1 0 0;0 1 0;0 0 1")
+ adaopy.setObserver("Analysis",Template="ValuePrinter")
+ adaopy.execute()
+ Xa[algo] = adaopy.get("Analysis")[-1]
+ del adaopy
+ #
+ M = numpy.matrix("1 0 0;0 1 0;0 0 1")
+ def H(x): return M * numpy.asmatrix(numpy.ravel( x )).T
+ for algo in ("ParticleSwarmOptimization", "QuantileRegression", ):
+ print("")
+ msg = "Algorithme en test : %s"%algo
+ print(msg+"\n"+"-"*len(msg))
+ #
+ adaopy = adaoBuilder.New()
+ adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"BoxBounds":3*[[-1,3]], "SetSeed":1000})
+ adaopy.setBackground (Vector = [0,1,2])
+ adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
+ adaopy.setObservation (Vector = [0.5,1.5,2.5])
+ adaopy.setObservationError (DiagonalSparseMatrix = "1 2 3")
+ adaopy.setObservationOperator(OneFunction = H)
+ adaopy.setObserver("Analysis",Template="ValuePrinter")
+ adaopy.execute()
+ Xa[algo] = adaopy.get("Analysis")[-1]
+ del adaopy
#
- adaopy = adaoBuilder.New()
- adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "SetSeed":1000})
- adaopy.setBackground (Vector = [0,1,2])
- adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
- adaopy.setObservation (Vector = [0.5,1.5,2.5])
- adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1")
- adaopy.setObservationOperator(OneFunction = H)
- adaopy.setEvolutionError (ScalarSparseMatrix = 1.)
- adaopy.setEvolutionModel (Matrix = "1 0 0;0 1 0;0 0 1")
- adaopy.setObserver("Analysis",Template="ValuePrinter")
- adaopy.execute()
- Xa[algo] = adaopy.get("Analysis")[-1]
- del adaopy
- #
- M = numpy.matrix("1 0 0;0 1 0;0 0 1")
- def H(x): return M * numpy.asmatrix(numpy.ravel( x )).T
- for algo in ("ParticleSwarmOptimization", "QuantileRegression", ):
print("")
- msg = "Algorithme en test : %s"%algo
- print(msg+"\n"+"-"*len(msg))
+ msg = "Tests des ecarts attendus :"
+ print(msg+"\n"+"="*len(msg))
+ verify_similarity_of_algo_results(("3DVAR", "Blue", "ExtendedBlue", "4DVAR", "DerivativeFreeOptimization"), Xa, 5.e-5)
+ verify_similarity_of_algo_results(("KalmanFilter", "ExtendedKalmanFilter", "UnscentedKalmanFilter"), Xa, 1.e14)
+ verify_similarity_of_algo_results(("KalmanFilter", "EnsembleKalmanFilter"), Xa, 5.e-2)
+ print(" Les resultats obtenus sont corrects.")
+ print("")
#
- adaopy = adaoBuilder.New()
- adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"BoxBounds":3*[[-1,3]], "SetSeed":1000})
- adaopy.setBackground (Vector = [0,1,2])
- adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
- adaopy.setObservation (Vector = [0.5,1.5,2.5])
- adaopy.setObservationError (DiagonalSparseMatrix = "1 2 3")
- adaopy.setObservationOperator(OneFunction = H)
- adaopy.setObserver("Analysis",Template="ValuePrinter")
- adaopy.execute()
- Xa[algo] = adaopy.get("Analysis")[-1]
- del adaopy
- #
- print("")
- msg = "Tests des ecarts attendus :"
- print(msg+"\n"+"="*len(msg))
- verify_similarity_of_algo_results(("3DVAR", "Blue", "ExtendedBlue", "4DVAR", "DerivativeFreeOptimization"), Xa, 5.e-5)
- verify_similarity_of_algo_results(("KalmanFilter", "ExtendedKalmanFilter", "UnscentedKalmanFilter"), Xa, 1.e14)
- verify_similarity_of_algo_results(("KalmanFilter", "EnsembleKalmanFilter"), Xa, 5.e-2)
- print(" Les resultats obtenus sont corrects.")
- print("")
- #
- return 0
+ return 0
def almost_equal_vectors(v1, v2, precision = 1.e-15, msg = ""):
"""Comparaison de deux vecteurs"""
#===============================================================================
if __name__ == "__main__":
- print('\nAUTODIAGNOSTIC\n')
- test1()
- test2()
+ print("\nAUTODIAGNOSTIC\n==============")
+ unittest.main()
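The hunks above (and the analogous ones in the files below) all apply one conversion: module-level test functions become methods of a unittest.TestCase subclass, and the AUTODIAGNOSTIC entry point delegates to unittest.main(). A minimal self-contained sketch of that pattern, with a purely illustrative check in place of the adaoBuilder calls:

import unittest

class InTest(unittest.TestCase):
    def test1(self):
        """Illustrative check only; the real tests drive adaoBuilder as above"""
        print(self.test1.__doc__)
        self.assertAlmostEqual(0.1 + 0.2, 0.3, places=7)

if __name__ == "__main__":
    print("\nAUTODIAGNOSTIC\n==============")
    unittest.main()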
#
# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
+import sys
+import unittest
+import numpy
from adao import adaoBuilder
# ==============================================================================
-def test1():
- for algo in ("AdjointTest", "FunctionTest", "GradientTest", "LinearityTest", "TangentTest"):
- print("")
- msg = "Algorithme en test : %s"%algo
- print(msg+"\n"+"-"*len(msg))
+class InTest(unittest.TestCase):
+ def test1(self):
+ for algo in ("AdjointTest", "FunctionTest", "GradientTest", "LinearityTest", "TangentTest"):
+ print("")
+ msg = "Algorithme en test : %s"%algo
+ print(msg+"\n"+"-"*len(msg))
+ #
+ adaopy = adaoBuilder.New()
+ adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10,"NumberOfRepetition":2, "SetSeed":1000})
+ adaopy.setCheckingPoint (Vector = [0,1,2])
+ adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
+ adaopy.setObservation (Vector = [0.5,1.5,2.5])
+ adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1")
+ adaopy.setObservationOperator(Matrix = "1 0 0;0 2 0;0 0 3")
+ adaopy.execute()
+ del adaopy
#
- adaopy = adaoBuilder.New()
- adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10,"NumberOfRepetition":2, "SetSeed":1000})
- adaopy.setCheckingPoint (Vector = [0,1,2])
- adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
- adaopy.setObservation (Vector = [0.5,1.5,2.5])
- adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1")
- adaopy.setObservationOperator(Matrix = "1 0 0;0 2 0;0 0 3")
- adaopy.execute()
- del adaopy
- #
- for algo in ("ObserverTest", ):
- print("")
- msg = "Algorithme en test : %s"%algo
- print(msg+"\n"+"-"*len(msg))
+ for algo in ("ObserverTest", ):
+ print("")
+ msg = "Algorithme en test : %s"%algo
+ print(msg+"\n"+"-"*len(msg))
+ #
+ adaopy = adaoBuilder.New()
+ adaopy.setAlgorithmParameters(Algorithm=algo)
+ adaopy.setCheckingPoint (Vector = [0,1,2])
+ adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
+ adaopy.setObservation (Vector = [0.5,1.5,2.5])
+ adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1")
+ adaopy.setObservationOperator(Matrix = "1 0 0;0 2 0;0 0 3")
+ adaopy.setObserver("Analysis",Template="ValuePrinter")
+ adaopy.execute()
+ del adaopy
#
- adaopy = adaoBuilder.New()
- adaopy.setAlgorithmParameters(Algorithm=algo)
- adaopy.setCheckingPoint (Vector = [0,1,2])
- adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
- adaopy.setObservation (Vector = [0.5,1.5,2.5])
- adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1")
- adaopy.setObservationOperator(Matrix = "1 0 0;0 2 0;0 0 3")
- adaopy.setObserver("Analysis",Template="ValuePrinter")
- adaopy.execute()
- del adaopy
- #
- for algo in ("SamplingTest", ):
- print("")
- msg = "Algorithme en test : %s"%algo
- print(msg+"\n"+"-"*len(msg))
- #
- adaopy = adaoBuilder.New()
- adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={
- "StoreSupplementaryCalculations":["CostFunctionJ","CurrentState",],
- "SampleAsMinMaxStepHyperCube":[[-1.,1.,1.],[0,2,1],[1,3,1]],
- })
- adaopy.setCheckingPoint (Vector = [0,1,2])
- adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
- adaopy.setObservation (Vector = [0.5,1.5,2.5])
- adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1")
- adaopy.setObservationOperator(Matrix = "1 0 0;0 2 0;0 0 3")
- adaopy.setObserver ("CurrentState",Template="ValuePrinter")
- adaopy.execute()
- del adaopy
+ for algo in ("SamplingTest", ):
+ print("")
+ msg = "Algorithme en test : %s"%algo
+ print(msg+"\n"+"-"*len(msg))
+ #
+ adaopy = adaoBuilder.New()
+ adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={
+ "StoreSupplementaryCalculations":["CostFunctionJ","CurrentState",],
+ "SampleAsMinMaxStepHyperCube":[[-1.,1.,1.],[0,2,1],[1,3,1]],
+ })
+ adaopy.setCheckingPoint (Vector = [0,1,2])
+ adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
+ adaopy.setObservation (Vector = [0.5,1.5,2.5])
+ adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1")
+ adaopy.setObservationOperator(Matrix = "1 0 0;0 2 0;0 0 3")
+ adaopy.setObserver ("CurrentState",Template="ValuePrinter")
+ adaopy.execute()
+ del adaopy
-# ==============================================================================
+#===============================================================================
if __name__ == "__main__":
- print('\nAUTODIAGNOSTIC\n')
- test1()
+ print("\nAUTODIAGNOSTIC\n==============")
+ unittest.main()
# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
"Verification du fonctionnement correct d'entrees en mono ou multi-fonctions"
-# ==============================================================================
-import numpy, sys
+import sys
+import unittest
+import numpy
from adao import adaoBuilder
+# ==============================================================================
+
M = numpy.matrix("1 0 0;0 2 0;0 0 3")
def MonoFonction( x ):
return M * numpy.asmatrix(numpy.ravel( x )).T
return _mulHX
# ==============================================================================
-def test1():
- """
- Verification du fonctionnement identique pour les algorithmes non-temporels
- en utilisant une fonction lineaire et carree
- """
- print(test1.__doc__)
- Xa = {}
- #
- for algo in ("3DVAR", "Blue", "ExtendedBlue", "NonLinearLeastSquares", "DerivativeFreeOptimization"):
- print("")
- msg = "Algorithme en test en MonoFonction : %s"%algo
- print(msg+"\n"+"-"*len(msg))
+class InTest(unittest.TestCase):
+ def test1(self):
+ """
+ Verification du fonctionnement identique pour les algorithmes non-temporels
+ en utilisant une fonction lineaire et carree
+ """
+ print(self.test1.__doc__)
+ Xa = {}
#
- adaopy = adaoBuilder.New()
- adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "Bounds":[[-1,10.],[-1,10.],[-1,10.]]})
- adaopy.setBackground (Vector = [0,1,2])
- adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
- adaopy.setObservation (Vector = [0.5,1.5,2.5])
- adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1")
- adaopy.setObservationOperator(OneFunction = MonoFonction)
- adaopy.setObserver("Analysis",Template="ValuePrinter")
- adaopy.execute()
- Xa["Mono/"+algo] = adaopy.get("Analysis")[-1]
- del adaopy
- #
- for algo in ("3DVAR", "Blue", "ExtendedBlue", "NonLinearLeastSquares", "DerivativeFreeOptimization"):
+ for algo in ("3DVAR", "Blue", "ExtendedBlue", "NonLinearLeastSquares", "DerivativeFreeOptimization"):
+ print("")
+ msg = "Algorithme en test en MonoFonction : %s"%algo
+ print(msg+"\n"+"-"*len(msg))
+ #
+ adaopy = adaoBuilder.New()
+ adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "Bounds":[[-1,10.],[-1,10.],[-1,10.]]})
+ adaopy.setBackground (Vector = [0,1,2])
+ adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
+ adaopy.setObservation (Vector = [0.5,1.5,2.5])
+ adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1")
+ adaopy.setObservationOperator(OneFunction = MonoFonction)
+ adaopy.setObserver("Analysis",Template="ValuePrinter")
+ adaopy.execute()
+ Xa["Mono/"+algo] = adaopy.get("Analysis")[-1]
+ del adaopy
+ #
+ for algo in ("3DVAR", "Blue", "ExtendedBlue", "NonLinearLeastSquares", "DerivativeFreeOptimization"):
+ print("")
+ msg = "Algorithme en test en MultiFonction : %s"%algo
+ print(msg+"\n"+"-"*len(msg))
+ #
+ adaopy = adaoBuilder.New()
+ adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "Bounds":[[-1,10.],[-1,10.],[-1,10.]]})
+ adaopy.setBackground (Vector = [0,1,2])
+ adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
+ adaopy.setObservation (Vector = [0.5,1.5,2.5])
+ adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1")
+ adaopy.setObservationOperator(OneFunction = MultiFonction, InputFunctionAsMulti = True)
+ adaopy.setObserver("Analysis",Template="ValuePrinter")
+ adaopy.execute()
+ Xa["Multi/"+algo] = adaopy.get("Analysis")[-1]
+ del adaopy
+ #
+ print("")
+ msg = "Tests des ecarts attendus :"
+ print(msg+"\n"+"="*len(msg))
+ for algo in ("3DVAR", "Blue", "ExtendedBlue", "NonLinearLeastSquares", "DerivativeFreeOptimization"):
+ verify_similarity_of_algo_results(("Multi/"+algo, "Mono/"+algo), Xa, 1.e-20)
+ print(" Les resultats obtenus sont corrects.")
print("")
- msg = "Algorithme en test en MultiFonction : %s"%algo
- print(msg+"\n"+"-"*len(msg))
#
- adaopy = adaoBuilder.New()
- adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "Bounds":[[-1,10.],[-1,10.],[-1,10.]]})
- adaopy.setBackground (Vector = [0,1,2])
- adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
- adaopy.setObservation (Vector = [0.5,1.5,2.5])
- adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1")
- adaopy.setObservationOperator(OneFunction = MultiFonction, InputFunctionAsMulti = True)
- adaopy.setObserver("Analysis",Template="ValuePrinter")
- adaopy.execute()
- Xa["Multi/"+algo] = adaopy.get("Analysis")[-1]
- del adaopy
- #
- print("")
- msg = "Tests des ecarts attendus :"
- print(msg+"\n"+"="*len(msg))
- for algo in ("3DVAR", "Blue", "ExtendedBlue", "NonLinearLeastSquares", "DerivativeFreeOptimization"):
- verify_similarity_of_algo_results(("Multi/"+algo, "Mono/"+algo), Xa, 1.e-20)
- print(" Les resultats obtenus sont corrects.")
- print("")
- #
- return 0
-
+ return 0
+#
# ==============================================================================
def almost_equal_vectors(v1, v2, precision = 1.e-15, msg = ""):
"""Comparaison de deux vecteurs"""
print(" Difference maximale %s: %.2e"%(msg, max(abs(v2 - v1))))
return max(abs(v2 - v1)) < precision
-
+#
def verify_similarity_of_algo_results(serie = [], Xa = {}, precision = 1.e-15):
print(" Comparaisons :")
for algo1 in serie:
assert almost_equal_vectors( Xa[algo1], Xa[algo2], precision, "entre %s et %s "%(algo1, algo2) )
print(" Algorithmes dont les resultats sont similaires a %.0e : %s\n"%(precision, serie,))
sys.stdout.flush()
-
+#
#===============================================================================
if __name__ == "__main__":
- print('\nAUTODIAGNOSTIC\n')
- test1()
+ print("\nAUTODIAGNOSTIC\n==============")
+ unittest.main()
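The mono/multi tests above exercise two calling conventions for the same observation operator: OneFunction alone receives a single state, while OneFunction with InputFunctionAsMulti=True receives a series of states and must return the series of evaluations. A hedged sketch of the relationship, reusing the MonoFonction shown at the top of these files; the loop body is an assumption consistent with the truncated MultiFonction (which ends with "return _mulHX"):

import numpy

M = numpy.matrix("1 0 0;0 2 0;0 0 3")

def MonoFonction( x ):
    # One state in, one observation out.
    return M * numpy.asmatrix(numpy.ravel( x )).T

def MultiFonction( xSerie ):
    # A series of states in, the series of corresponding observations out,
    # obtained here by simply looping over the mono-function.
    _mulHX = []
    for x in xSerie:
        _mulHX.append( MonoFonction( x ) )
    return _mulHX

print( MultiFonction( [[0, 1, 2], [0.5, 1.5, 2.5]] ) )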
# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
"Verification du fonctionnement correct d'entrees en mono ou multi-fonctions"
-# ==============================================================================
-import numpy, sys
+import sys
+import unittest
+import numpy
from adao import adaoBuilder
+# ==============================================================================
+
M = numpy.matrix("1 0 0;0 2 0;0 0 3")
def MonoFonction( x ):
return M * numpy.asmatrix(numpy.ravel( x )).T
return _mulHX
# ==============================================================================
-def test1():
- """
- Verification du fonctionnement identique pour les algorithmes temporels
- en utilisant une fonction lineaire et carree
- """
- print(test1.__doc__)
- Xa = {}
- #
- for algo in ("ExtendedKalmanFilter", "KalmanFilter", "EnsembleKalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
- print("")
- msg = "Algorithme en test en MonoFonction : %s"%algo
- print(msg+"\n"+"-"*len(msg))
+class InTest(unittest.TestCase):
+ def test1(self):
+ """
+ Verification du fonctionnement identique pour les algorithmes temporels
+ en utilisant une fonction lineaire et carree
+ """
+ print(self.test1.__doc__)
+ Xa = {}
#
- adaopy = adaoBuilder.New()
- adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "SetSeed":1000})
- adaopy.setBackground (Vector = [0,1,2])
- adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
- adaopy.setObservation (Vector = [0.5,1.5,2.5])
- adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1")
- adaopy.setObservationOperator(OneFunction = MonoFonction)
- adaopy.setEvolutionError (ScalarSparseMatrix = 1.)
- adaopy.setEvolutionModel (Matrix = "1 0 0;0 1 0;0 0 1")
- adaopy.setObserver("Analysis",Template="ValuePrinter")
- adaopy.execute()
- Xa["Mono/"+algo] = adaopy.get("Analysis")[-1]
- del adaopy
- #
- for algo in ("ExtendedKalmanFilter", "KalmanFilter", "EnsembleKalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
+ for algo in ("ExtendedKalmanFilter", "KalmanFilter", "EnsembleKalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
+ print("")
+ msg = "Algorithme en test en MonoFonction : %s"%algo
+ print(msg+"\n"+"-"*len(msg))
+ #
+ adaopy = adaoBuilder.New()
+ adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "SetSeed":1000})
+ adaopy.setBackground (Vector = [0,1,2])
+ adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
+ adaopy.setObservation (Vector = [0.5,1.5,2.5])
+ adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1")
+ adaopy.setObservationOperator(OneFunction = MonoFonction)
+ adaopy.setEvolutionError (ScalarSparseMatrix = 1.)
+ adaopy.setEvolutionModel (Matrix = "1 0 0;0 1 0;0 0 1")
+ adaopy.setObserver("Analysis",Template="ValuePrinter")
+ adaopy.execute()
+ Xa["Mono/"+algo] = adaopy.get("Analysis")[-1]
+ del adaopy
+ #
+ for algo in ("ExtendedKalmanFilter", "KalmanFilter", "EnsembleKalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
+ print("")
+ msg = "Algorithme en test en MultiFonction : %s"%algo
+ print(msg+"\n"+"-"*len(msg))
+ #
+ adaopy = adaoBuilder.New()
+ adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "SetSeed":1000})
+ adaopy.setBackground (Vector = [0,1,2])
+ adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
+ adaopy.setObservation (Vector = [0.5,1.5,2.5])
+ adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1")
+ adaopy.setObservationOperator(OneFunction = MultiFonction, InputFunctionAsMulti = True)
+ adaopy.setEvolutionError (ScalarSparseMatrix = 1.)
+ adaopy.setEvolutionModel (Matrix = "1 0 0;0 1 0;0 0 1")
+ adaopy.setObserver("Analysis",Template="ValuePrinter")
+ adaopy.execute()
+ Xa["Multi/"+algo] = adaopy.get("Analysis")[-1]
+ del adaopy
+ #
+ print("")
+ msg = "Tests des ecarts attendus :"
+ print(msg+"\n"+"="*len(msg))
+ for algo in ("ExtendedKalmanFilter", "KalmanFilter", "EnsembleKalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
+ verify_similarity_of_algo_results(("Multi/"+algo, "Mono/"+algo), Xa, 1.e-20)
+ print(" Les resultats obtenus sont corrects.")
print("")
- msg = "Algorithme en test en MultiFonction : %s"%algo
- print(msg+"\n"+"-"*len(msg))
#
- adaopy = adaoBuilder.New()
- adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "SetSeed":1000})
- adaopy.setBackground (Vector = [0,1,2])
- adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
- adaopy.setObservation (Vector = [0.5,1.5,2.5])
- adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1")
- adaopy.setObservationOperator(OneFunction = MultiFonction, InputFunctionAsMulti = True)
- adaopy.setEvolutionError (ScalarSparseMatrix = 1.)
- adaopy.setEvolutionModel (Matrix = "1 0 0;0 1 0;0 0 1")
- adaopy.setObserver("Analysis",Template="ValuePrinter")
- adaopy.execute()
- Xa["Multi/"+algo] = adaopy.get("Analysis")[-1]
- del adaopy
- #
- print("")
- msg = "Tests des ecarts attendus :"
- print(msg+"\n"+"="*len(msg))
- for algo in ("ExtendedKalmanFilter", "KalmanFilter", "EnsembleKalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
- verify_similarity_of_algo_results(("Multi/"+algo, "Mono/"+algo), Xa, 1.e-20)
- print(" Les resultats obtenus sont corrects.")
- print("")
- #
- return 0
+ return 0
# ==============================================================================
def almost_equal_vectors(v1, v2, precision = 1.e-15, msg = ""):
#===============================================================================
if __name__ == "__main__":
- print('\nAUTODIAGNOSTIC\n')
- test1()
+ print("\nAUTODIAGNOSTIC\n==============")
+ unittest.main()
# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
"Verification du fonctionnement correct d'entrees en mono ou multi-fonctions"
-# ==============================================================================
-import numpy, sys
+import sys
+import unittest
+import numpy
from adao import adaoBuilder
+# ==============================================================================
+
M = numpy.matrix("1 0 0;0 2 0;0 0 3")
def MonoFonction( x ):
return M * numpy.asmatrix(numpy.ravel( x )).T
return _mulHX
# ==============================================================================
-def test1():
- """
- Verification du fonctionnement identique pour les algorithmes autres
- en utilisant une fonction lineaire et carree
- """
- print(test1.__doc__)
- Xa = {}
- #
- for algo in ("ParticleSwarmOptimization", "QuantileRegression", ):
- print("")
- msg = "Algorithme en test en MonoFonction : %s"%algo
- print(msg+"\n"+"-"*len(msg))
+class InTest(unittest.TestCase):
+ def test1(self):
+ """
+ Verification du fonctionnement identique pour les algorithmes autres
+ en utilisant une fonction lineaire et carree
+ """
+ print(self.test1.__doc__)
+ Xa = {}
#
- adaopy = adaoBuilder.New()
- adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"BoxBounds":3*[[-1,3]], "SetSeed":1000})
- adaopy.setBackground (Vector = [0,1,2])
- adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
- adaopy.setObservation (Vector = [0.5,1.5,2.5])
- adaopy.setObservationError (DiagonalSparseMatrix = "1 2 3")
- adaopy.setObservationOperator(OneFunction = MonoFonction)
- adaopy.setObserver("Analysis",Template="ValuePrinter")
- adaopy.execute()
- Xa["Mono/"+algo] = adaopy.get("Analysis")[-1]
- del adaopy
- #
- for algo in ("ParticleSwarmOptimization", "QuantileRegression", ):
+ for algo in ("ParticleSwarmOptimization", "QuantileRegression", ):
+ print("")
+ msg = "Algorithme en test en MonoFonction : %s"%algo
+ print(msg+"\n"+"-"*len(msg))
+ #
+ adaopy = adaoBuilder.New()
+ adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"BoxBounds":3*[[-1,3]], "SetSeed":1000})
+ adaopy.setBackground (Vector = [0,1,2])
+ adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
+ adaopy.setObservation (Vector = [0.5,1.5,2.5])
+ adaopy.setObservationError (DiagonalSparseMatrix = "1 2 3")
+ adaopy.setObservationOperator(OneFunction = MonoFonction)
+ adaopy.setObserver("Analysis",Template="ValuePrinter")
+ adaopy.execute()
+ Xa["Mono/"+algo] = adaopy.get("Analysis")[-1]
+ del adaopy
+ #
+ for algo in ("ParticleSwarmOptimization", "QuantileRegression", ):
+ print("")
+ msg = "Algorithme en test en MultiFonction : %s"%algo
+ print(msg+"\n"+"-"*len(msg))
+ #
+ adaopy = adaoBuilder.New()
+ adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"BoxBounds":3*[[-1,3]], "SetSeed":1000})
+ adaopy.setBackground (Vector = [0,1,2])
+ adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
+ adaopy.setObservation (Vector = [0.5,1.5,2.5])
+ adaopy.setObservationError (DiagonalSparseMatrix = "1 2 3")
+ adaopy.setObservationOperator(OneFunction = MultiFonction, InputFunctionAsMulti = True)
+ adaopy.setObserver("Analysis",Template="ValuePrinter")
+ adaopy.execute()
+ Xa["Multi/"+algo] = adaopy.get("Analysis")[-1]
+ del adaopy
+ #
+ print("")
+ msg = "Tests des ecarts attendus :"
+ print(msg+"\n"+"="*len(msg))
+ for algo in ("ParticleSwarmOptimization", "QuantileRegression"):
+ verify_similarity_of_algo_results(("Multi/"+algo, "Mono/"+algo), Xa, 1.e-20)
+ print(" Les resultats obtenus sont corrects.")
print("")
- msg = "Algorithme en test en MultiFonction : %s"%algo
- print(msg+"\n"+"-"*len(msg))
#
- adaopy = adaoBuilder.New()
- adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"BoxBounds":3*[[-1,3]], "SetSeed":1000})
- adaopy.setBackground (Vector = [0,1,2])
- adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
- adaopy.setObservation (Vector = [0.5,1.5,2.5])
- adaopy.setObservationError (DiagonalSparseMatrix = "1 2 3")
- adaopy.setObservationOperator(OneFunction = MultiFonction, InputFunctionAsMulti = True)
- adaopy.setObserver("Analysis",Template="ValuePrinter")
- adaopy.execute()
- Xa["Multi/"+algo] = adaopy.get("Analysis")[-1]
- del adaopy
- #
- print("")
- msg = "Tests des ecarts attendus :"
- print(msg+"\n"+"="*len(msg))
- for algo in ("ParticleSwarmOptimization", "QuantileRegression"):
- verify_similarity_of_algo_results(("Multi/"+algo, "Mono/"+algo), Xa, 1.e-20)
- print(" Les resultats obtenus sont corrects.")
- print("")
- #
- return 0
+ return 0
# ==============================================================================
def almost_equal_vectors(v1, v2, precision = 1.e-15, msg = ""):
#===============================================================================
if __name__ == "__main__":
- print('\nAUTODIAGNOSTIC\n')
- test1()
+ print("\nAUTODIAGNOSTIC\n==============")
+ unittest.main()
# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
"Verification du fonctionnement correct d'entrees en mono ou multi-fonctions"
-# ==============================================================================
-import numpy, sys
+import sys
+import unittest
+import numpy
from adao import adaoBuilder
+# ==============================================================================
+
def ElementaryFunction01( InputArgument ):
"""
Exemple de fonction non-lineaire et non-carree
return _ySerie
# ==============================================================================
-def test1():
- """
- Verification du fonctionnement identique pour les algorithmes non-temporels
- en utilisant une fonction non-lineaire et non-carree
- """
- print(test1.__doc__)
- Xa = {}
- #
- for algo in ("3DVAR", "Blue", "ExtendedBlue", "NonLinearLeastSquares", "DerivativeFreeOptimization"):
- print("")
- msg = "Algorithme en test en MonoFonction : %s"%algo
- print(msg+"\n"+"-"*len(msg))
+class InTest(unittest.TestCase):
+ def test1(self):
+ """
+ Verification du fonctionnement identique pour les algorithmes non-temporels
+ en utilisant une fonction non-lineaire et non-carree
+ """
+ print(self.test1.__doc__)
+ Xa = {}
+ #
+ for algo in ("3DVAR", "Blue", "ExtendedBlue", "NonLinearLeastSquares", "DerivativeFreeOptimization"):
+ print("")
+ msg = "Algorithme en test en MonoFonction : %s"%algo
+ print(msg+"\n"+"-"*len(msg))
+ #
+ adaopy = adaoBuilder.New()
+ adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "Bounds":[[-1,10.],[-1,10.],[-1,10.]]})
+ adaopy.setBackground (Vector = [0,1,2])
+ adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
+ adaopy.setObservation (Vector = [0.5,1.5,2.5,0.5,1.5,2.5,0.5,1.5,2.5])
+ adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1 1 1 1 1 1 1")
+ adaopy.setObservationOperator(OneFunction = ElementaryFunction01)
+ adaopy.setObserver("Analysis",Template="ValuePrinter")
+ adaopy.execute()
+ Xa["Mono/"+algo] = adaopy.get("Analysis")[-1]
+ del adaopy
+ #
+ for algo in ("3DVAR", "Blue", "ExtendedBlue", "NonLinearLeastSquares", "DerivativeFreeOptimization"):
+ print("")
+ msg = "Algorithme en test en MultiFonction : %s"%algo
+ print(msg+"\n"+"-"*len(msg))
+ #
+ adaopy = adaoBuilder.New()
+ adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "Bounds":[[-1,10.],[-1,10.],[-1,10.]]})
+ adaopy.setBackground (Vector = [0,1,2])
+ adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
+ adaopy.setObservation (Vector = [0.5,1.5,2.5,0.5,1.5,2.5,0.5,1.5,2.5])
+ adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1 1 1 1 1 1 1")
+ adaopy.setObservationOperator(OneFunction = MultiFonction01, InputFunctionAsMulti = True)
+ adaopy.setObserver("Analysis",Template="ValuePrinter")
+ adaopy.execute()
+ Xa["Multi/"+algo] = adaopy.get("Analysis")[-1]
+ del adaopy
#
- adaopy = adaoBuilder.New()
- adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "Bounds":[[-1,10.],[-1,10.],[-1,10.]]})
- adaopy.setBackground (Vector = [0,1,2])
- adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
- adaopy.setObservation (Vector = [0.5,1.5,2.5,0.5,1.5,2.5,0.5,1.5,2.5])
- adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1 1 1 1 1 1 1")
- adaopy.setObservationOperator(OneFunction = ElementaryFunction01)
- adaopy.setObserver("Analysis",Template="ValuePrinter")
- adaopy.execute()
- Xa["Mono/"+algo] = adaopy.get("Analysis")[-1]
- del adaopy
- #
- for algo in ("3DVAR", "Blue", "ExtendedBlue", "NonLinearLeastSquares", "DerivativeFreeOptimization"):
print("")
- msg = "Algorithme en test en MultiFonction : %s"%algo
- print(msg+"\n"+"-"*len(msg))
+ msg = "Tests des ecarts attendus :"
+ print(msg+"\n"+"="*len(msg))
+ for algo in ("3DVAR", "Blue", "ExtendedBlue", "NonLinearLeastSquares", "DerivativeFreeOptimization"):
+ verify_similarity_of_algo_results(("Multi/"+algo, "Mono/"+algo), Xa, 1.e-20)
+ print(" Les resultats obtenus sont corrects.")
+ print("")
#
- adaopy = adaoBuilder.New()
- adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "Bounds":[[-1,10.],[-1,10.],[-1,10.]]})
- adaopy.setBackground (Vector = [0,1,2])
- adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
- adaopy.setObservation (Vector = [0.5,1.5,2.5,0.5,1.5,2.5,0.5,1.5,2.5])
- adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1 1 1 1 1 1 1")
- adaopy.setObservationOperator(OneFunction = MultiFonction01, InputFunctionAsMulti = True)
- adaopy.setObserver("Analysis",Template="ValuePrinter")
- adaopy.execute()
- Xa["Multi/"+algo] = adaopy.get("Analysis")[-1]
- del adaopy
- #
- print("")
- msg = "Tests des ecarts attendus :"
- print(msg+"\n"+"="*len(msg))
- for algo in ("3DVAR", "Blue", "ExtendedBlue", "NonLinearLeastSquares", "DerivativeFreeOptimization"):
- verify_similarity_of_algo_results(("Multi/"+algo, "Mono/"+algo), Xa, 1.e-20)
- print(" Les resultats obtenus sont corrects.")
- print("")
- #
- return 0
+ return 0
# ==============================================================================
def almost_equal_vectors(v1, v2, precision = 1.e-15, msg = ""):
#===============================================================================
if __name__ == "__main__":
- print('\nAUTODIAGNOSTIC\n')
- test1()
+ print("\nAUTODIAGNOSTIC\n==============")
+ unittest.main()
# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
"Verification du fonctionnement correct d'entrees en mono ou multi-fonctions"
-# ==============================================================================
-import numpy, sys
+import sys
+import unittest
+import numpy
from adao import adaoBuilder
+# ==============================================================================
+
def ElementaryFunction01( InputArgument ):
"""
Exemple de fonction non-lineaire et non-carree
return _ySerie
# ==============================================================================
-def test1():
- """
- Verification du fonctionnement identique pour les algorithmes temporels
- en utilisant une fonction non-lineaire et non-carree
- """
- print(test1.__doc__)
- Xa = {}
- #
- for algo in ("ExtendedKalmanFilter", "KalmanFilter", "EnsembleKalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
- print("")
- msg = "Algorithme en test en MonoFonction : %s"%algo
- print(msg+"\n"+"-"*len(msg))
+class InTest(unittest.TestCase):
+ def test1(self):
+ """
+ Verification du fonctionnement identique pour les algorithmes temporels
+ en utilisant une fonction non-lineaire et non-carree
+ """
+ print(self.test1.__doc__)
+ Xa = {}
+ #
+ for algo in ("ExtendedKalmanFilter", "KalmanFilter", "EnsembleKalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
+ print("")
+ msg = "Algorithme en test en MonoFonction : %s"%algo
+ print(msg+"\n"+"-"*len(msg))
+ #
+ adaopy = adaoBuilder.New()
+ adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "SetSeed":1000})
+ adaopy.setBackground (Vector = [0,1,2])
+ adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
+ adaopy.setObservation (Vector = [0.5,1.5,2.5,0.5,1.5,2.5,0.5,1.5,2.5])
+ adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1 1 1 1 1 1 1")
+ adaopy.setObservationOperator(OneFunction = ElementaryFunction01)
+ adaopy.setEvolutionError (ScalarSparseMatrix = 1.)
+ adaopy.setEvolutionModel (Matrix = "1 0 0;0 1 0;0 0 1")
+ adaopy.setObserver("Analysis",Template="ValuePrinter")
+ adaopy.execute()
+ Xa["Mono/"+algo] = adaopy.get("Analysis")[-1]
+ del adaopy
+ #
+ for algo in ("ExtendedKalmanFilter", "KalmanFilter", "EnsembleKalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
+ print("")
+ msg = "Algorithme en test en MultiFonction : %s"%algo
+ print(msg+"\n"+"-"*len(msg))
+ #
+ adaopy = adaoBuilder.New()
+ adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "SetSeed":1000})
+ adaopy.setBackground (Vector = [0,1,2])
+ adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
+ adaopy.setObservation (Vector = [0.5,1.5,2.5,0.5,1.5,2.5,0.5,1.5,2.5])
+ adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1 1 1 1 1 1 1")
+ adaopy.setObservationOperator(OneFunction = MultiFonction01, InputFunctionAsMulti = True)
+ adaopy.setEvolutionError (ScalarSparseMatrix = 1.)
+ adaopy.setEvolutionModel (Matrix = "1 0 0;0 1 0;0 0 1")
+ adaopy.setObserver("Analysis",Template="ValuePrinter")
+ adaopy.execute()
+ Xa["Multi/"+algo] = adaopy.get("Analysis")[-1]
+ del adaopy
#
- adaopy = adaoBuilder.New()
- adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "SetSeed":1000})
- adaopy.setBackground (Vector = [0,1,2])
- adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
- adaopy.setObservation (Vector = [0.5,1.5,2.5,0.5,1.5,2.5,0.5,1.5,2.5])
- adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1 1 1 1 1 1 1")
- adaopy.setObservationOperator(OneFunction = ElementaryFunction01)
- adaopy.setEvolutionError (ScalarSparseMatrix = 1.)
- adaopy.setEvolutionModel (Matrix = "1 0 0;0 1 0;0 0 1")
- adaopy.setObserver("Analysis",Template="ValuePrinter")
- adaopy.execute()
- Xa["Mono/"+algo] = adaopy.get("Analysis")[-1]
- del adaopy
- #
- for algo in ("ExtendedKalmanFilter", "KalmanFilter", "EnsembleKalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
print("")
- msg = "Algorithme en test en MultiFonction : %s"%algo
- print(msg+"\n"+"-"*len(msg))
+ msg = "Tests des ecarts attendus :"
+ print(msg+"\n"+"="*len(msg))
+ for algo in ("ExtendedKalmanFilter", "KalmanFilter", "EnsembleKalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
+ verify_similarity_of_algo_results(("Multi/"+algo, "Mono/"+algo), Xa, 1.e-20)
+ print(" Les resultats obtenus sont corrects.")
+ print("")
#
- adaopy = adaoBuilder.New()
- adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "SetSeed":1000})
- adaopy.setBackground (Vector = [0,1,2])
- adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
- adaopy.setObservation (Vector = [0.5,1.5,2.5,0.5,1.5,2.5,0.5,1.5,2.5])
- adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1 1 1 1 1 1 1")
- adaopy.setObservationOperator(OneFunction = MultiFonction01, InputFunctionAsMulti = True)
- adaopy.setEvolutionError (ScalarSparseMatrix = 1.)
- adaopy.setEvolutionModel (Matrix = "1 0 0;0 1 0;0 0 1")
- adaopy.setObserver("Analysis",Template="ValuePrinter")
- adaopy.execute()
- Xa["Multi/"+algo] = adaopy.get("Analysis")[-1]
- del adaopy
- #
- print("")
- msg = "Tests des ecarts attendus :"
- print(msg+"\n"+"="*len(msg))
- for algo in ("ExtendedKalmanFilter", "KalmanFilter", "EnsembleKalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
- verify_similarity_of_algo_results(("Multi/"+algo, "Mono/"+algo), Xa, 1.e-20)
- print(" Les resultats obtenus sont corrects.")
- print("")
- #
- return 0
+ return 0
# ==============================================================================
def almost_equal_vectors(v1, v2, precision = 1.e-15, msg = ""):
#===============================================================================
if __name__ == "__main__":
- print('\nAUTODIAGNOSTIC\n')
- test1()
+ print("\nAUTODIAGNOSTIC\n==============")
+ unittest.main()
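The bodies of ElementaryFunction01 and MultiFonction01 are elided by the diff context above. As a minimal sketch of the convention the Mono/Multi comparisons rely on (assuming the same wrapping pattern as the multisimulation helper added later in this change set), the multi-function form simply maps the elementary operator over a series of states, which is what InputFunctionAsMulti = True declares to ADAO:

    import numpy

    def ElementaryFunctionSketch( x ):
        "Hypothetical non-linear, non-square operator: 3 inputs -> 9 outputs"
        x = numpy.ravel( x )
        return numpy.concatenate( (x, x**2, numpy.cos( x )) )

    def MultiFonctionSketch( xserie ):
        "Multi-function form: a series of states X gives the series of Y = H(X)"
        return [ ElementaryFunctionSketch( x ) for x in xserie ]

With that declaration, ADAO hands the operator a list of states and expects the matching list of evaluations back (here 9 components per state, matching the 9-component observation vector used in the tests), so the Mono and Multi runs should converge to the same analysis, which is exactly what the final comparison loop verifies.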
# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
"Verification du fonctionnement correct d'entrees en mono ou multi-fonctions"
-# ==============================================================================
-import numpy, sys
+import sys
+import unittest
+import numpy
from adao import adaoBuilder
+# ==============================================================================
+
def ElementaryFunction01( InputArgument ):
"""
Exemple de fonction non-lineaire et non-carree
return _ySerie
# ==============================================================================
-def test1():
- """
- Verification du fonctionnement identique pour les algorithmes autres
- en utilisant une fonction non-lineaire et non-carree
- """
- print(test1.__doc__)
- Xa = {}
- #
- for algo in ("ParticleSwarmOptimization", "QuantileRegression", ):
- print("")
- msg = "Algorithme en test en MonoFonction : %s"%algo
- print(msg+"\n"+"-"*len(msg))
+class InTest(unittest.TestCase):
+ def test1(self):
+ """
+ Verification du fonctionnement identique pour les algorithmes autres
+ en utilisant une fonction non-lineaire et non-carree
+ """
+ print(self.test1.__doc__)
+ Xa = {}
+ #
+ for algo in ("ParticleSwarmOptimization", "QuantileRegression", ):
+ print("")
+ msg = "Algorithme en test en MonoFonction : %s"%algo
+ print(msg+"\n"+"-"*len(msg))
+ #
+ adaopy = adaoBuilder.New()
+ adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"BoxBounds":3*[[-1,3]], "SetSeed":1000})
+ adaopy.setBackground (Vector = [0,1,2])
+ adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
+ adaopy.setObservation (Vector = [0.5,1.5,2.5,0.5,1.5,2.5,0.5,1.5,2.5])
+ adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1 1 1 1 1 1 1")
+ adaopy.setObservationOperator(OneFunction = ElementaryFunction01)
+ adaopy.setObserver("Analysis",Template="ValuePrinter")
+ adaopy.execute()
+ Xa["Mono/"+algo] = adaopy.get("Analysis")[-1]
+ del adaopy
+ #
+ for algo in ("ParticleSwarmOptimization", "QuantileRegression", ):
+ print("")
+ msg = "Algorithme en test en MultiFonction : %s"%algo
+ print(msg+"\n"+"-"*len(msg))
+ #
+ adaopy = adaoBuilder.New()
+ adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"BoxBounds":3*[[-1,3]], "SetSeed":1000})
+ adaopy.setBackground (Vector = [0,1,2])
+ adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
+ adaopy.setObservation (Vector = [0.5,1.5,2.5,0.5,1.5,2.5,0.5,1.5,2.5])
+ adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1 1 1 1 1 1 1")
+ adaopy.setObservationOperator(OneFunction = MultiFonction01, InputFunctionAsMulti = True)
+ adaopy.setObserver("Analysis",Template="ValuePrinter")
+ adaopy.execute()
+ Xa["Multi/"+algo] = adaopy.get("Analysis")[-1]
+ del adaopy
#
- adaopy = adaoBuilder.New()
- adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"BoxBounds":3*[[-1,3]], "SetSeed":1000})
- adaopy.setBackground (Vector = [0,1,2])
- adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
- adaopy.setObservation (Vector = [0.5,1.5,2.5,0.5,1.5,2.5,0.5,1.5,2.5])
- adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1 1 1 1 1 1 1")
- adaopy.setObservationOperator(OneFunction = ElementaryFunction01)
- adaopy.setObserver("Analysis",Template="ValuePrinter")
- adaopy.execute()
- Xa["Mono/"+algo] = adaopy.get("Analysis")[-1]
- del adaopy
- #
- for algo in ("ParticleSwarmOptimization", "QuantileRegression", ):
print("")
- msg = "Algorithme en test en MultiFonction : %s"%algo
- print(msg+"\n"+"-"*len(msg))
+ msg = "Tests des ecarts attendus :"
+ print(msg+"\n"+"="*len(msg))
+ for algo in ("ParticleSwarmOptimization", "QuantileRegression"):
+ verify_similarity_of_algo_results(("Multi/"+algo, "Mono/"+algo), Xa, 1.e-20)
+ print(" Les resultats obtenus sont corrects.")
+ print("")
#
- adaopy = adaoBuilder.New()
- adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"BoxBounds":3*[[-1,3]], "SetSeed":1000})
- adaopy.setBackground (Vector = [0,1,2])
- adaopy.setBackgroundError (ScalarSparseMatrix = 1.)
- adaopy.setObservation (Vector = [0.5,1.5,2.5,0.5,1.5,2.5,0.5,1.5,2.5])
- adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1 1 1 1 1 1 1")
- adaopy.setObservationOperator(OneFunction = MultiFonction01, InputFunctionAsMulti = True)
- adaopy.setObserver("Analysis",Template="ValuePrinter")
- adaopy.execute()
- Xa["Multi/"+algo] = adaopy.get("Analysis")[-1]
- del adaopy
- #
- print("")
- msg = "Tests des ecarts attendus :"
- print(msg+"\n"+"="*len(msg))
- for algo in ("ParticleSwarmOptimization", "QuantileRegression"):
- verify_similarity_of_algo_results(("Multi/"+algo, "Mono/"+algo), Xa, 1.e-20)
- print(" Les resultats obtenus sont corrects.")
- print("")
- #
- return 0
+ return 0
# ==============================================================================
def almost_equal_vectors(v1, v2, precision = 1.e-15, msg = ""):
#===============================================================================
if __name__ == "__main__":
- print('\nAUTODIAGNOSTIC\n')
- test1()
+ print("\nAUTODIAGNOSTIC\n==============")
+ unittest.main()
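In the hunk above, the bounded algorithms receive "BoxBounds":3*[[-1,3]]. Python list repetition just duplicates the [lower, upper] pair once per component of the 3-vector state, as this quick check shows:

    # 3*[[-1, 3]] yields one [lower, upper] pair per state component
    bounds = 3 * [[-1, 3]]
    assert bounds == [[-1, 3], [-1, 3], [-1, 3]]

The three pairs are references to the same inner list, which is harmless here since the bounds are only read.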
--- /dev/null
+# Copyright (C) 2008-2019 EDF R&D
+#
+# This file is part of SALOME ADAO module
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+
+SET(TEST_NAMES
+ Definition_complete_de_cas_3DVAR
+ )
+
+FOREACH(tfile ${TEST_NAMES})
+ SET(TEST_NAME ADAO_${tfile})
+ ADD_TEST(${TEST_NAME} python ${tfile}.py)
+ #ADD_TEST(${TEST_NAME} python ${SALOME_TEST_DRIVER} ${TIMEOUT} ${tfile}.py)
+ SET_TESTS_PROPERTIES(${TEST_NAME} PROPERTIES LABELS "${COMPONENT_NAME}")
+ENDFOREACH()
--- /dev/null
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2008-2019 EDF R&D
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
+"Verification d'un exemple de la documentation"
+
+import sys
+import unittest
+import numpy
+from utExtend import assertAlmostEqualArrays
+
+# ==============================================================================
+#
+# Construction artificielle d'un exemple de donnees utilisateur
+# -------------------------------------------------------------
+alpha = 5.
+beta = 7
+gamma = 9.0
+#
+alphamin, alphamax = 0., 10.
+betamin, betamax = 3, 13
+gammamin, gammamax = 1.5, 15.5
+#
+def simulation(x):
+ "Fonction de simulation H pour effectuer Y=H(X)"
+ import numpy
+ __x = numpy.matrix(numpy.ravel(numpy.matrix(x))).T
+ __H = numpy.matrix("1 0 0;0 2 0;0 0 3; 1 2 3")
+ return __H * __x
+#
+def multisimulation( xserie ):
+ yserie = []
+ for x in xserie:
+ yserie.append( simulation( x ) )
+ return yserie
+#
+# Observations obtenues par simulation
+# ------------------------------------
+observations = simulation((2, 3, 4))
+
+# ==============================================================================
+class InTest(unittest.TestCase):
+ def test1(self):
+ print("""Exemple de la doc :
+
+ Exploitation independante des resultats d'un cas de calcul
+ ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+ """)
+ #
+ import numpy
+ from adao import adaoBuilder
+ #
+ # Mise en forme des entrees
+ # -------------------------
+ Xb = (alpha, beta, gamma)
+ Bounds = (
+ (alphamin, alphamax),
+ (betamin, betamax ),
+ (gammamin, gammamax))
+ #
+ # TUI ADAO
+ # --------
+ case = adaoBuilder.New()
+ case.set( 'AlgorithmParameters',
+ Algorithm = '3DVAR', # Mot-clé réservé
+ Parameters = { # Dictionnaire
+ "Bounds":Bounds, # Liste de paires de Real ou de None
+ "MaximumNumberOfSteps":100, # Int >= 0
+ "CostDecrementTolerance":1.e-7, # Real > 0
+ "StoreSupplementaryCalculations":[# Liste de mots-clés réservés
+ "CostFunctionJAtCurrentOptimum",
+ "CostFunctionJoAtCurrentOptimum",
+ "CurrentOptimum",
+ "SimulatedObservationAtCurrentOptimum",
+ "SimulatedObservationAtOptimum",
+ ],
+ }
+ )
+ case.set( 'Background',
+ Vector = numpy.array(Xb), # array, list, tuple, matrix
+ Stored = True, # Bool
+ )
+ case.set( 'Observation',
+ Vector = numpy.array(observations), # array, list, tuple, matrix
+ Stored = False, # Bool
+ )
+ case.set( 'BackgroundError',
+ Matrix = None, # None ou matrice carrée
+ ScalarSparseMatrix = 1.0e10, # None ou Real > 0
+ DiagonalSparseMatrix = None, # None ou vecteur
+ )
+ case.set( 'ObservationError',
+ Matrix = None, # None ou matrice carrée
+ ScalarSparseMatrix = 1.0, # None ou Real > 0
+ DiagonalSparseMatrix = None, # None ou vecteur
+ )
+ case.set( 'ObservationOperator',
+ OneFunction = multisimulation, # MultiFonction [Y] = F([X])
+ Parameters = { # Dictionnaire
+ "DifferentialIncrement":0.0001, # Real > 0
+ "CenteredFiniteDifference":False, # Bool
+ },
+ InputFunctionAsMulti = True, # Bool
+ )
+ case.set( 'Observer',
+ Variable = "CurrentState", # Mot-clé
+ Template = "ValuePrinter", # Mot-clé
+ String = None, # None ou code Python
+ Info = None, # None ou string
+ )
+ case.execute()
+ #
+ # Exploitation independante
+ # -------------------------
+ Xbackground = case.get("Background")
+ Xoptimum = case.get("Analysis")[-1]
+ FX_at_optimum = case.get("SimulatedObservationAtOptimum")[-1]
+ J_values = case.get("CostFunctionJAtCurrentOptimum")[:]
+ print("")
+ print("Number of internal iterations...: %i"%len(J_values))
+ print("Initial state...................: %s"%(numpy.ravel(Xbackground),))
+ print("Optimal state...................: %s"%(numpy.ravel(Xoptimum),))
+ print("Simulation at optimal state.....: %s"%(numpy.ravel(FX_at_optimum),))
+ print("")
+ #
+ ecart = assertAlmostEqualArrays(Xoptimum, [ 2., 3., 4.])
+ #
+ print(" L'écart absolu maximal obtenu lors du test est de %.2e."%ecart)
+ print(" Les résultats obtenus sont corrects.")
+ print("")
+ #
+ return Xoptimum
+
+# ==============================================================================
+if __name__ == '__main__':
+ print("\nAUTODIAGNOSTIC\n==============")
+ unittest.main()
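The helper assertAlmostEqualArrays is imported from utExtend, which is not part of this change set; from its use above it compares two arrays and returns the maximum absolute gap, failing when that gap is too large. A minimal sketch under that assumption (the actual signature in utExtend may differ):

    import numpy

    def assertAlmostEqualArrays( first, second, places = 7, msg = "" ):
        "Hypothetical stand-in: compare two arrays, return the max absolute gap"
        gap = numpy.max( numpy.abs( numpy.ravel( first ) - numpy.ravel( second ) ) )
        if gap > 10**(-places):
            raise AssertionError( msg or "Maximum absolute gap %.2e exceeds 1.e-%i"%(gap, places) )
        return gap

Such a helper lets the test both assert closeness to the expected optimum [2., 3., 4.] and print the achieved gap, as done in the lines above.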