From: Jean-Philippe ARGAUD Date: Thu, 31 Jan 2019 20:34:13 +0000 (+0100) Subject: Updating test with unittest and extended user case definition X-Git-Tag: V9_3_0.1-prealpha1~4 X-Git-Url: http://git.salome-platform.org/gitweb/?a=commitdiff_plain;h=db228195fd6aa89263c5fca4356d2c1536f0223a;p=modules%2Fadao.git Updating test with unittest and extended user case definition --- diff --git a/src/daComposant/daAlgorithms/3DVAR.py b/src/daComposant/daAlgorithms/3DVAR.py index 5d7352f..d600275 100644 --- a/src/daComposant/daAlgorithms/3DVAR.py +++ b/src/daComposant/daAlgorithms/3DVAR.py @@ -47,6 +47,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): default = 1.e-7, typecast = float, message = "Diminution relative minimale du coût lors de l'arrêt", + minval = 0., ) self.defineRequiredParameter( name = "ProjectedGradientTolerance", @@ -60,6 +61,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): default = 1.e-05, typecast = float, message = "Maximum des composantes du gradient lors de l'arrêt", + minval = 0., ) self.defineRequiredParameter( name = "StoreInternalVariables", @@ -342,6 +344,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): L = numpy.linalg.cholesky( A ) except: raise ValueError("The %s a posteriori covariance matrix A is not symmetric positive-definite. Please check your a priori covariances and your observation operator."%(self._name,)) + if self._toStore("APosterioriCovariance"): self.StoredVariables["APosterioriCovariance"].store( A ) # # Calculs et/ou stockages supplémentaires diff --git a/src/daComposant/daAlgorithms/4DVAR.py b/src/daComposant/daAlgorithms/4DVAR.py index aeb6d96..1e2fc5b 100644 --- a/src/daComposant/daAlgorithms/4DVAR.py +++ b/src/daComposant/daAlgorithms/4DVAR.py @@ -61,6 +61,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): default = 1.e-7, typecast = float, message = "Diminution relative minimale du coût lors de l'arrêt", + minval = 0., ) self.defineRequiredParameter( name = "ProjectedGradientTolerance", @@ -74,6 +75,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): default = 1.e-05, typecast = float, message = "Maximum des composantes du gradient lors de l'arrêt", + minval = 0., ) self.defineRequiredParameter( name = "StoreInternalVariables", @@ -88,15 +90,15 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): message = "Liste de calculs supplémentaires à stocker et/ou effectuer", listval = [ "BMA", - "CurrentState", "CostFunctionJ", - "CostFunctionJb", - "CostFunctionJo", - "IndexOfOptimum", - "CurrentOptimum", "CostFunctionJAtCurrentOptimum", + "CostFunctionJb", "CostFunctionJbAtCurrentOptimum", + "CostFunctionJo", "CostFunctionJoAtCurrentOptimum", + "CurrentOptimum", + "CurrentState", + "IndexOfOptimum", ] ) self.defineRequiredParameter( # Pas de type diff --git a/src/daComposant/daAlgorithms/NonLinearLeastSquares.py b/src/daComposant/daAlgorithms/NonLinearLeastSquares.py index cc73276..c1e76ac 100644 --- a/src/daComposant/daAlgorithms/NonLinearLeastSquares.py +++ b/src/daComposant/daAlgorithms/NonLinearLeastSquares.py @@ -47,6 +47,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): default = 1.e-7, typecast = float, message = "Diminution relative minimale du coût lors de l'arrêt", + minval = 0., ) self.defineRequiredParameter( name = "ProjectedGradientTolerance", @@ -60,6 +61,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): default = 1.e-05, typecast = float, message = "Maximum des composantes du gradient lors de l'arrêt", + minval = 0., ) self.defineRequiredParameter( name = "StoreInternalVariables", @@ -74,23 +76,23 
@@ class ElementaryAlgorithm(BasicObjects.Algorithm): message = "Liste de calculs supplémentaires à stocker et/ou effectuer", listval = [ "BMA", - "OMA", - "OMB", "CostFunctionJ", + "CostFunctionJAtCurrentOptimum", "CostFunctionJb", + "CostFunctionJbAtCurrentOptimum", "CostFunctionJo", - "CurrentState", + "CostFunctionJoAtCurrentOptimum", "CurrentOptimum", + "CurrentState", "IndexOfOptimum", "Innovation", "InnovationAtCurrentState", - "CostFunctionJAtCurrentOptimum", - "CostFunctionJbAtCurrentOptimum", - "CostFunctionJoAtCurrentOptimum", + "OMA", + "OMB", "SimulatedObservationAtBackground", + "SimulatedObservationAtCurrentOptimum", "SimulatedObservationAtCurrentState", "SimulatedObservationAtOptimum", - "SimulatedObservationAtCurrentOptimum", ] ) self.defineRequiredParameter( # Pas de type diff --git a/src/daComposant/daCore/BasicObjects.py b/src/daComposant/daCore/BasicObjects.py index b5d8a31..fe10247 100644 --- a/src/daComposant/daCore/BasicObjects.py +++ b/src/daComposant/daCore/BasicObjects.py @@ -581,8 +581,9 @@ class Algorithm(object): - IndexOfOptimum : index de l'état optimal courant lors d'itérations - Innovation : l'innovation : d = Y - H(X) - InnovationAtCurrentState : l'innovation à l'état courant : dn = Y - H(Xn) - - JacobianMatrixAtBackground : matrice jacobienne à l'ébauche + - JacobianMatrixAtCurrentState : matrice jacobienne à l'état courant - JacobianMatrixAtOptimum : matrice jacobienne à l'optimum + - KalmanGainAtOptimum : gain de Kalman à l'optimum - MahalanobisConsistency : indicateur de consistance des covariances - OMA : Observation moins Analyse : Y - Xa - OMB : Observation moins Background : Y - Xb @@ -627,8 +628,9 @@ class Algorithm(object): self.StoredVariables["IndexOfOptimum"] = Persistence.OneIndex(name = "IndexOfOptimum") self.StoredVariables["Innovation"] = Persistence.OneVector(name = "Innovation") self.StoredVariables["InnovationAtCurrentState"] = Persistence.OneVector(name = "InnovationAtCurrentState") - self.StoredVariables["JacobianMatrixAtBackground"] = Persistence.OneMatrix(name = "JacobianMatrixAtBackground") + self.StoredVariables["JacobianMatrixAtCurrentState"] = Persistence.OneMatrix(name = "JacobianMatrixAtCurrentState") self.StoredVariables["JacobianMatrixAtOptimum"] = Persistence.OneMatrix(name = "JacobianMatrixAtOptimum") + self.StoredVariables["KalmanGainAtOptimum"] = Persistence.OneMatrix(name = "KalmanGainAtOptimum") self.StoredVariables["MahalanobisConsistency"] = Persistence.OneScalar(name = "MahalanobisConsistency") self.StoredVariables["OMA"] = Persistence.OneVector(name = "OMA") self.StoredVariables["OMB"] = Persistence.OneVector(name = "OMB") diff --git a/test/CTestTestfileInstall.cmake.in b/test/CTestTestfileInstall.cmake.in index a7eadf2..da05965 100644 --- a/test/CTestTestfileInstall.cmake.in +++ b/test/CTestTestfileInstall.cmake.in @@ -36,4 +36,5 @@ SUBDIRS( test6901 test6902 test6903 + test6904 ) diff --git a/test/Makefile.am b/test/Makefile.am index 59e568e..62da496 100644 --- a/test/Makefile.am +++ b/test/Makefile.am @@ -39,6 +39,7 @@ install-data-local: cp -R $(DIR)test6901 $(SALOMETESTDIR) cp -R $(DIR)test6902 $(SALOMETESTDIR) cp -R $(DIR)test6903 $(SALOMETESTDIR) + cp -R $(DIR)test6904 $(SALOMETESTDIR) cp $(DIR)CTestTestfileInstall.cmake.in $(SALOMETESTDIR)/CTestTestfile.cmake uninstall-local: @@ -53,4 +54,5 @@ uninstall-local: rm -rf $(SALOMETESTDIR)/test6901 rm -rf $(SALOMETESTDIR)/test6902 rm -rf $(SALOMETESTDIR)/test6903 + rm -rf $(SALOMETESTDIR)/test6904 rm $(SALOMETESTDIR)/CTestTestfile.cmake diff --git 
a/test/test6701/utExtend.py b/test/test6701/utExtend.py deleted file mode 100644 index 5bf358a..0000000 --- a/test/test6701/utExtend.py +++ /dev/null @@ -1,42 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2008-2019 EDF R&D -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -# -# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com -# -# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D - -""" - Unittest extensions for Numpy objects -""" -__author__ = "Jean-Philippe ARGAUD" -__all__ = ["assertAlmostEqualVector"] - -import numpy - -# ============================================================================== -def assertAlmostEqualArrays(first, second, places=7, msg=None, delta=None): - "Compare two vectors, like unittest.assertAlmostEqual" - if msg is not None: - print(msg) - if delta is not None: - if ( (numpy.asarray(first) - numpy.asarray(second)) > float(delta) ).any(): - raise AssertionError("%s != %s within %s places"%(first,second,delta)) - else: - if ( (numpy.asarray(first) - numpy.asarray(second)) > 10**(-int(places)) ).any(): - raise AssertionError("%s != %s within %i places"%(first,second,places)) - return max(abs(numpy.asarray(first) - numpy.asarray(second))) diff --git a/test/test6702/utExtend.py b/test/test6702/utExtend.py deleted file mode 100644 index 5bf358a..0000000 --- a/test/test6702/utExtend.py +++ /dev/null @@ -1,42 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2008-2019 EDF R&D -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -# -# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com -# -# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D - -""" - Unittest extensions for Numpy objects -""" -__author__ = "Jean-Philippe ARGAUD" -__all__ = ["assertAlmostEqualVector"] - -import numpy - -# ============================================================================== -def assertAlmostEqualArrays(first, second, places=7, msg=None, delta=None): - "Compare two vectors, like unittest.assertAlmostEqual" - if msg is not None: - print(msg) - if delta is not None: - if ( (numpy.asarray(first) - numpy.asarray(second)) > float(delta) ).any(): - raise AssertionError("%s != %s within %s places"%(first,second,delta)) - else: - if ( (numpy.asarray(first) - numpy.asarray(second)) > 10**(-int(places)) ).any(): - raise AssertionError("%s != %s within %i places"%(first,second,places)) - return max(abs(numpy.asarray(first) - numpy.asarray(second))) diff --git a/test/test6703/utExtend.py b/test/test6703/utExtend.py deleted file mode 100644 index 5bf358a..0000000 --- a/test/test6703/utExtend.py +++ /dev/null @@ -1,42 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2008-2019 EDF R&D -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -# -# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com -# -# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D - -""" - Unittest extensions for Numpy objects -""" -__author__ = "Jean-Philippe ARGAUD" -__all__ = ["assertAlmostEqualVector"] - -import numpy - -# ============================================================================== -def assertAlmostEqualArrays(first, second, places=7, msg=None, delta=None): - "Compare two vectors, like unittest.assertAlmostEqual" - if msg is not None: - print(msg) - if delta is not None: - if ( (numpy.asarray(first) - numpy.asarray(second)) > float(delta) ).any(): - raise AssertionError("%s != %s within %s places"%(first,second,delta)) - else: - if ( (numpy.asarray(first) - numpy.asarray(second)) > 10**(-int(places)) ).any(): - raise AssertionError("%s != %s within %i places"%(first,second,places)) - return max(abs(numpy.asarray(first) - numpy.asarray(second))) diff --git a/test/test6704/utExtend.py b/test/test6704/utExtend.py deleted file mode 100644 index 5bf358a..0000000 --- a/test/test6704/utExtend.py +++ /dev/null @@ -1,42 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2008-2019 EDF R&D -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License. 
-# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. -# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -# -# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com -# -# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D - -""" - Unittest extensions for Numpy objects -""" -__author__ = "Jean-Philippe ARGAUD" -__all__ = ["assertAlmostEqualVector"] - -import numpy - -# ============================================================================== -def assertAlmostEqualArrays(first, second, places=7, msg=None, delta=None): - "Compare two vectors, like unittest.assertAlmostEqual" - if msg is not None: - print(msg) - if delta is not None: - if ( (numpy.asarray(first) - numpy.asarray(second)) > float(delta) ).any(): - raise AssertionError("%s != %s within %s places"%(first,second,delta)) - else: - if ( (numpy.asarray(first) - numpy.asarray(second)) > 10**(-int(places)) ).any(): - raise AssertionError("%s != %s within %i places"%(first,second,places)) - return max(abs(numpy.asarray(first) - numpy.asarray(second))) diff --git a/test/test6711/utExtend.py b/test/test6711/utExtend.py deleted file mode 100644 index 5bf358a..0000000 --- a/test/test6711/utExtend.py +++ /dev/null @@ -1,42 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright (C) 2008-2019 EDF R&D -# -# This library is free software; you can redistribute it and/or -# modify it under the terms of the GNU Lesser General Public -# License as published by the Free Software Foundation; either -# version 2.1 of the License. -# -# This library is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this library; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -# -# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com -# -# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D - -""" - Unittest extensions for Numpy objects -""" -__author__ = "Jean-Philippe ARGAUD" -__all__ = ["assertAlmostEqualVector"] - -import numpy - -# ============================================================================== -def assertAlmostEqualArrays(first, second, places=7, msg=None, delta=None): - "Compare two vectors, like unittest.assertAlmostEqual" - if msg is not None: - print(msg) - if delta is not None: - if ( (numpy.asarray(first) - numpy.asarray(second)) > float(delta) ).any(): - raise AssertionError("%s != %s within %s places"%(first,second,delta)) - else: - if ( (numpy.asarray(first) - numpy.asarray(second)) > 10**(-int(places)) ).any(): - raise AssertionError("%s != %s within %i places"%(first,second,places)) - return max(abs(numpy.asarray(first) - numpy.asarray(second))) diff --git a/test/test6901/Verification_des_Assimilation_Algorithms.py b/test/test6901/Verification_des_Assimilation_Algorithms.py index c10ffc9..3523dcd 100644 --- a/test/test6901/Verification_des_Assimilation_Algorithms.py +++ b/test/test6901/Verification_des_Assimilation_Algorithms.py @@ -21,168 +21,172 @@ # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D "Verification de la disponibilite de l'ensemble des algorithmes" -# ============================================================================== -import numpy, sys +import sys +import unittest +import numpy from adao import adaoBuilder -def test1(): - """Verification de la disponibilite de l'ensemble des algorithmes\n(Utilisation d'un operateur matriciel)""" - print(test1.__doc__) - Xa = {} - for algo in ("3DVAR", "Blue", "ExtendedBlue", "LinearLeastSquares", "NonLinearLeastSquares", "DerivativeFreeOptimization"): - print("") - msg = "Algorithme en test : %s"%algo - print(msg+"\n"+"-"*len(msg)) + +# ============================================================================== +class InTest(unittest.TestCase): + def test1(self): + """Verification de la disponibilite de l'ensemble des algorithmes\n(Utilisation d'un operateur matriciel)""" + print(self.test1.__doc__) + Xa = {} + for algo in ("3DVAR", "Blue", "ExtendedBlue", "LinearLeastSquares", "NonLinearLeastSquares", "DerivativeFreeOptimization"): + print("") + msg = "Algorithme en test : %s"%algo + print(msg+"\n"+"-"*len(msg)) + # + adaopy = adaoBuilder.New() + adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "Bounds":[[-1,10.],[-1,10.],[-1,10.]]}) + adaopy.setBackground (Vector = [0,1,2]) + adaopy.setBackgroundError (ScalarSparseMatrix = 1.) + adaopy.setObservation (Vector = [0.5,1.5,2.5]) + adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1") + adaopy.setObservationOperator(Matrix = "1 0 0;0 2 0;0 0 3") + adaopy.setObserver("Analysis",Template="ValuePrinter") + adaopy.execute() + Xa[algo] = adaopy.get("Analysis")[-1] + del adaopy # - adaopy = adaoBuilder.New() - adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "Bounds":[[-1,10.],[-1,10.],[-1,10.]]}) - adaopy.setBackground (Vector = [0,1,2]) - adaopy.setBackgroundError (ScalarSparseMatrix = 1.) 
- adaopy.setObservation (Vector = [0.5,1.5,2.5]) - adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1") - adaopy.setObservationOperator(Matrix = "1 0 0;0 2 0;0 0 3") - adaopy.setObserver("Analysis",Template="ValuePrinter") - adaopy.execute() - Xa[algo] = adaopy.get("Analysis")[-1] - del adaopy - # - for algo in ("ExtendedKalmanFilter", "KalmanFilter", "UnscentedKalmanFilter", "EnsembleKalmanFilter", "4DVAR"): - print("") - msg = "Algorithme en test : %s"%algo - print(msg+"\n"+"-"*len(msg)) + for algo in ("ExtendedKalmanFilter", "KalmanFilter", "UnscentedKalmanFilter", "EnsembleKalmanFilter", "4DVAR"): + print("") + msg = "Algorithme en test : %s"%algo + print(msg+"\n"+"-"*len(msg)) + # + adaopy = adaoBuilder.New() + adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "SetSeed":1000}) + adaopy.setBackground (Vector = [0,1,2]) + adaopy.setBackgroundError (ScalarSparseMatrix = 1.) + adaopy.setObservation (Vector = [0.5,1.5,2.5]) + adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1") + adaopy.setObservationOperator(Matrix = "1 0 0;0 2 0;0 0 3") + adaopy.setEvolutionError (ScalarSparseMatrix = 1.) + adaopy.setEvolutionModel (Matrix = "1 0 0;0 1 0;0 0 1") + adaopy.setObserver("Analysis",Template="ValuePrinter") + adaopy.execute() + Xa[algo] = adaopy.get("Analysis")[-1] + del adaopy # - adaopy = adaoBuilder.New() - adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "SetSeed":1000}) - adaopy.setBackground (Vector = [0,1,2]) - adaopy.setBackgroundError (ScalarSparseMatrix = 1.) - adaopy.setObservation (Vector = [0.5,1.5,2.5]) - adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1") - adaopy.setObservationOperator(Matrix = "1 0 0;0 2 0;0 0 3") - adaopy.setEvolutionError (ScalarSparseMatrix = 1.) - adaopy.setEvolutionModel (Matrix = "1 0 0;0 1 0;0 0 1") - adaopy.setObserver("Analysis",Template="ValuePrinter") - adaopy.execute() - Xa[algo] = adaopy.get("Analysis")[-1] - del adaopy - # - for algo in ("ParticleSwarmOptimization", "QuantileRegression", ): - print("") - msg = "Algorithme en test : %s"%algo - print(msg+"\n"+"-"*len(msg)) + for algo in ("ParticleSwarmOptimization", "QuantileRegression", ): + print("") + msg = "Algorithme en test : %s"%algo + print(msg+"\n"+"-"*len(msg)) + # + adaopy = adaoBuilder.New() + adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"BoxBounds":3*[[-1,3]], "SetSeed":1000}) + adaopy.setBackground (Vector = [0,1,2]) + adaopy.setBackgroundError (ScalarSparseMatrix = 1.) + adaopy.setObservation (Vector = [0.5,1.5,2.5]) + adaopy.setObservationError (DiagonalSparseMatrix = "1 2 3") + adaopy.setObservationOperator(Matrix = "1 0 0;0 1 0;0 0 1") + adaopy.setObserver("Analysis",Template="ValuePrinter") + adaopy.execute() + Xa[algo] = adaopy.get("Analysis")[-1] + del adaopy # - adaopy = adaoBuilder.New() - adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"BoxBounds":3*[[-1,3]], "SetSeed":1000}) - adaopy.setBackground (Vector = [0,1,2]) - adaopy.setBackgroundError (ScalarSparseMatrix = 1.) 
- adaopy.setObservation (Vector = [0.5,1.5,2.5]) - adaopy.setObservationError (DiagonalSparseMatrix = "1 2 3") - adaopy.setObservationOperator(Matrix = "1 0 0;0 1 0;0 0 1") - adaopy.setObserver("Analysis",Template="ValuePrinter") - adaopy.execute() - Xa[algo] = adaopy.get("Analysis")[-1] - del adaopy - # - for algo in ("EnsembleBlue", ): + for algo in ("EnsembleBlue", ): + print("") + msg = "Algorithme en test : %s"%algo + print(msg+"\n"+"-"*len(msg)) + # + adaopy = adaoBuilder.New() + adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"SetSeed":1000, }) + adaopy.setBackground (VectorSerie = 100*[[0,1,2]]) + adaopy.setBackgroundError (ScalarSparseMatrix = 1.) + adaopy.setObservation (Vector = [0.5,1.5,2.5]) + adaopy.setObservationError (DiagonalSparseMatrix = "1 2 3") + adaopy.setObservationOperator(Matrix = "1 0 0;0 1 0;0 0 1") + adaopy.setObserver("Analysis",Template="ValuePrinter") + adaopy.execute() + Xa[algo] = adaopy.get("Analysis")[-1] + del adaopy + # + print("") + msg = "Tests des ecarts attendus :" + print(msg+"\n"+"="*len(msg)) + verify_similarity_of_algo_results(("3DVAR", "Blue", "ExtendedBlue", "4DVAR", "DerivativeFreeOptimization"), Xa, 5.e-5) + verify_similarity_of_algo_results(("LinearLeastSquares", "NonLinearLeastSquares"), Xa, 5.e-7) + verify_similarity_of_algo_results(("KalmanFilter", "ExtendedKalmanFilter", "UnscentedKalmanFilter"), Xa, 1.e-14) + verify_similarity_of_algo_results(("KalmanFilter", "EnsembleKalmanFilter"), Xa, 5.e-2) + print(" Les resultats obtenus sont corrects.") print("") - msg = "Algorithme en test : %s"%algo - print(msg+"\n"+"-"*len(msg)) # - adaopy = adaoBuilder.New() - adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"SetSeed":1000, }) - adaopy.setBackground (VectorSerie = 100*[[0,1,2]]) - adaopy.setBackgroundError (ScalarSparseMatrix = 1.) 
- adaopy.setObservation (Vector = [0.5,1.5,2.5]) - adaopy.setObservationError (DiagonalSparseMatrix = "1 2 3") - adaopy.setObservationOperator(Matrix = "1 0 0;0 1 0;0 0 1") - adaopy.setObserver("Analysis",Template="ValuePrinter") - adaopy.execute() - Xa[algo] = adaopy.get("Analysis")[-1] - del adaopy - # - print("") - msg = "Tests des ecarts attendus :" - print(msg+"\n"+"="*len(msg)) - verify_similarity_of_algo_results(("3DVAR", "Blue", "ExtendedBlue", "4DVAR", "DerivativeFreeOptimization"), Xa, 5.e-5) - verify_similarity_of_algo_results(("LinearLeastSquares", "NonLinearLeastSquares"), Xa, 5.e-7) - verify_similarity_of_algo_results(("KalmanFilter", "ExtendedKalmanFilter", "UnscentedKalmanFilter"), Xa, 1.e-14) - verify_similarity_of_algo_results(("KalmanFilter", "EnsembleKalmanFilter"), Xa, 5.e-2) - print(" Les resultats obtenus sont corrects.") - print("") - # - return 0 + return 0 -def test2(): - """Verification de la disponibilite de l'ensemble des algorithmes\n(Utilisation d'un operateur fonctionnel)""" - print(test2.__doc__) - Xa = {} - M = numpy.matrix("1 0 0;0 2 0;0 0 3") - def H(x): return M * numpy.asmatrix(numpy.ravel( x )).T - for algo in ("3DVAR", "Blue", "ExtendedBlue", "NonLinearLeastSquares", "DerivativeFreeOptimization"): - print("") - msg = "Algorithme en test : %s"%algo - print(msg+"\n"+"-"*len(msg)) + def test2(self): + """Verification de la disponibilite de l'ensemble des algorithmes\n(Utilisation d'un operateur fonctionnel)""" + print(self.test2.__doc__) + Xa = {} + M = numpy.matrix("1 0 0;0 2 0;0 0 3") + def H(x): return M * numpy.asmatrix(numpy.ravel( x )).T + for algo in ("3DVAR", "Blue", "ExtendedBlue", "NonLinearLeastSquares", "DerivativeFreeOptimization"): + print("") + msg = "Algorithme en test : %s"%algo + print(msg+"\n"+"-"*len(msg)) + # + adaopy = adaoBuilder.New() + adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "Bounds":[[-1,10.],[-1,10.],[-1,10.]]}) + adaopy.setBackground (Vector = [0,1,2]) + adaopy.setBackgroundError (ScalarSparseMatrix = 1.) + adaopy.setObservation (Vector = [0.5,1.5,2.5]) + adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1") + adaopy.setObservationOperator(OneFunction = H) + adaopy.setObserver("Analysis",Template="ValuePrinter") + adaopy.execute() + Xa[algo] = adaopy.get("Analysis")[-1] + del adaopy # - adaopy = adaoBuilder.New() - adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "Bounds":[[-1,10.],[-1,10.],[-1,10.]]}) - adaopy.setBackground (Vector = [0,1,2]) - adaopy.setBackgroundError (ScalarSparseMatrix = 1.) 
- adaopy.setObservation (Vector = [0.5,1.5,2.5]) - adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1") - adaopy.setObservationOperator(OneFunction = H) - adaopy.setObserver("Analysis",Template="ValuePrinter") - adaopy.execute() - Xa[algo] = adaopy.get("Analysis")[-1] - del adaopy - # - M = numpy.matrix("1 0 0;0 2 0;0 0 3") - def H(x): return M * numpy.asmatrix(numpy.ravel( x )).T - for algo in ("ExtendedKalmanFilter", "KalmanFilter", "EnsembleKalmanFilter", "UnscentedKalmanFilter", "4DVAR"): - print("") - msg = "Algorithme en test : %s"%algo - print(msg+"\n"+"-"*len(msg)) + M = numpy.matrix("1 0 0;0 2 0;0 0 3") + def H(x): return M * numpy.asmatrix(numpy.ravel( x )).T + for algo in ("ExtendedKalmanFilter", "KalmanFilter", "EnsembleKalmanFilter", "UnscentedKalmanFilter", "4DVAR"): + print("") + msg = "Algorithme en test : %s"%algo + print(msg+"\n"+"-"*len(msg)) + # + adaopy = adaoBuilder.New() + adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "SetSeed":1000}) + adaopy.setBackground (Vector = [0,1,2]) + adaopy.setBackgroundError (ScalarSparseMatrix = 1.) + adaopy.setObservation (Vector = [0.5,1.5,2.5]) + adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1") + adaopy.setObservationOperator(OneFunction = H) + adaopy.setEvolutionError (ScalarSparseMatrix = 1.) + adaopy.setEvolutionModel (Matrix = "1 0 0;0 1 0;0 0 1") + adaopy.setObserver("Analysis",Template="ValuePrinter") + adaopy.execute() + Xa[algo] = adaopy.get("Analysis")[-1] + del adaopy + # + M = numpy.matrix("1 0 0;0 1 0;0 0 1") + def H(x): return M * numpy.asmatrix(numpy.ravel( x )).T + for algo in ("ParticleSwarmOptimization", "QuantileRegression", ): + print("") + msg = "Algorithme en test : %s"%algo + print(msg+"\n"+"-"*len(msg)) + # + adaopy = adaoBuilder.New() + adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"BoxBounds":3*[[-1,3]], "SetSeed":1000}) + adaopy.setBackground (Vector = [0,1,2]) + adaopy.setBackgroundError (ScalarSparseMatrix = 1.) + adaopy.setObservation (Vector = [0.5,1.5,2.5]) + adaopy.setObservationError (DiagonalSparseMatrix = "1 2 3") + adaopy.setObservationOperator(OneFunction = H) + adaopy.setObserver("Analysis",Template="ValuePrinter") + adaopy.execute() + Xa[algo] = adaopy.get("Analysis")[-1] + del adaopy # - adaopy = adaoBuilder.New() - adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "SetSeed":1000}) - adaopy.setBackground (Vector = [0,1,2]) - adaopy.setBackgroundError (ScalarSparseMatrix = 1.) - adaopy.setObservation (Vector = [0.5,1.5,2.5]) - adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1") - adaopy.setObservationOperator(OneFunction = H) - adaopy.setEvolutionError (ScalarSparseMatrix = 1.) 
- adaopy.setEvolutionModel (Matrix = "1 0 0;0 1 0;0 0 1") - adaopy.setObserver("Analysis",Template="ValuePrinter") - adaopy.execute() - Xa[algo] = adaopy.get("Analysis")[-1] - del adaopy - # - M = numpy.matrix("1 0 0;0 1 0;0 0 1") - def H(x): return M * numpy.asmatrix(numpy.ravel( x )).T - for algo in ("ParticleSwarmOptimization", "QuantileRegression", ): print("") - msg = "Algorithme en test : %s"%algo - print(msg+"\n"+"-"*len(msg)) + msg = "Tests des ecarts attendus :" + print(msg+"\n"+"="*len(msg)) + verify_similarity_of_algo_results(("3DVAR", "Blue", "ExtendedBlue", "4DVAR", "DerivativeFreeOptimization"), Xa, 5.e-5) + verify_similarity_of_algo_results(("KalmanFilter", "ExtendedKalmanFilter", "UnscentedKalmanFilter"), Xa, 1.e14) + verify_similarity_of_algo_results(("KalmanFilter", "EnsembleKalmanFilter"), Xa, 5.e-2) + print(" Les resultats obtenus sont corrects.") + print("") # - adaopy = adaoBuilder.New() - adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"BoxBounds":3*[[-1,3]], "SetSeed":1000}) - adaopy.setBackground (Vector = [0,1,2]) - adaopy.setBackgroundError (ScalarSparseMatrix = 1.) - adaopy.setObservation (Vector = [0.5,1.5,2.5]) - adaopy.setObservationError (DiagonalSparseMatrix = "1 2 3") - adaopy.setObservationOperator(OneFunction = H) - adaopy.setObserver("Analysis",Template="ValuePrinter") - adaopy.execute() - Xa[algo] = adaopy.get("Analysis")[-1] - del adaopy - # - print("") - msg = "Tests des ecarts attendus :" - print(msg+"\n"+"="*len(msg)) - verify_similarity_of_algo_results(("3DVAR", "Blue", "ExtendedBlue", "4DVAR", "DerivativeFreeOptimization"), Xa, 5.e-5) - verify_similarity_of_algo_results(("KalmanFilter", "ExtendedKalmanFilter", "UnscentedKalmanFilter"), Xa, 1.e14) - verify_similarity_of_algo_results(("KalmanFilter", "EnsembleKalmanFilter"), Xa, 5.e-2) - print(" Les resultats obtenus sont corrects.") - print("") - # - return 0 + return 0 def almost_equal_vectors(v1, v2, precision = 1.e-15, msg = ""): """Comparaison de deux vecteurs""" @@ -200,6 +204,5 @@ def verify_similarity_of_algo_results(serie = [], Xa = {}, precision = 1.e-15): #=============================================================================== if __name__ == "__main__": - print('\nAUTODIAGNOSTIC\n') - test1() - test2() + print("\nAUTODIAGNOSTIC\n==============") + unittest.main() diff --git a/test/test6902/Verification_des_Checking_Algorithms.py b/test/test6902/Verification_des_Checking_Algorithms.py index 0fdb2b1..d9f3b1c 100644 --- a/test/test6902/Verification_des_Checking_Algorithms.py +++ b/test/test6902/Verification_des_Checking_Algorithms.py @@ -20,61 +20,65 @@ # # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D +import sys +import unittest +import numpy from adao import adaoBuilder # ============================================================================== -def test1(): - for algo in ("AdjointTest", "FunctionTest", "GradientTest", "LinearityTest", "TangentTest"): - print("") - msg = "Algorithme en test : %s"%algo - print(msg+"\n"+"-"*len(msg)) +class InTest(unittest.TestCase): + def test1(self): + for algo in ("AdjointTest", "FunctionTest", "GradientTest", "LinearityTest", "TangentTest"): + print("") + msg = "Algorithme en test : %s"%algo + print(msg+"\n"+"-"*len(msg)) + # + adaopy = adaoBuilder.New() + adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10,"NumberOfRepetition":2, "SetSeed":1000}) + adaopy.setCheckingPoint (Vector = [0,1,2]) + adaopy.setBackgroundError (ScalarSparseMatrix = 1.) 
+ adaopy.setObservation (Vector = [0.5,1.5,2.5]) + adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1") + adaopy.setObservationOperator(Matrix = "1 0 0;0 2 0;0 0 3") + adaopy.execute() + del adaopy # - adaopy = adaoBuilder.New() - adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10,"NumberOfRepetition":2, "SetSeed":1000}) - adaopy.setCheckingPoint (Vector = [0,1,2]) - adaopy.setBackgroundError (ScalarSparseMatrix = 1.) - adaopy.setObservation (Vector = [0.5,1.5,2.5]) - adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1") - adaopy.setObservationOperator(Matrix = "1 0 0;0 2 0;0 0 3") - adaopy.execute() - del adaopy - # - for algo in ("ObserverTest", ): - print("") - msg = "Algorithme en test : %s"%algo - print(msg+"\n"+"-"*len(msg)) + for algo in ("ObserverTest", ): + print("") + msg = "Algorithme en test : %s"%algo + print(msg+"\n"+"-"*len(msg)) + # + adaopy = adaoBuilder.New() + adaopy.setAlgorithmParameters(Algorithm=algo) + adaopy.setCheckingPoint (Vector = [0,1,2]) + adaopy.setBackgroundError (ScalarSparseMatrix = 1.) + adaopy.setObservation (Vector = [0.5,1.5,2.5]) + adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1") + adaopy.setObservationOperator(Matrix = "1 0 0;0 2 0;0 0 3") + adaopy.setObserver("Analysis",Template="ValuePrinter") + adaopy.execute() + del adaopy # - adaopy = adaoBuilder.New() - adaopy.setAlgorithmParameters(Algorithm=algo) - adaopy.setCheckingPoint (Vector = [0,1,2]) - adaopy.setBackgroundError (ScalarSparseMatrix = 1.) - adaopy.setObservation (Vector = [0.5,1.5,2.5]) - adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1") - adaopy.setObservationOperator(Matrix = "1 0 0;0 2 0;0 0 3") - adaopy.setObserver("Analysis",Template="ValuePrinter") - adaopy.execute() - del adaopy - # - for algo in ("SamplingTest", ): - print("") - msg = "Algorithme en test : %s"%algo - print(msg+"\n"+"-"*len(msg)) - # - adaopy = adaoBuilder.New() - adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={ - "StoreSupplementaryCalculations":["CostFunctionJ","CurrentState",], - "SampleAsMinMaxStepHyperCube":[[-1.,1.,1.],[0,2,1],[1,3,1]], - }) - adaopy.setCheckingPoint (Vector = [0,1,2]) - adaopy.setBackgroundError (ScalarSparseMatrix = 1.) - adaopy.setObservation (Vector = [0.5,1.5,2.5]) - adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1") - adaopy.setObservationOperator(Matrix = "1 0 0;0 2 0;0 0 3") - adaopy.setObserver ("CurrentState",Template="ValuePrinter") - adaopy.execute() - del adaopy + for algo in ("SamplingTest", ): + print("") + msg = "Algorithme en test : %s"%algo + print(msg+"\n"+"-"*len(msg)) + # + adaopy = adaoBuilder.New() + adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={ + "StoreSupplementaryCalculations":["CostFunctionJ","CurrentState",], + "SampleAsMinMaxStepHyperCube":[[-1.,1.,1.],[0,2,1],[1,3,1]], + }) + adaopy.setCheckingPoint (Vector = [0,1,2]) + adaopy.setBackgroundError (ScalarSparseMatrix = 1.) 
+ adaopy.setObservation (Vector = [0.5,1.5,2.5]) + adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1") + adaopy.setObservationOperator(Matrix = "1 0 0;0 2 0;0 0 3") + adaopy.setObserver ("CurrentState",Template="ValuePrinter") + adaopy.execute() + del adaopy -# ============================================================================== +#=============================================================================== if __name__ == "__main__": - print('\nAUTODIAGNOSTIC\n') - test1() + print("\nAUTODIAGNOSTIC\n==============") + unittest.main() diff --git a/test/test6903/Verification_des_mono_et_multi_fonctions_A.py b/test/test6903/Verification_des_mono_et_multi_fonctions_A.py index f615adb..5dffc27 100644 --- a/test/test6903/Verification_des_mono_et_multi_fonctions_A.py +++ b/test/test6903/Verification_des_mono_et_multi_fonctions_A.py @@ -21,10 +21,13 @@ # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D "Verification du fonctionnement correct d'entrees en mono ou multi-fonctions" -# ============================================================================== -import numpy, sys +import sys +import unittest +import numpy from adao import adaoBuilder +# ============================================================================== + M = numpy.matrix("1 0 0;0 2 0;0 0 3") def MonoFonction( x ): return M * numpy.asmatrix(numpy.ravel( x )).T @@ -36,64 +39,65 @@ def MultiFonction( xserie ): return _mulHX # ============================================================================== -def test1(): - """ - Verification du fonctionnement identique pour les algorithmes non-temporels - en utilisant une fonction lineaire et carree - """ - print(test1.__doc__) - Xa = {} - # - for algo in ("3DVAR", "Blue", "ExtendedBlue", "NonLinearLeastSquares", "DerivativeFreeOptimization"): - print("") - msg = "Algorithme en test en MonoFonction : %s"%algo - print(msg+"\n"+"-"*len(msg)) +class InTest(unittest.TestCase): + def test1(self): + """ + Verification du fonctionnement identique pour les algorithmes non-temporels + en utilisant une fonction lineaire et carree + """ + print(self.test1.__doc__) + Xa = {} # - adaopy = adaoBuilder.New() - adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "Bounds":[[-1,10.],[-1,10.],[-1,10.]]}) - adaopy.setBackground (Vector = [0,1,2]) - adaopy.setBackgroundError (ScalarSparseMatrix = 1.) - adaopy.setObservation (Vector = [0.5,1.5,2.5]) - adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1") - adaopy.setObservationOperator(OneFunction = MonoFonction) - adaopy.setObserver("Analysis",Template="ValuePrinter") - adaopy.execute() - Xa["Mono/"+algo] = adaopy.get("Analysis")[-1] - del adaopy - # - for algo in ("3DVAR", "Blue", "ExtendedBlue", "NonLinearLeastSquares", "DerivativeFreeOptimization"): + for algo in ("3DVAR", "Blue", "ExtendedBlue", "NonLinearLeastSquares", "DerivativeFreeOptimization"): + print("") + msg = "Algorithme en test en MonoFonction : %s"%algo + print(msg+"\n"+"-"*len(msg)) + # + adaopy = adaoBuilder.New() + adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "Bounds":[[-1,10.],[-1,10.],[-1,10.]]}) + adaopy.setBackground (Vector = [0,1,2]) + adaopy.setBackgroundError (ScalarSparseMatrix = 1.) 
+ adaopy.setObservation (Vector = [0.5,1.5,2.5]) + adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1") + adaopy.setObservationOperator(OneFunction = MonoFonction) + adaopy.setObserver("Analysis",Template="ValuePrinter") + adaopy.execute() + Xa["Mono/"+algo] = adaopy.get("Analysis")[-1] + del adaopy + # + for algo in ("3DVAR", "Blue", "ExtendedBlue", "NonLinearLeastSquares", "DerivativeFreeOptimization"): + print("") + msg = "Algorithme en test en MultiFonction : %s"%algo + print(msg+"\n"+"-"*len(msg)) + # + adaopy = adaoBuilder.New() + adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "Bounds":[[-1,10.],[-1,10.],[-1,10.]]}) + adaopy.setBackground (Vector = [0,1,2]) + adaopy.setBackgroundError (ScalarSparseMatrix = 1.) + adaopy.setObservation (Vector = [0.5,1.5,2.5]) + adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1") + adaopy.setObservationOperator(OneFunction = MultiFonction, InputFunctionAsMulti = True) + adaopy.setObserver("Analysis",Template="ValuePrinter") + adaopy.execute() + Xa["Multi/"+algo] = adaopy.get("Analysis")[-1] + del adaopy + # + print("") + msg = "Tests des ecarts attendus :" + print(msg+"\n"+"="*len(msg)) + for algo in ("3DVAR", "Blue", "ExtendedBlue", "NonLinearLeastSquares", "DerivativeFreeOptimization"): + verify_similarity_of_algo_results(("Multi/"+algo, "Mono/"+algo), Xa, 1.e-20) + print(" Les resultats obtenus sont corrects.") print("") - msg = "Algorithme en test en MultiFonction : %s"%algo - print(msg+"\n"+"-"*len(msg)) # - adaopy = adaoBuilder.New() - adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "Bounds":[[-1,10.],[-1,10.],[-1,10.]]}) - adaopy.setBackground (Vector = [0,1,2]) - adaopy.setBackgroundError (ScalarSparseMatrix = 1.) 
- adaopy.setObservation (Vector = [0.5,1.5,2.5]) - adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1") - adaopy.setObservationOperator(OneFunction = MultiFonction, InputFunctionAsMulti = True) - adaopy.setObserver("Analysis",Template="ValuePrinter") - adaopy.execute() - Xa["Multi/"+algo] = adaopy.get("Analysis")[-1] - del adaopy - # - print("") - msg = "Tests des ecarts attendus :" - print(msg+"\n"+"="*len(msg)) - for algo in ("3DVAR", "Blue", "ExtendedBlue", "NonLinearLeastSquares", "DerivativeFreeOptimization"): - verify_similarity_of_algo_results(("Multi/"+algo, "Mono/"+algo), Xa, 1.e-20) - print(" Les resultats obtenus sont corrects.") - print("") - # - return 0 - + return 0 +# # ============================================================================== def almost_equal_vectors(v1, v2, precision = 1.e-15, msg = ""): """Comparaison de deux vecteurs""" print(" Difference maximale %s: %.2e"%(msg, max(abs(v2 - v1)))) return max(abs(v2 - v1)) < precision - +# def verify_similarity_of_algo_results(serie = [], Xa = {}, precision = 1.e-15): print(" Comparaisons :") for algo1 in serie: @@ -102,8 +106,8 @@ def verify_similarity_of_algo_results(serie = [], Xa = {}, precision = 1.e-15): assert almost_equal_vectors( Xa[algo1], Xa[algo2], precision, "entre %s et %s "%(algo1, algo2) ) print(" Algorithmes dont les resultats sont similaires a %.0e : %s\n"%(precision, serie,)) sys.stdout.flush() - +# #=============================================================================== if __name__ == "__main__": - print('\nAUTODIAGNOSTIC\n') - test1() + print("\nAUTODIAGNOSTIC\n==============") + unittest.main() diff --git a/test/test6903/Verification_des_mono_et_multi_fonctions_B.py b/test/test6903/Verification_des_mono_et_multi_fonctions_B.py index 3f9ef9e..0240905 100644 --- a/test/test6903/Verification_des_mono_et_multi_fonctions_B.py +++ b/test/test6903/Verification_des_mono_et_multi_fonctions_B.py @@ -21,10 +21,13 @@ # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D "Verification du fonctionnement correct d'entrees en mono ou multi-fonctions" -# ============================================================================== -import numpy, sys +import sys +import unittest +import numpy from adao import adaoBuilder +# ============================================================================== + M = numpy.matrix("1 0 0;0 2 0;0 0 3") def MonoFonction( x ): return M * numpy.asmatrix(numpy.ravel( x )).T @@ -36,61 +39,62 @@ def MultiFonction( xserie ): return _mulHX # ============================================================================== -def test1(): - """ - Verification du fonctionnement identique pour les algorithmes temporels - en utilisant une fonction lineaire et carree - """ - print(test1.__doc__) - Xa = {} - # - for algo in ("ExtendedKalmanFilter", "KalmanFilter", "EnsembleKalmanFilter", "UnscentedKalmanFilter", "4DVAR"): - print("") - msg = "Algorithme en test en MonoFonction : %s"%algo - print(msg+"\n"+"-"*len(msg)) +class InTest(unittest.TestCase): + def test1(self): + """ + Verification du fonctionnement identique pour les algorithmes temporels + en utilisant une fonction lineaire et carree + """ + print(self.test1.__doc__) + Xa = {} # - adaopy = adaoBuilder.New() - adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "SetSeed":1000}) - adaopy.setBackground (Vector = [0,1,2]) - adaopy.setBackgroundError (ScalarSparseMatrix = 1.) 
- adaopy.setObservation (Vector = [0.5,1.5,2.5]) - adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1") - adaopy.setObservationOperator(OneFunction = MonoFonction) - adaopy.setEvolutionError (ScalarSparseMatrix = 1.) - adaopy.setEvolutionModel (Matrix = "1 0 0;0 1 0;0 0 1") - adaopy.setObserver("Analysis",Template="ValuePrinter") - adaopy.execute() - Xa["Mono/"+algo] = adaopy.get("Analysis")[-1] - del adaopy - # - for algo in ("ExtendedKalmanFilter", "KalmanFilter", "EnsembleKalmanFilter", "UnscentedKalmanFilter", "4DVAR"): + for algo in ("ExtendedKalmanFilter", "KalmanFilter", "EnsembleKalmanFilter", "UnscentedKalmanFilter", "4DVAR"): + print("") + msg = "Algorithme en test en MonoFonction : %s"%algo + print(msg+"\n"+"-"*len(msg)) + # + adaopy = adaoBuilder.New() + adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "SetSeed":1000}) + adaopy.setBackground (Vector = [0,1,2]) + adaopy.setBackgroundError (ScalarSparseMatrix = 1.) + adaopy.setObservation (Vector = [0.5,1.5,2.5]) + adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1") + adaopy.setObservationOperator(OneFunction = MonoFonction) + adaopy.setEvolutionError (ScalarSparseMatrix = 1.) + adaopy.setEvolutionModel (Matrix = "1 0 0;0 1 0;0 0 1") + adaopy.setObserver("Analysis",Template="ValuePrinter") + adaopy.execute() + Xa["Mono/"+algo] = adaopy.get("Analysis")[-1] + del adaopy + # + for algo in ("ExtendedKalmanFilter", "KalmanFilter", "EnsembleKalmanFilter", "UnscentedKalmanFilter", "4DVAR"): + print("") + msg = "Algorithme en test en MultiFonction : %s"%algo + print(msg+"\n"+"-"*len(msg)) + # + adaopy = adaoBuilder.New() + adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "SetSeed":1000}) + adaopy.setBackground (Vector = [0,1,2]) + adaopy.setBackgroundError (ScalarSparseMatrix = 1.) + adaopy.setObservation (Vector = [0.5,1.5,2.5]) + adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1") + adaopy.setObservationOperator(OneFunction = MultiFonction, InputFunctionAsMulti = True) + adaopy.setEvolutionError (ScalarSparseMatrix = 1.) + adaopy.setEvolutionModel (Matrix = "1 0 0;0 1 0;0 0 1") + adaopy.setObserver("Analysis",Template="ValuePrinter") + adaopy.execute() + Xa["Multi/"+algo] = adaopy.get("Analysis")[-1] + del adaopy + # + print("") + msg = "Tests des ecarts attendus :" + print(msg+"\n"+"="*len(msg)) + for algo in ("ExtendedKalmanFilter", "KalmanFilter", "EnsembleKalmanFilter", "UnscentedKalmanFilter", "4DVAR"): + verify_similarity_of_algo_results(("Multi/"+algo, "Mono/"+algo), Xa, 1.e-20) + print(" Les resultats obtenus sont corrects.") print("") - msg = "Algorithme en test en MultiFonction : %s"%algo - print(msg+"\n"+"-"*len(msg)) # - adaopy = adaoBuilder.New() - adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "SetSeed":1000}) - adaopy.setBackground (Vector = [0,1,2]) - adaopy.setBackgroundError (ScalarSparseMatrix = 1.) - adaopy.setObservation (Vector = [0.5,1.5,2.5]) - adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1") - adaopy.setObservationOperator(OneFunction = MultiFonction, InputFunctionAsMulti = True) - adaopy.setEvolutionError (ScalarSparseMatrix = 1.) 
- adaopy.setEvolutionModel (Matrix = "1 0 0;0 1 0;0 0 1") - adaopy.setObserver("Analysis",Template="ValuePrinter") - adaopy.execute() - Xa["Multi/"+algo] = adaopy.get("Analysis")[-1] - del adaopy - # - print("") - msg = "Tests des ecarts attendus :" - print(msg+"\n"+"="*len(msg)) - for algo in ("ExtendedKalmanFilter", "KalmanFilter", "EnsembleKalmanFilter", "UnscentedKalmanFilter", "4DVAR"): - verify_similarity_of_algo_results(("Multi/"+algo, "Mono/"+algo), Xa, 1.e-20) - print(" Les resultats obtenus sont corrects.") - print("") - # - return 0 + return 0 # ============================================================================== def almost_equal_vectors(v1, v2, precision = 1.e-15, msg = ""): @@ -109,5 +113,5 @@ def verify_similarity_of_algo_results(serie = [], Xa = {}, precision = 1.e-15): #=============================================================================== if __name__ == "__main__": - print('\nAUTODIAGNOSTIC\n') - test1() + print("\nAUTODIAGNOSTIC\n==============") + unittest.main() diff --git a/test/test6903/Verification_des_mono_et_multi_fonctions_C.py b/test/test6903/Verification_des_mono_et_multi_fonctions_C.py index c4f18c6..d705066 100644 --- a/test/test6903/Verification_des_mono_et_multi_fonctions_C.py +++ b/test/test6903/Verification_des_mono_et_multi_fonctions_C.py @@ -21,10 +21,13 @@ # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D "Verification du fonctionnement correct d'entrees en mono ou multi-fonctions" -# ============================================================================== -import numpy, sys +import sys +import unittest +import numpy from adao import adaoBuilder +# ============================================================================== + M = numpy.matrix("1 0 0;0 2 0;0 0 3") def MonoFonction( x ): return M * numpy.asmatrix(numpy.ravel( x )).T @@ -36,57 +39,58 @@ def MultiFonction( xserie ): return _mulHX # ============================================================================== -def test1(): - """ - Verification du fonctionnement identique pour les algorithmes autres - en utilisant une fonction lineaire et carree - """ - print(test1.__doc__) - Xa = {} - # - for algo in ("ParticleSwarmOptimization", "QuantileRegression", ): - print("") - msg = "Algorithme en test en MonoFonction : %s"%algo - print(msg+"\n"+"-"*len(msg)) +class InTest(unittest.TestCase): + def test1(self): + """ + Verification du fonctionnement identique pour les algorithmes autres + en utilisant une fonction lineaire et carree + """ + print(self.test1.__doc__) + Xa = {} # - adaopy = adaoBuilder.New() - adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"BoxBounds":3*[[-1,3]], "SetSeed":1000}) - adaopy.setBackground (Vector = [0,1,2]) - adaopy.setBackgroundError (ScalarSparseMatrix = 1.) 
- adaopy.setObservation (Vector = [0.5,1.5,2.5]) - adaopy.setObservationError (DiagonalSparseMatrix = "1 2 3") - adaopy.setObservationOperator(OneFunction = MonoFonction) - adaopy.setObserver("Analysis",Template="ValuePrinter") - adaopy.execute() - Xa["Mono/"+algo] = adaopy.get("Analysis")[-1] - del adaopy - # - for algo in ("ParticleSwarmOptimization", "QuantileRegression", ): + for algo in ("ParticleSwarmOptimization", "QuantileRegression", ): + print("") + msg = "Algorithme en test en MonoFonction : %s"%algo + print(msg+"\n"+"-"*len(msg)) + # + adaopy = adaoBuilder.New() + adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"BoxBounds":3*[[-1,3]], "SetSeed":1000}) + adaopy.setBackground (Vector = [0,1,2]) + adaopy.setBackgroundError (ScalarSparseMatrix = 1.) + adaopy.setObservation (Vector = [0.5,1.5,2.5]) + adaopy.setObservationError (DiagonalSparseMatrix = "1 2 3") + adaopy.setObservationOperator(OneFunction = MonoFonction) + adaopy.setObserver("Analysis",Template="ValuePrinter") + adaopy.execute() + Xa["Mono/"+algo] = adaopy.get("Analysis")[-1] + del adaopy + # + for algo in ("ParticleSwarmOptimization", "QuantileRegression", ): + print("") + msg = "Algorithme en test en MultiFonction : %s"%algo + print(msg+"\n"+"-"*len(msg)) + # + adaopy = adaoBuilder.New() + adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"BoxBounds":3*[[-1,3]], "SetSeed":1000}) + adaopy.setBackground (Vector = [0,1,2]) + adaopy.setBackgroundError (ScalarSparseMatrix = 1.) + adaopy.setObservation (Vector = [0.5,1.5,2.5]) + adaopy.setObservationError (DiagonalSparseMatrix = "1 2 3") + adaopy.setObservationOperator(OneFunction = MultiFonction, InputFunctionAsMulti = True) + adaopy.setObserver("Analysis",Template="ValuePrinter") + adaopy.execute() + Xa["Multi/"+algo] = adaopy.get("Analysis")[-1] + del adaopy + # + print("") + msg = "Tests des ecarts attendus :" + print(msg+"\n"+"="*len(msg)) + for algo in ("ParticleSwarmOptimization", "QuantileRegression"): + verify_similarity_of_algo_results(("Multi/"+algo, "Mono/"+algo), Xa, 1.e-20) + print(" Les resultats obtenus sont corrects.") print("") - msg = "Algorithme en test en MultiFonction : %s"%algo - print(msg+"\n"+"-"*len(msg)) # - adaopy = adaoBuilder.New() - adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"BoxBounds":3*[[-1,3]], "SetSeed":1000}) - adaopy.setBackground (Vector = [0,1,2]) - adaopy.setBackgroundError (ScalarSparseMatrix = 1.) 
- adaopy.setObservation (Vector = [0.5,1.5,2.5]) - adaopy.setObservationError (DiagonalSparseMatrix = "1 2 3") - adaopy.setObservationOperator(OneFunction = MultiFonction, InputFunctionAsMulti = True) - adaopy.setObserver("Analysis",Template="ValuePrinter") - adaopy.execute() - Xa["Multi/"+algo] = adaopy.get("Analysis")[-1] - del adaopy - # - print("") - msg = "Tests des ecarts attendus :" - print(msg+"\n"+"="*len(msg)) - for algo in ("ParticleSwarmOptimization", "QuantileRegression"): - verify_similarity_of_algo_results(("Multi/"+algo, "Mono/"+algo), Xa, 1.e-20) - print(" Les resultats obtenus sont corrects.") - print("") - # - return 0 + return 0 # ============================================================================== def almost_equal_vectors(v1, v2, precision = 1.e-15, msg = ""): @@ -105,5 +109,5 @@ def verify_similarity_of_algo_results(serie = [], Xa = {}, precision = 1.e-15): #=============================================================================== if __name__ == "__main__": - print('\nAUTODIAGNOSTIC\n') - test1() + print("\nAUTODIAGNOSTIC\n==============") + unittest.main() diff --git a/test/test6903/Verification_des_mono_et_multi_fonctions_D.py b/test/test6903/Verification_des_mono_et_multi_fonctions_D.py index bcccedf..66d1f43 100644 --- a/test/test6903/Verification_des_mono_et_multi_fonctions_D.py +++ b/test/test6903/Verification_des_mono_et_multi_fonctions_D.py @@ -21,10 +21,13 @@ # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D "Verification du fonctionnement correct d'entrees en mono ou multi-fonctions" -# ============================================================================== -import numpy, sys +import sys +import unittest +import numpy from adao import adaoBuilder +# ============================================================================== + def ElementaryFunction01( InputArgument ): """ Exemple de fonction non-lineaire et non-carree @@ -64,57 +67,58 @@ def MultiFonction01( xSerie ): return _ySerie # ============================================================================== -def test1(): - """ - Verification du fonctionnement identique pour les algorithmes non-temporels - en utilisant une fonction non-lineaire et non-carree - """ - print(test1.__doc__) - Xa = {} - # - for algo in ("3DVAR", "Blue", "ExtendedBlue", "NonLinearLeastSquares", "DerivativeFreeOptimization"): - print("") - msg = "Algorithme en test en MonoFonction : %s"%algo - print(msg+"\n"+"-"*len(msg)) +class InTest(unittest.TestCase): + def test1(self): + """ + Verification du fonctionnement identique pour les algorithmes non-temporels + en utilisant une fonction non-lineaire et non-carree + """ + print(self.test1.__doc__) + Xa = {} + # + for algo in ("3DVAR", "Blue", "ExtendedBlue", "NonLinearLeastSquares", "DerivativeFreeOptimization"): + print("") + msg = "Algorithme en test en MonoFonction : %s"%algo + print(msg+"\n"+"-"*len(msg)) + # + adaopy = adaoBuilder.New() + adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "Bounds":[[-1,10.],[-1,10.],[-1,10.]]}) + adaopy.setBackground (Vector = [0,1,2]) + adaopy.setBackgroundError (ScalarSparseMatrix = 1.) 
+ adaopy.setObservation (Vector = [0.5,1.5,2.5,0.5,1.5,2.5,0.5,1.5,2.5]) + adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1 1 1 1 1 1 1") + adaopy.setObservationOperator(OneFunction = ElementaryFunction01) + adaopy.setObserver("Analysis",Template="ValuePrinter") + adaopy.execute() + Xa["Mono/"+algo] = adaopy.get("Analysis")[-1] + del adaopy + # + for algo in ("3DVAR", "Blue", "ExtendedBlue", "NonLinearLeastSquares", "DerivativeFreeOptimization"): + print("") + msg = "Algorithme en test en MultiFonction : %s"%algo + print(msg+"\n"+"-"*len(msg)) + # + adaopy = adaoBuilder.New() + adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "Bounds":[[-1,10.],[-1,10.],[-1,10.]]}) + adaopy.setBackground (Vector = [0,1,2]) + adaopy.setBackgroundError (ScalarSparseMatrix = 1.) + adaopy.setObservation (Vector = [0.5,1.5,2.5,0.5,1.5,2.5,0.5,1.5,2.5]) + adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1 1 1 1 1 1 1") + adaopy.setObservationOperator(OneFunction = MultiFonction01, InputFunctionAsMulti = True) + adaopy.setObserver("Analysis",Template="ValuePrinter") + adaopy.execute() + Xa["Multi/"+algo] = adaopy.get("Analysis")[-1] + del adaopy # - adaopy = adaoBuilder.New() - adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "Bounds":[[-1,10.],[-1,10.],[-1,10.]]}) - adaopy.setBackground (Vector = [0,1,2]) - adaopy.setBackgroundError (ScalarSparseMatrix = 1.) - adaopy.setObservation (Vector = [0.5,1.5,2.5,0.5,1.5,2.5,0.5,1.5,2.5]) - adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1 1 1 1 1 1 1") - adaopy.setObservationOperator(OneFunction = ElementaryFunction01) - adaopy.setObserver("Analysis",Template="ValuePrinter") - adaopy.execute() - Xa["Mono/"+algo] = adaopy.get("Analysis")[-1] - del adaopy - # - for algo in ("3DVAR", "Blue", "ExtendedBlue", "NonLinearLeastSquares", "DerivativeFreeOptimization"): print("") - msg = "Algorithme en test en MultiFonction : %s"%algo - print(msg+"\n"+"-"*len(msg)) + msg = "Tests des ecarts attendus :" + print(msg+"\n"+"="*len(msg)) + for algo in ("3DVAR", "Blue", "ExtendedBlue", "NonLinearLeastSquares", "DerivativeFreeOptimization"): + verify_similarity_of_algo_results(("Multi/"+algo, "Mono/"+algo), Xa, 1.e-20) + print(" Les resultats obtenus sont corrects.") + print("") # - adaopy = adaoBuilder.New() - adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "Bounds":[[-1,10.],[-1,10.],[-1,10.]]}) - adaopy.setBackground (Vector = [0,1,2]) - adaopy.setBackgroundError (ScalarSparseMatrix = 1.) 
- adaopy.setObservation (Vector = [0.5,1.5,2.5,0.5,1.5,2.5,0.5,1.5,2.5]) - adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1 1 1 1 1 1 1") - adaopy.setObservationOperator(OneFunction = MultiFonction01, InputFunctionAsMulti = True) - adaopy.setObserver("Analysis",Template="ValuePrinter") - adaopy.execute() - Xa["Multi/"+algo] = adaopy.get("Analysis")[-1] - del adaopy - # - print("") - msg = "Tests des ecarts attendus :" - print(msg+"\n"+"="*len(msg)) - for algo in ("3DVAR", "Blue", "ExtendedBlue", "NonLinearLeastSquares", "DerivativeFreeOptimization"): - verify_similarity_of_algo_results(("Multi/"+algo, "Mono/"+algo), Xa, 1.e-20) - print(" Les resultats obtenus sont corrects.") - print("") - # - return 0 + return 0 # ============================================================================== def almost_equal_vectors(v1, v2, precision = 1.e-15, msg = ""): @@ -133,5 +137,5 @@ def verify_similarity_of_algo_results(serie = [], Xa = {}, precision = 1.e-15): #=============================================================================== if __name__ == "__main__": - print('\nAUTODIAGNOSTIC\n') - test1() + print("\nAUTODIAGNOSTIC\n==============") + unittest.main() diff --git a/test/test6903/Verification_des_mono_et_multi_fonctions_E.py b/test/test6903/Verification_des_mono_et_multi_fonctions_E.py index e5324e5..66dd89d 100644 --- a/test/test6903/Verification_des_mono_et_multi_fonctions_E.py +++ b/test/test6903/Verification_des_mono_et_multi_fonctions_E.py @@ -21,10 +21,13 @@ # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D "Verification du fonctionnement correct d'entrees en mono ou multi-fonctions" -# ============================================================================== -import numpy, sys +import sys +import unittest +import numpy from adao import adaoBuilder +# ============================================================================== + def ElementaryFunction01( InputArgument ): """ Exemple de fonction non-lineaire et non-carree @@ -64,61 +67,62 @@ def MultiFonction01( xSerie ): return _ySerie # ============================================================================== -def test1(): - """ - Verification du fonctionnement identique pour les algorithmes temporels - en utilisant une fonction non-lineaire et non-carree - """ - print(test1.__doc__) - Xa = {} - # - for algo in ("ExtendedKalmanFilter", "KalmanFilter", "EnsembleKalmanFilter", "UnscentedKalmanFilter", "4DVAR"): - print("") - msg = "Algorithme en test en MonoFonction : %s"%algo - print(msg+"\n"+"-"*len(msg)) +class InTest(unittest.TestCase): + def test1(self): + """ + Verification du fonctionnement identique pour les algorithmes temporels + en utilisant une fonction non-lineaire et non-carree + """ + print(self.test1.__doc__) + Xa = {} + # + for algo in ("ExtendedKalmanFilter", "KalmanFilter", "EnsembleKalmanFilter", "UnscentedKalmanFilter", "4DVAR"): + print("") + msg = "Algorithme en test en MonoFonction : %s"%algo + print(msg+"\n"+"-"*len(msg)) + # + adaopy = adaoBuilder.New() + adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "SetSeed":1000}) + adaopy.setBackground (Vector = [0,1,2]) + adaopy.setBackgroundError (ScalarSparseMatrix = 1.) + adaopy.setObservation (Vector = [0.5,1.5,2.5,0.5,1.5,2.5,0.5,1.5,2.5]) + adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1 1 1 1 1 1 1") + adaopy.setObservationOperator(OneFunction = ElementaryFunction01) + adaopy.setEvolutionError (ScalarSparseMatrix = 1.) 
+ adaopy.setEvolutionModel (Matrix = "1 0 0;0 1 0;0 0 1") + adaopy.setObserver("Analysis",Template="ValuePrinter") + adaopy.execute() + Xa["Mono/"+algo] = adaopy.get("Analysis")[-1] + del adaopy + # + for algo in ("ExtendedKalmanFilter", "KalmanFilter", "EnsembleKalmanFilter", "UnscentedKalmanFilter", "4DVAR"): + print("") + msg = "Algorithme en test en MultiFonction : %s"%algo + print(msg+"\n"+"-"*len(msg)) + # + adaopy = adaoBuilder.New() + adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "SetSeed":1000}) + adaopy.setBackground (Vector = [0,1,2]) + adaopy.setBackgroundError (ScalarSparseMatrix = 1.) + adaopy.setObservation (Vector = [0.5,1.5,2.5,0.5,1.5,2.5,0.5,1.5,2.5]) + adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1 1 1 1 1 1 1") + adaopy.setObservationOperator(OneFunction = MultiFonction01, InputFunctionAsMulti = True) + adaopy.setEvolutionError (ScalarSparseMatrix = 1.) + adaopy.setEvolutionModel (Matrix = "1 0 0;0 1 0;0 0 1") + adaopy.setObserver("Analysis",Template="ValuePrinter") + adaopy.execute() + Xa["Multi/"+algo] = adaopy.get("Analysis")[-1] + del adaopy # - adaopy = adaoBuilder.New() - adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "SetSeed":1000}) - adaopy.setBackground (Vector = [0,1,2]) - adaopy.setBackgroundError (ScalarSparseMatrix = 1.) - adaopy.setObservation (Vector = [0.5,1.5,2.5,0.5,1.5,2.5,0.5,1.5,2.5]) - adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1 1 1 1 1 1 1") - adaopy.setObservationOperator(OneFunction = ElementaryFunction01) - adaopy.setEvolutionError (ScalarSparseMatrix = 1.) - adaopy.setEvolutionModel (Matrix = "1 0 0;0 1 0;0 0 1") - adaopy.setObserver("Analysis",Template="ValuePrinter") - adaopy.execute() - Xa["Mono/"+algo] = adaopy.get("Analysis")[-1] - del adaopy - # - for algo in ("ExtendedKalmanFilter", "KalmanFilter", "EnsembleKalmanFilter", "UnscentedKalmanFilter", "4DVAR"): print("") - msg = "Algorithme en test en MultiFonction : %s"%algo - print(msg+"\n"+"-"*len(msg)) + msg = "Tests des ecarts attendus :" + print(msg+"\n"+"="*len(msg)) + for algo in ("ExtendedKalmanFilter", "KalmanFilter", "EnsembleKalmanFilter", "UnscentedKalmanFilter", "4DVAR"): + verify_similarity_of_algo_results(("Multi/"+algo, "Mono/"+algo), Xa, 1.e-20) + print(" Les resultats obtenus sont corrects.") + print("") # - adaopy = adaoBuilder.New() - adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "SetSeed":1000}) - adaopy.setBackground (Vector = [0,1,2]) - adaopy.setBackgroundError (ScalarSparseMatrix = 1.) - adaopy.setObservation (Vector = [0.5,1.5,2.5,0.5,1.5,2.5,0.5,1.5,2.5]) - adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1 1 1 1 1 1 1") - adaopy.setObservationOperator(OneFunction = MultiFonction01, InputFunctionAsMulti = True) - adaopy.setEvolutionError (ScalarSparseMatrix = 1.) 
- adaopy.setEvolutionModel (Matrix = "1 0 0;0 1 0;0 0 1") - adaopy.setObserver("Analysis",Template="ValuePrinter") - adaopy.execute() - Xa["Multi/"+algo] = adaopy.get("Analysis")[-1] - del adaopy - # - print("") - msg = "Tests des ecarts attendus :" - print(msg+"\n"+"="*len(msg)) - for algo in ("ExtendedKalmanFilter", "KalmanFilter", "EnsembleKalmanFilter", "UnscentedKalmanFilter", "4DVAR"): - verify_similarity_of_algo_results(("Multi/"+algo, "Mono/"+algo), Xa, 1.e-20) - print(" Les resultats obtenus sont corrects.") - print("") - # - return 0 + return 0 # ============================================================================== def almost_equal_vectors(v1, v2, precision = 1.e-15, msg = ""): @@ -137,5 +141,5 @@ def verify_similarity_of_algo_results(serie = [], Xa = {}, precision = 1.e-15): #=============================================================================== if __name__ == "__main__": - print('\nAUTODIAGNOSTIC\n') - test1() + print("\nAUTODIAGNOSTIC\n==============") + unittest.main() diff --git a/test/test6903/Verification_des_mono_et_multi_fonctions_F.py b/test/test6903/Verification_des_mono_et_multi_fonctions_F.py index 0cc31e8..98d3da8 100644 --- a/test/test6903/Verification_des_mono_et_multi_fonctions_F.py +++ b/test/test6903/Verification_des_mono_et_multi_fonctions_F.py @@ -21,10 +21,13 @@ # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D "Verification du fonctionnement correct d'entrees en mono ou multi-fonctions" -# ============================================================================== -import numpy, sys +import sys +import unittest +import numpy from adao import adaoBuilder +# ============================================================================== + def ElementaryFunction01( InputArgument ): """ Exemple de fonction non-lineaire et non-carree @@ -64,57 +67,58 @@ def MultiFonction01( xSerie ): return _ySerie # ============================================================================== -def test1(): - """ - Verification du fonctionnement identique pour les algorithmes autres - en utilisant une fonction non-lineaire et non-carree - """ - print(test1.__doc__) - Xa = {} - # - for algo in ("ParticleSwarmOptimization", "QuantileRegression", ): - print("") - msg = "Algorithme en test en MonoFonction : %s"%algo - print(msg+"\n"+"-"*len(msg)) +class InTest(unittest.TestCase): + def test1(self): + """ + Verification du fonctionnement identique pour les algorithmes autres + en utilisant une fonction non-lineaire et non-carree + """ + print(self.test1.__doc__) + Xa = {} + # + for algo in ("ParticleSwarmOptimization", "QuantileRegression", ): + print("") + msg = "Algorithme en test en MonoFonction : %s"%algo + print(msg+"\n"+"-"*len(msg)) + # + adaopy = adaoBuilder.New() + adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"BoxBounds":3*[[-1,3]], "SetSeed":1000}) + adaopy.setBackground (Vector = [0,1,2]) + adaopy.setBackgroundError (ScalarSparseMatrix = 1.) 
+ adaopy.setObservation (Vector = [0.5,1.5,2.5,0.5,1.5,2.5,0.5,1.5,2.5]) + adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1 1 1 1 1 1 1") + adaopy.setObservationOperator(OneFunction = ElementaryFunction01) + adaopy.setObserver("Analysis",Template="ValuePrinter") + adaopy.execute() + Xa["Mono/"+algo] = adaopy.get("Analysis")[-1] + del adaopy + # + for algo in ("ParticleSwarmOptimization", "QuantileRegression", ): + print("") + msg = "Algorithme en test en MultiFonction : %s"%algo + print(msg+"\n"+"-"*len(msg)) + # + adaopy = adaoBuilder.New() + adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"BoxBounds":3*[[-1,3]], "SetSeed":1000}) + adaopy.setBackground (Vector = [0,1,2]) + adaopy.setBackgroundError (ScalarSparseMatrix = 1.) + adaopy.setObservation (Vector = [0.5,1.5,2.5,0.5,1.5,2.5,0.5,1.5,2.5]) + adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1 1 1 1 1 1 1") + adaopy.setObservationOperator(OneFunction = MultiFonction01, InputFunctionAsMulti = True) + adaopy.setObserver("Analysis",Template="ValuePrinter") + adaopy.execute() + Xa["Multi/"+algo] = adaopy.get("Analysis")[-1] + del adaopy # - adaopy = adaoBuilder.New() - adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"BoxBounds":3*[[-1,3]], "SetSeed":1000}) - adaopy.setBackground (Vector = [0,1,2]) - adaopy.setBackgroundError (ScalarSparseMatrix = 1.) - adaopy.setObservation (Vector = [0.5,1.5,2.5,0.5,1.5,2.5,0.5,1.5,2.5]) - adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1 1 1 1 1 1 1") - adaopy.setObservationOperator(OneFunction = ElementaryFunction01) - adaopy.setObserver("Analysis",Template="ValuePrinter") - adaopy.execute() - Xa["Mono/"+algo] = adaopy.get("Analysis")[-1] - del adaopy - # - for algo in ("ParticleSwarmOptimization", "QuantileRegression", ): print("") - msg = "Algorithme en test en MultiFonction : %s"%algo - print(msg+"\n"+"-"*len(msg)) + msg = "Tests des ecarts attendus :" + print(msg+"\n"+"="*len(msg)) + for algo in ("ParticleSwarmOptimization", "QuantileRegression"): + verify_similarity_of_algo_results(("Multi/"+algo, "Mono/"+algo), Xa, 1.e-20) + print(" Les resultats obtenus sont corrects.") + print("") # - adaopy = adaoBuilder.New() - adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"BoxBounds":3*[[-1,3]], "SetSeed":1000}) - adaopy.setBackground (Vector = [0,1,2]) - adaopy.setBackgroundError (ScalarSparseMatrix = 1.) 
- adaopy.setObservation (Vector = [0.5,1.5,2.5,0.5,1.5,2.5,0.5,1.5,2.5]) - adaopy.setObservationError (DiagonalSparseMatrix = "1 1 1 1 1 1 1 1 1") - adaopy.setObservationOperator(OneFunction = MultiFonction01, InputFunctionAsMulti = True) - adaopy.setObserver("Analysis",Template="ValuePrinter") - adaopy.execute() - Xa["Multi/"+algo] = adaopy.get("Analysis")[-1] - del adaopy - # - print("") - msg = "Tests des ecarts attendus :" - print(msg+"\n"+"="*len(msg)) - for algo in ("ParticleSwarmOptimization", "QuantileRegression"): - verify_similarity_of_algo_results(("Multi/"+algo, "Mono/"+algo), Xa, 1.e-20) - print(" Les resultats obtenus sont corrects.") - print("") - # - return 0 + return 0 # ============================================================================== def almost_equal_vectors(v1, v2, precision = 1.e-15, msg = ""): @@ -133,5 +137,5 @@ def verify_similarity_of_algo_results(serie = [], Xa = {}, precision = 1.e-15): #=============================================================================== if __name__ == "__main__": - print('\nAUTODIAGNOSTIC\n') - test1() + print("\nAUTODIAGNOSTIC\n==============") + unittest.main() diff --git a/test/test6904/CTestTestfile.cmake b/test/test6904/CTestTestfile.cmake new file mode 100644 index 0000000..0b0a3d1 --- /dev/null +++ b/test/test6904/CTestTestfile.cmake @@ -0,0 +1,31 @@ +# Copyright (C) 2008-2019 EDF R&D +# +# This file is part of SALOME ADAO module +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +# +# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com +# + +SET(TEST_NAMES + Definition_complete_de_cas_3DVAR + ) + +FOREACH(tfile ${TEST_NAMES}) + SET(TEST_NAME ADAO_${tfile}) + ADD_TEST(${TEST_NAME} python ${tfile}.py) + #ADD_TEST(${TEST_NAME} python ${SALOME_TEST_DRIVER} ${TIMEOUT} ${tfile}.py) + SET_TESTS_PROPERTIES(${TEST_NAME} PROPERTIES LABELS "${COMPONENT_NAME}") +ENDFOREACH() diff --git a/test/test6904/Definition_complete_de_cas_3DVAR.py b/test/test6904/Definition_complete_de_cas_3DVAR.py new file mode 100644 index 0000000..9785127 --- /dev/null +++ b/test/test6904/Definition_complete_de_cas_3DVAR.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2008-2019 EDF R&D +# +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +# +# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com +# +# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D +"Verification d'un exemple de la documentation" + +import sys +import unittest +import numpy +from utExtend import assertAlmostEqualArrays + +# ============================================================================== +# +# Construction artificielle d'un exemple de donnees utilisateur +# ------------------------------------------------------------- +alpha = 5. +beta = 7 +gamma = 9.0 +# +alphamin, alphamax = 0., 10. +betamin, betamax = 3, 13 +gammamin, gammamax = 1.5, 15.5 +# +def simulation(x): + "Fonction de simulation H pour effectuer Y=H(X)" + import numpy + __x = numpy.matrix(numpy.ravel(numpy.matrix(x))).T + __H = numpy.matrix("1 0 0;0 2 0;0 0 3; 1 2 3") + return __H * __x +# +def multisimulation( xserie ): + yserie = [] + for x in xserie: + yserie.append( simulation( x ) ) + return yserie +# +# Observations obtenues par simulation +# ------------------------------------ +observations = simulation((2, 3, 4)) + +# ============================================================================== +class InTest(unittest.TestCase): + def test1(self): + print("""Exemple de la doc : + + Exploitation independante des resultats d'un cas de calcul + ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + """) + # + import numpy + from adao import adaoBuilder + # + # Mise en forme des entrees + # ------------------------- + Xb = (alpha, beta, gamma) + Bounds = ( + (alphamin, alphamax), + (betamin, betamax ), + (gammamin, gammamax)) + # + # TUI ADAO + # -------- + case = adaoBuilder.New() + case.set( 'AlgorithmParameters', + Algorithm = '3DVAR', # Mots-clé réservé + Parameters = { # Dictionnaire + "Bounds":Bounds, # Liste de paires de Real ou de None + "MaximumNumberOfSteps":100, # Int >= 0 + "CostDecrementTolerance":1.e-7, # Real > 0 + "StoreSupplementaryCalculations":[# Liste de mots-clés réservés + "CostFunctionJAtCurrentOptimum", + "CostFunctionJoAtCurrentOptimum", + "CurrentOptimum", + "SimulatedObservationAtCurrentOptimum", + "SimulatedObservationAtOptimum", + ], + } + ) + case.set( 'Background', + Vector = numpy.array(Xb), # array, list, tuple, matrix + Stored = True, # Bool + ) + case.set( 'Observation', + Vector = numpy.array(observations), # array, list, tuple, matrix + Stored = False, # Bool + ) + case.set( 'BackgroundError', + Matrix = None, # None ou matrice carrée + ScalarSparseMatrix = 1.0e10, # None ou Real > 0 + DiagonalSparseMatrix = None, # None ou vecteur + ) + case.set( 'ObservationError', + Matrix = None, # None ou matrice carrée + ScalarSparseMatrix = 1.0, # None ou Real > 0 + DiagonalSparseMatrix = None, # None ou vecteur + ) + case.set( 'ObservationOperator', + OneFunction = multisimulation, # MultiFonction [Y] = F([X]) + Parameters = { # Dictionnaire + "DifferentialIncrement":0.0001, # Real > 0 + "CenteredFiniteDifference":False, # Bool + }, + InputFunctionAsMulti = True, # Bool + ) + case.set( 'Observer', + Variable = "CurrentState", # Mot-clé + Template = "ValuePrinter", # Mot-clé + String = None, # None ou code Python + Info = None, # None ou string + + ) + case.execute() + # + # Exploitation independante + # ------------------------- + Xbackground = case.get("Background") + Xoptimum = 
case.get("Analysis")[-1] + FX_at_optimum = case.get("SimulatedObservationAtOptimum")[-1] + J_values = case.get("CostFunctionJAtCurrentOptimum")[:] + print("") + print("Number of internal iterations...: %i"%len(J_values)) + print("Initial state...................: %s"%(numpy.ravel(Xbackground),)) + print("Optimal state...................: %s"%(numpy.ravel(Xoptimum),)) + print("Simulation at optimal state.....: %s"%(numpy.ravel(FX_at_optimum),)) + print("") + # + ecart = assertAlmostEqualArrays(Xoptimum, [ 2., 3., 4.]) + # + print(" L'écart absolu maximal obtenu lors du test est de %.2e."%ecart) + print(" Les résultats obtenus sont corrects.") + print("") + # + return Xoptimum + +# ============================================================================== +if __name__ == '__main__': + print("\nAUTODIAGNOSTIC\n==============") + unittest.main()
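--

Editor's note (not part of the patch): the converted test6903 scripts above all follow the same pattern — build one ADAO case with the elementary (mono) form of a non-linear, non-square observation operator, rebuild it with the multi-function wrapper and InputFunctionAsMulti=True, and check that both analyses coincide. The condensed sketch below isolates that pattern for a single 3DVAR run, using only the adaoBuilder calls exercised by the patch. The operator used here is an illustrative stand-in mapping R^3 to R^9 (the real ElementaryFunction01 body lives in the test files), and the script assumes the "adao" package is importable.

# ==============================================================================
# -*- coding: utf-8 -*-
import unittest
import numpy
from adao import adaoBuilder

def ElementaryFunction(x):
    "Illustrative non-linear, non-square operator on one state vector (3 -> 9)"
    x = numpy.ravel(x)
    return numpy.concatenate((x, numpy.cos(x), x * x))

def MultiFunction(xSerie):
    "Same operator applied to a series of state vectors"
    return [ElementaryFunction(x) for x in xSerie]

class InTest(unittest.TestCase):
    def test1(self):
        "Mono and multi-function inputs must yield the same 3DVAR analysis"
        results = {}
        for mode, operator, as_multi in (
                ("Mono",  ElementaryFunction, False),
                ("Multi", MultiFunction,      True)):
            case = adaoBuilder.New()
            case.setAlgorithmParameters(
                Algorithm  = "3DVAR",
                Parameters = {
                    "MaximumNumberOfSteps":   100,
                    "CostDecrementTolerance": 1.e-7,
                    },
                )
            case.setBackground       (Vector = [0, 1, 2])
            case.setBackgroundError  (ScalarSparseMatrix = 1.)
            case.setObservation      (Vector = [0.5,1.5,2.5,0.5,1.5,2.5,0.5,1.5,2.5])
            case.setObservationError (DiagonalSparseMatrix = "1 1 1 1 1 1 1 1 1")
            if as_multi:
                # Wrapper receives a series of states and returns a series of results
                case.setObservationOperator(OneFunction = operator, InputFunctionAsMulti = True)
            else:
                # Elementary form: one state in, one result out
                case.setObservationOperator(OneFunction = operator)
            case.execute()
            results[mode] = numpy.ravel(case.get("Analysis")[-1])
        self.assertTrue(numpy.allclose(results["Mono"], results["Multi"]))

if __name__ == "__main__":
    print("\nAUTODIAGNOSTIC\n==============")
    unittest.main()
# ==============================================================================

Run directly with "python" (unittest.main() drives it), or register it through a CTestTestfile.cmake entry like the one added in test6904.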