From: Jean-Philippe ARGAUD
Date: Sun, 3 Feb 2019 20:44:53 +0000 (+0100)
Subject: Minor updates for module behavior and tests
X-Git-Tag: V9_3_0.1-prealpha1~2
X-Git-Url: http://git.salome-platform.org/gitweb/?a=commitdiff_plain;h=6df1c4913064b16ffa25b257fbc844f568344b08;p=modules%2Fadao.git

Minor updates for module behavior and tests
---

diff --git a/src/daComposant/daAlgorithms/3DVAR.py b/src/daComposant/daAlgorithms/3DVAR.py
index d600275..0dc21b0 100644
--- a/src/daComposant/daAlgorithms/3DVAR.py
+++ b/src/daComposant/daAlgorithms/3DVAR.py
@@ -203,12 +203,12 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 self.StoredVariables["CurrentOptimum"].store( self.StoredVariables["CurrentState"][IndexMin] )
             if self._toStore("SimulatedObservationAtCurrentOptimum"):
                 self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] )
-            if self._toStore("CostFunctionJAtCurrentOptimum"):
-                self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] )
             if self._toStore("CostFunctionJbAtCurrentOptimum"):
                 self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJb"][IndexMin] )
             if self._toStore("CostFunctionJoAtCurrentOptimum"):
                 self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJo"][IndexMin] )
+            if self._toStore("CostFunctionJAtCurrentOptimum"):
+                self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] )
             return J
         #
         def GradientOfCostFunction(x):
diff --git a/src/daComposant/daAlgorithms/Blue.py b/src/daComposant/daAlgorithms/Blue.py
index 6dcca39..f93ace8 100644
--- a/src/daComposant/daAlgorithms/Blue.py
+++ b/src/daComposant/daAlgorithms/Blue.py
@@ -46,8 +46,12 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 "APosterioriVariances",
                 "BMA",
                 "CostFunctionJ",
+                "CostFunctionJAtCurrentOptimum",
                 "CostFunctionJb",
+                "CostFunctionJbAtCurrentOptimum",
                 "CostFunctionJo",
+                "CostFunctionJoAtCurrentOptimum",
+                "CurrentOptimum",
                 "CurrentState",
                 "Innovation",
                 "MahalanobisConsistency",
@@ -56,6 +60,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 "SigmaBck2",
                 "SigmaObs2",
                 "SimulatedObservationAtBackground",
+                "SimulatedObservationAtCurrentOptimum",
                 "SimulatedObservationAtCurrentState",
                 "SimulatedObservationAtOptimum",
                 "SimulationQuantiles",
@@ -136,17 +141,22 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         # Calcul de la fonction coût
         # --------------------------
         if self._parameters["StoreInternalVariables"] or \
-            self._toStore("CostFunctionJ") or \
+            self._toStore("CostFunctionJ") or self._toStore("CostFunctionJAtCurrentOptimum") or \
+            self._toStore("CostFunctionJb") or self._toStore("CostFunctionJbAtCurrentOptimum") or \
+            self._toStore("CostFunctionJo") or self._toStore("CostFunctionJoAtCurrentOptimum") or \
             self._toStore("OMA") or \
             self._toStore("SigmaObs2") or \
             self._toStore("MahalanobisConsistency") or \
+            self._toStore("SimulatedObservationAtCurrentOptimum") or \
             self._toStore("SimulatedObservationAtCurrentState") or \
             self._toStore("SimulatedObservationAtOptimum") or \
             self._toStore("SimulationQuantiles"):
             HXa = Hm * Xa
             oma = Y - HXa
         if self._parameters["StoreInternalVariables"] or \
-            self._toStore("CostFunctionJ") or \
+            self._toStore("CostFunctionJ") or self._toStore("CostFunctionJAtCurrentOptimum") or \
+            self._toStore("CostFunctionJb") or self._toStore("CostFunctionJbAtCurrentOptimum") or \
+            self._toStore("CostFunctionJo") or self._toStore("CostFunctionJoAtCurrentOptimum") or \
             self._toStore("MahalanobisConsistency"):
             Jb = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) )
             Jo = float( 0.5 * oma.T * RI * oma )
@@ -154,6 +164,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             self.StoredVariables["CostFunctionJb"].store( Jb )
             self.StoredVariables["CostFunctionJo"].store( Jo )
             self.StoredVariables["CostFunctionJ" ].store( J )
+            self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( Jb )
+            self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( Jo )
+            self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( J )
         #
         # Calcul de la covariance d'analyse
         # ---------------------------------
@@ -177,6 +190,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         # ---------------------------------------
         if self._parameters["StoreInternalVariables"] or self._toStore("CurrentState"):
             self.StoredVariables["CurrentState"].store( numpy.ravel(Xa) )
+        if self._toStore("CurrentOptimum"):
+            self.StoredVariables["CurrentOptimum"].store( numpy.ravel(Xa) )
         if self._toStore("Innovation"):
             self.StoredVariables["Innovation"].store( numpy.ravel(d) )
         if self._toStore("BMA"):
@@ -219,6 +234,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             self.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(HXb) )
         if self._toStore("SimulatedObservationAtCurrentState"):
             self.StoredVariables["SimulatedObservationAtCurrentState"].store( numpy.ravel(HXa) )
+        if self._toStore("SimulatedObservationAtCurrentOptimum"):
+            self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( numpy.ravel(HXa) )
         if self._toStore("SimulatedObservationAtOptimum"):
             self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) )
         #
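With these hunks, the direct (one-shot) Blue solver accepts the same "*AtCurrentOptimum" output keys as the iterative algorithms, each series receiving exactly one entry, so post-processing code can treat both families uniformly. A minimal sketch of the intended usage, assuming a working ADAO installation; the TUI calls mirror the new test added at the end of this commit, and the small operator and values are illustrative only:

    # Minimal sketch (assumes an installed ADAO): request the new
    # "*AtCurrentOptimum" outputs of Blue, then read them back.
    import numpy
    from adao import adaoBuilder

    case = adaoBuilder.New()
    case.set( 'AlgorithmParameters', Algorithm = 'Blue', Parameters = {
        "StoreSupplementaryCalculations":[
            "CostFunctionJAtCurrentOptimum",
            "CurrentOptimum",
            "SimulatedObservationAtCurrentOptimum",
            ]} )
    case.set( 'Background',          Vector = [5., 7., 9.] )
    case.set( 'BackgroundError',     ScalarSparseMatrix = 1.e10 )
    case.set( 'Observation',         Vector = [2., 6., 12.] )
    case.set( 'ObservationError',    ScalarSparseMatrix = 1. )
    case.set( 'ObservationOperator', Matrix = "1 0 0;0 2 0;0 0 3" )
    case.execute()

    # Blue is a direct solver, so each series holds a single entry,
    # and [-1] behaves exactly as for an iterative algorithm.
    print( case.get("CurrentOptimum")[-1] )                # close to [2. 3. 4.]
    print( case.get("CostFunctionJAtCurrentOptimum")[-1] ) # close to 0.

The same holds for ExtendedBlue below, whose diff differs only by the nonlinear observation operator call.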
self._toStore("CostFunctionJo") or self._toStore("CostFunctionJoAtCurrentOptimum") or \ self._toStore("MahalanobisConsistency"): Jb = float( 0.5 * (Xa - Xb).T * BI * (Xa - Xb) ) Jo = float( 0.5 * oma.T * RI * oma ) @@ -154,6 +164,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): self.StoredVariables["CostFunctionJb"].store( Jb ) self.StoredVariables["CostFunctionJo"].store( Jo ) self.StoredVariables["CostFunctionJ" ].store( J ) + self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( Jb ) + self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( Jo ) + self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( J ) # # Calcul de la covariance d'analyse # --------------------------------- @@ -177,6 +190,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # --------------------------------------- if self._parameters["StoreInternalVariables"] or self._toStore("CurrentState"): self.StoredVariables["CurrentState"].store( numpy.ravel(Xa) ) + if self._toStore("CurrentOptimum"): + self.StoredVariables["CurrentOptimum"].store( numpy.ravel(Xa) ) if self._toStore("Innovation"): self.StoredVariables["Innovation"].store( numpy.ravel(d) ) if self._toStore("BMA"): @@ -219,6 +234,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): self.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(HXb) ) if self._toStore("SimulatedObservationAtCurrentState"): self.StoredVariables["SimulatedObservationAtCurrentState"].store( numpy.ravel(HXa) ) + if self._toStore("SimulatedObservationAtCurrentOptimum"): + self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( numpy.ravel(HXa) ) if self._toStore("SimulatedObservationAtOptimum"): self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) ) # diff --git a/src/daComposant/daAlgorithms/ExtendedBlue.py b/src/daComposant/daAlgorithms/ExtendedBlue.py index c6856b9..1af6a09 100644 --- a/src/daComposant/daAlgorithms/ExtendedBlue.py +++ b/src/daComposant/daAlgorithms/ExtendedBlue.py @@ -45,20 +45,25 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): "APosterioriStandardDeviations", "APosterioriVariances", "BMA", - "OMA", - "OMB", - "CurrentState", "CostFunctionJ", + "CostFunctionJAtCurrentOptimum", "CostFunctionJb", + "CostFunctionJbAtCurrentOptimum", "CostFunctionJo", + "CostFunctionJoAtCurrentOptimum", + "CurrentOptimum", + "CurrentState", "Innovation", + "MahalanobisConsistency", + "OMA", + "OMB", "SigmaBck2", "SigmaObs2", - "MahalanobisConsistency", - "SimulationQuantiles", "SimulatedObservationAtBackground", + "SimulatedObservationAtCurrentOptimum", "SimulatedObservationAtCurrentState", "SimulatedObservationAtOptimum", + "SimulationQuantiles", ] ) self.defineRequiredParameter( @@ -137,17 +142,22 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # Calcul de la fonction coût # -------------------------- if self._parameters["StoreInternalVariables"] or \ - self._toStore("CostFunctionJ") or \ + self._toStore("CostFunctionJ") or self._toStore("CostFunctionJAtCurrentOptimum") or \ + self._toStore("CostFunctionJb") or self._toStore("CostFunctionJbAtCurrentOptimum") or \ + self._toStore("CostFunctionJo") or self._toStore("CostFunctionJoAtCurrentOptimum") or \ self._toStore("OMA") or \ self._toStore("SigmaObs2") or \ self._toStore("MahalanobisConsistency") or \ + self._toStore("SimulatedObservationAtCurrentOptimum") or \ self._toStore("SimulatedObservationAtCurrentState") or \ self._toStore("SimulatedObservationAtOptimum") or \ self._toStore("SimulationQuantiles"): HXa = numpy.matrix(numpy.ravel( 
diff --git a/src/daComposant/daAlgorithms/LinearLeastSquares.py b/src/daComposant/daAlgorithms/LinearLeastSquares.py
index b064ee7..15798c5 100644
--- a/src/daComposant/daAlgorithms/LinearLeastSquares.py
+++ b/src/daComposant/daAlgorithms/LinearLeastSquares.py
@@ -39,7 +39,20 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             default  = [],
             typecast = tuple,
             message  = "Liste de calculs supplémentaires à stocker et/ou effectuer",
-            listval  = ["OMA", "CurrentState", "CostFunctionJ", "CostFunctionJb", "CostFunctionJo", "SimulatedObservationAtCurrentState", "SimulatedObservationAtOptimum"]
+            listval  = [
+                "CostFunctionJ",
+                "CostFunctionJAtCurrentOptimum",
+                "CostFunctionJb",
+                "CostFunctionJbAtCurrentOptimum",
+                "CostFunctionJo",
+                "CostFunctionJoAtCurrentOptimum",
+                "CurrentOptimum",
+                "CurrentState",
+                "OMA",
+                "SimulatedObservationAtCurrentOptimum",
+                "SimulatedObservationAtCurrentState",
+                "SimulatedObservationAtOptimum",
+                ]
             )
         self.requireInputArguments(
             mandatory= ("Y", "HO", "R"),
@@ -64,28 +77,43 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         # Calcul de la fonction coût
         # --------------------------
         if self._parameters["StoreInternalVariables"] or \
-            self._toStore("CostFunctionJ") or \
+            self._toStore("CostFunctionJ") or self._toStore("CostFunctionJAtCurrentOptimum") or \
+            self._toStore("CostFunctionJb") or self._toStore("CostFunctionJbAtCurrentOptimum") or \
+            self._toStore("CostFunctionJo") or self._toStore("CostFunctionJoAtCurrentOptimum") or \
             self._toStore("OMA") or \
+            self._toStore("SimulatedObservationAtCurrentOptimum") or \
+            self._toStore("SimulatedObservationAtCurrentState") or \
             self._toStore("SimulatedObservationAtOptimum"):
             HXa = Hm * Xa
             oma = Y - HXa
         if self._parameters["StoreInternalVariables"] or \
-            self._toStore("CostFunctionJ"):
+            self._toStore("CostFunctionJ") or self._toStore("CostFunctionJAtCurrentOptimum") or \
+            self._toStore("CostFunctionJb") or self._toStore("CostFunctionJbAtCurrentOptimum") or \
+            self._toStore("CostFunctionJo") or self._toStore("CostFunctionJoAtCurrentOptimum"):
             Jb = 0.
-            Jo = 0.5 * oma.T * RI * oma
-            J  = float( Jb ) + float( Jo )
+            Jo = float( 0.5 * oma.T * RI * oma )
+            J  = Jb + Jo
             self.StoredVariables["CostFunctionJb"].store( Jb )
             self.StoredVariables["CostFunctionJo"].store( Jo )
             self.StoredVariables["CostFunctionJ" ].store( J )
+            self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( Jb )
+            self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( Jo )
+            self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( J )
         #
         # Calculs et/ou stockages supplémentaires
         # ---------------------------------------
         if self._parameters["StoreInternalVariables"] or self._toStore("CurrentState"):
             self.StoredVariables["CurrentState"].store( numpy.ravel(Xa) )
+        if self._toStore("CurrentOptimum"):
+            self.StoredVariables["CurrentOptimum"].store( numpy.ravel(Xa) )
         if self._toStore("OMA"):
             self.StoredVariables["OMA"].store( numpy.ravel(oma) )
+        if self._toStore("SimulatedObservationAtBackground"):
+            self.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(HXb) )
         if self._toStore("SimulatedObservationAtCurrentState"):
             self.StoredVariables["SimulatedObservationAtCurrentState"].store( numpy.ravel(HXa) )
+        if self._toStore("SimulatedObservationAtCurrentOptimum"):
+            self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( numpy.ravel(HXa) )
         if self._toStore("SimulatedObservationAtOptimum"):
             self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) )
         #
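LinearLeastSquares has no background term, so the hunk stores Jb as identically zero and J reduces to the observation misfit; the analysis it reports is the generalized least squares solution. A hypothetical numpy sketch of that computation, reusing the operator above (variable names are illustrative, not a copy of the module's internals):

    # Sketch of the least squares analysis whose cost terms the hunk stores:
    #   Xa = (H^T R^{-1} H)^{-1} H^T R^{-1} Y, with Jb = 0 by construction.
    import numpy

    H  = numpy.matrix("1 0 0;0 2 0;0 0 3;1 2 3")
    Y  = H * numpy.matrix("2.;3.;4.")
    RI = numpy.linalg.inv( numpy.eye(4) )      # R^{-1}

    K   = (H.T * RI * H).I * H.T * RI          # generalized LS pseudo-inverse
    Xa  = K * Y
    oma = Y - H * Xa
    Jb  = 0.
    Jo  = float( 0.5 * oma.T * RI * oma )      # ~0: Y is exactly H times truth
    J   = Jb + Jo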
self._toStore("OMA") or \ + self._toStore("SimulatedObservationAtCurrentOptimum") or \ + self._toStore("SimulatedObservationAtCurrentState") or \ self._toStore("SimulatedObservationAtOptimum"): HXa = Hm * Xa oma = Y - HXa if self._parameters["StoreInternalVariables"] or \ - self._toStore("CostFunctionJ"): + self._toStore("CostFunctionJ") or self._toStore("CostFunctionJAtCurrentOptimum") or \ + self._toStore("CostFunctionJb") or self._toStore("CostFunctionJbAtCurrentOptimum") or \ + self._toStore("CostFunctionJo") or self._toStore("CostFunctionJoAtCurrentOptimum"): Jb = 0. - Jo = 0.5 * oma.T * RI * oma - J = float( Jb ) + float( Jo ) + Jo = float( 0.5 * oma.T * RI * oma ) + J = Jb + Jo self.StoredVariables["CostFunctionJb"].store( Jb ) self.StoredVariables["CostFunctionJo"].store( Jo ) self.StoredVariables["CostFunctionJ" ].store( J ) + self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( Jb ) + self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( Jo ) + self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( J ) # # Calculs et/ou stockages supplémentaires # --------------------------------------- if self._parameters["StoreInternalVariables"] or self._toStore("CurrentState"): self.StoredVariables["CurrentState"].store( numpy.ravel(Xa) ) + if self._toStore("CurrentOptimum"): + self.StoredVariables["CurrentOptimum"].store( numpy.ravel(Xa) ) if self._toStore("OMA"): self.StoredVariables["OMA"].store( numpy.ravel(oma) ) + if self._toStore("SimulatedObservationAtBackground"): + self.StoredVariables["SimulatedObservationAtBackground"].store( numpy.ravel(HXb) ) if self._toStore("SimulatedObservationAtCurrentState"): self.StoredVariables["SimulatedObservationAtCurrentState"].store( numpy.ravel(HXa) ) + if self._toStore("SimulatedObservationAtCurrentOptimum"): + self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( numpy.ravel(HXa) ) if self._toStore("SimulatedObservationAtOptimum"): self.StoredVariables["SimulatedObservationAtOptimum"].store( numpy.ravel(HXa) ) # diff --git a/src/daComposant/daAlgorithms/NonLinearLeastSquares.py b/src/daComposant/daAlgorithms/NonLinearLeastSquares.py index c1e76ac..acd77da 100644 --- a/src/daComposant/daAlgorithms/NonLinearLeastSquares.py +++ b/src/daComposant/daAlgorithms/NonLinearLeastSquares.py @@ -170,12 +170,12 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): self.StoredVariables["CurrentOptimum"].store( self.StoredVariables["CurrentState"][IndexMin] ) if self._toStore("SimulatedObservationAtCurrentOptimum"): self.StoredVariables["SimulatedObservationAtCurrentOptimum"].store( self.StoredVariables["SimulatedObservationAtCurrentState"][IndexMin] ) - if self._toStore("CostFunctionJAtCurrentOptimum"): - self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] ) if self._toStore("CostFunctionJbAtCurrentOptimum"): self.StoredVariables["CostFunctionJbAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJb"][IndexMin] ) if self._toStore("CostFunctionJoAtCurrentOptimum"): self.StoredVariables["CostFunctionJoAtCurrentOptimum"].store( self.StoredVariables["CostFunctionJo"][IndexMin] ) + if self._toStore("CostFunctionJAtCurrentOptimum"): + self.StoredVariables["CostFunctionJAtCurrentOptimum" ].store( self.StoredVariables["CostFunctionJ" ][IndexMin] ) return J # def GradientOfCostFunction(x): diff --git a/test/test6904/CTestTestfile.cmake b/test/test6904/CTestTestfile.cmake index 0b0a3d1..552e54d 100644 --- a/test/test6904/CTestTestfile.cmake +++ 
diff --git a/test/test6904/CTestTestfile.cmake b/test/test6904/CTestTestfile.cmake
index 0b0a3d1..552e54d 100644
--- a/test/test6904/CTestTestfile.cmake
+++ b/test/test6904/CTestTestfile.cmake
@@ -21,6 +21,7 @@
 
 SET(TEST_NAMES
   Definition_complete_de_cas_3DVAR
+  Definition_complete_de_cas_Blue
   )
 
 FOREACH(tfile ${TEST_NAMES})
diff --git a/test/test6904/Definition_complete_de_cas_Blue.py b/test/test6904/Definition_complete_de_cas_Blue.py
new file mode 100644
index 0000000..3daa993
--- /dev/null
+++ b/test/test6904/Definition_complete_de_cas_Blue.py
@@ -0,0 +1,167 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2008-2019 EDF R&D
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
+"Verification d'un exemple de la documentation"
+
+import sys
+import unittest
+import numpy
+
+# ==============================================================================
+#
+# Construction artificielle d'un exemple de donnees utilisateur
+# -------------------------------------------------------------
+alpha = 5.
+beta = 7
+gamma = 9.0
+#
+alphamin, alphamax = 0., 10.
+betamin,  betamax  = 3, 13
+gammamin, gammamax = 1.5, 15.5
+#
+def simulation(x):
+    "Fonction de simulation H pour effectuer Y=H(X)"
+    import numpy
+    __x = numpy.matrix(numpy.ravel(numpy.matrix(x))).T
+    __H = numpy.matrix("1 0 0;0 2 0;0 0 3; 1 2 3")
+    return __H * __x
+#
+def multisimulation( xserie ):
+    yserie = []
+    for x in xserie:
+        yserie.append( simulation( x ) )
+    return yserie
+#
+# Observations obtenues par simulation
+# ------------------------------------
+observations = simulation((2, 3, 4))
+
+# ==============================================================================
+class InTest(unittest.TestCase):
+    def test1(self):
+        print("""Exemple de la doc :
+
+        Exploitation independante des resultats d'un cas de calcul
+        ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+        """)
+        #
+        import numpy
+        from adao import adaoBuilder
+        #
+        # Mise en forme des entrees
+        # -------------------------
+        Xb = (alpha, beta, gamma)
+        Bounds = (
+            (alphamin, alphamax),
+            (betamin,  betamax ),
+            (gammamin, gammamax))
+        #
+        # TUI ADAO
+        # --------
+        case = adaoBuilder.New()
+        case.set( 'AlgorithmParameters',
+            Algorithm = 'Blue',                    # Mot-clé réservé
+            Parameters = {                         # Dictionnaire
+                "StoreSupplementaryCalculations":[ # Liste de mots-clés réservés
+                    "CostFunctionJAtCurrentOptimum",
+                    "CostFunctionJoAtCurrentOptimum",
+                    "CurrentOptimum",
+                    "SimulatedObservationAtCurrentOptimum",
+                    "SimulatedObservationAtOptimum",
+                    ],
+                }
+            )
+        case.set( 'Background',
+            Vector = numpy.array(Xb),              # array, list, tuple, matrix
+            Stored = True,                         # Bool
+            )
+        case.set( 'Observation',
+            Vector = numpy.array(observations),    # array, list, tuple, matrix
+            Stored = False,                        # Bool
+            )
+        case.set( 'BackgroundError',
+            Matrix = None,                         # None ou matrice carrée
+            ScalarSparseMatrix = 1.0e10,           # None ou Real > 0
+            DiagonalSparseMatrix = None,           # None ou vecteur
+            )
+        case.set( 'ObservationError',
+            Matrix = None,                         # None ou matrice carrée
+            ScalarSparseMatrix = 1.0,              # None ou Real > 0
+            DiagonalSparseMatrix = None,           # None ou vecteur
+            )
+        case.set( 'ObservationOperator',
+            OneFunction = multisimulation,         # MultiFonction [Y] = F([X])
+            Parameters = {                         # Dictionnaire
+                "DifferentialIncrement":0.0001,    # Real > 0
+                "CenteredFiniteDifference":False,  # Bool
+                },
+            InputFunctionAsMulti = True,           # Bool
+            )
+        case.set( 'Observer',
+            Variable = "CurrentState",             # Mot-clé
+            Template = "ValuePrinter",             # Mot-clé
+            String = None,                         # None ou code Python
+            Info = None,                           # None ou string
+
+            )
+        case.execute()
+        #
+        # Exploitation independante
+        # -------------------------
+        Xbackground   = case.get("Background")
+        Xoptimum      = case.get("Analysis")[-1]
+        FX_at_optimum = case.get("SimulatedObservationAtOptimum")[-1]
+        J_values      = case.get("CostFunctionJAtCurrentOptimum")[:]
+        print("")
+        print("Number of internal iterations...: %i"%len(J_values))
+        print("Initial state...................: %s"%(numpy.ravel(Xbackground),))
+        print("Optimal state...................: %s"%(numpy.ravel(Xoptimum),))
+        print("Simulation at optimal state.....: %s"%(numpy.ravel(FX_at_optimum),))
+        print("")
+        #
+        # Fin du cas
+        # ----------
+        ecart = assertAlmostEqualArrays(Xoptimum, [ 2., 3., 4.])
+        #
+        print("  L'écart absolu maximal obtenu lors du test est de %.2e."%ecart)
+        print("  Les résultats obtenus sont corrects.")
+        print("")
+        #
+        return Xoptimum
+
+# ==============================================================================
+def assertAlmostEqualArrays(first, second, places=7, msg=None, delta=None):
+    "Compare two vectors, like unittest.assertAlmostEqual"
+    import numpy
+    if msg is not None:
+        print(msg)
+    if delta is not None:
+        if ( numpy.abs(numpy.asarray(first) - numpy.asarray(second)) > float(delta) ).any():
+            raise AssertionError("%s != %s within %s places"%(first, second, delta))
+    else:
+        if ( numpy.abs(numpy.asarray(first) - numpy.asarray(second)) > 10**(-int(places)) ).any():
+            raise AssertionError("%s != %s within %i places"%(first, second, places))
+    return max(abs(numpy.asarray(first) - numpy.asarray(second)))
+
+# ==============================================================================
+if __name__ == '__main__':
+    print("\nAUTODIAGNOSTIC\n==============")
+    unittest.main()