From 58842495258bd1a6df58053982fccb42a49344a0 Mon Sep 17 00:00:00 2001
From: Jean-Philippe ARGAUD
Date: Tue, 1 Jul 2014 16:11:28 +0200
Subject: [PATCH] User messages and naming convention improvements

---
 resources/ADAOSchemaCatalog.xml               |  1 -
 src/daComposant/daAlgorithms/AdjointTest.py   |  2 +-
 src/daComposant/daAlgorithms/FunctionTest.py  | 24 ++++++++-------
 src/daComposant/daAlgorithms/LinearityTest.py |  2 +-
 .../daAlgorithms/ParticleSwarmOptimization.py | 29 +++++++++++--------
 5 files changed, 32 insertions(+), 26 deletions(-)

diff --git a/resources/ADAOSchemaCatalog.xml b/resources/ADAOSchemaCatalog.xml
index bc2095d..1b888c1 100644
--- a/resources/ADAOSchemaCatalog.xml
+++ b/resources/ADAOSchemaCatalog.xml
@@ -489,7 +489,6 @@ ADD.analyze()
 #-*-coding:iso-8859-1-*-
 import logging
 logging.debug("TERMINATE Entering in SimpleUserAnalysis")
-from daYacsIntegration.daStudy import *
 ADD = Study.getAssimilationStudy()
 # User code is below
diff --git a/src/daComposant/daAlgorithms/AdjointTest.py b/src/daComposant/daAlgorithms/AdjointTest.py
index cd8288e..fb2f7c8 100644
--- a/src/daComposant/daAlgorithms/AdjointTest.py
+++ b/src/daComposant/daAlgorithms/AdjointTest.py
@@ -109,7 +109,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
 
               R(Alpha) = | < TangentF_X(dX) , Y > - < dX , AdjointF_X(Y) > |
 
-            qui doit rester constamment egal zero a la precision du calcul.
+            qui doit rester constamment egal a zero a la precision du calcul.
             On prend dX0 = Normal(0,X) et dX = Alpha*dX0. F est le code de calcul.
             Y doit etre dans l'image de F. S'il n'est pas donne, on prend Y = F(X).
             """
diff --git a/src/daComposant/daAlgorithms/FunctionTest.py b/src/daComposant/daAlgorithms/FunctionTest.py
index 319b9ee..e656b5c 100644
--- a/src/daComposant/daAlgorithms/FunctionTest.py
+++ b/src/daComposant/daAlgorithms/FunctionTest.py
@@ -62,7 +62,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         #
         Hm = HO["Direct"].appliedTo
         #
-        Xn = numpy.asmatrix(numpy.ravel( Xb )).T
+        Xn = copy.copy( Xb )
         #
         # ----------
         _p = self._parameters["NumberOfPrintedDigits"]
@@ -99,22 +99,24 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                 print("===> Repetition step number %i on a total of %i\n"%(i+1,self._parameters["NumberOfRepetition"]))
             print("===> Launching direct operator evaluation\n")
             #
-            Y = Hm( Xn )
+            Yn = Hm( Xn )
             #
            print("\n===> End of direct operator evaluation\n")
             #
             msg = ("===> Information after evaluation:\n")
-            msg += ("\n Characteristics of output vector Y, to compare to other calculations:\n")
-            msg += (" Type...............: %s\n")%type( Y )
-            msg += (" Lenght of vector...: %i\n")%max(numpy.matrix( Y ).shape)
-            msg += (" Minimum value......: %."+str(_p)+"e\n")%numpy.min( Y )
-            msg += (" Maximum value......: %."+str(_p)+"e\n")%numpy.max( Y )
-            msg += (" Mean of vector.....: %."+str(_p)+"e\n")%numpy.mean( Y )
-            msg += (" Standard error.....: %."+str(_p)+"e\n")%numpy.std( Y )
-            msg += (" L2 norm of vector..: %."+str(_p)+"e\n")%numpy.linalg.norm( Y )
+            msg += ("\n Characteristics of simulated output vector Y=H(X), to compare to others:\n")
+            msg += (" Type...............: %s\n")%type( Yn )
+            msg += (" Lenght of vector...: %i\n")%max(numpy.matrix( Yn ).shape)
+            msg += (" Minimum value......: %."+str(_p)+"e\n")%numpy.min( Yn )
+            msg += (" Maximum value......: %."+str(_p)+"e\n")%numpy.max( Yn )
+            msg += (" Mean of vector.....: %."+str(_p)+"e\n")%numpy.mean( Yn )
+            msg += (" Standard error.....: %."+str(_p)+"e\n")%numpy.std( Yn )
+            msg += (" L2 norm of vector..: %."+str(_p)+"e\n")%numpy.linalg.norm( Yn )
             print(msg)
             #
-            Ys.append( copy.copy( numpy.ravel(Y) ) )
+            Ys.append( copy.copy( numpy.ravel(
+                Yn
+                ) ) )
             #
             print(" %s\n"%("-"*75,))
         if self._parameters["SetDebug"]:
diff --git a/src/daComposant/daAlgorithms/LinearityTest.py b/src/daComposant/daAlgorithms/LinearityTest.py
index 2d29fa8..e1f922d 100644
--- a/src/daComposant/daAlgorithms/LinearityTest.py
+++ b/src/daComposant/daAlgorithms/LinearityTest.py
@@ -27,7 +27,7 @@ import numpy, math
 # ==============================================================================
 class ElementaryAlgorithm(BasicObjects.Algorithm):
     def __init__(self):
-        BasicObjects.Algorithm.__init__(self, "FUNCTIONTEST")
+        BasicObjects.Algorithm.__init__(self, "LINEARITYTEST")
         self.defineRequiredParameter(
             name = "ResiduFormula",
             default = "CenteredDL",
diff --git a/src/daComposant/daAlgorithms/ParticleSwarmOptimization.py b/src/daComposant/daAlgorithms/ParticleSwarmOptimization.py
index 862c350..ba52587 100644
--- a/src/daComposant/daAlgorithms/ParticleSwarmOptimization.py
+++ b/src/daComposant/daAlgorithms/ParticleSwarmOptimization.py
@@ -128,26 +128,28 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                     raise ValueError("Background and Observation error covariance matrix has to be properly defined!")
                 Jb = 0.5 * (_X - Xb).T * BI * (_X - Xb)
                 Jo = 0.5 * (Y - _HX).T * RI * (Y - _HX)
-                J = float( Jb ) + float( Jo )
             elif QualityMeasure in ["WeightedLeastSquares","WLS","PonderatedLeastSquares","PLS"]:
                 if RI is None:
                     raise ValueError("Observation error covariance matrix has to be properly defined!")
                 Jb = 0.
                 Jo = 0.5 * (Y - _HX).T * RI * (Y - _HX)
-                J = float( Jb ) + float( Jo )
             elif QualityMeasure in ["LeastSquares","LS","L2"]:
                 Jb = 0.
                 Jo = 0.5 * (Y - _HX).T * (Y - _HX)
-                J = float( Jb ) + float( Jo )
             elif QualityMeasure in ["AbsoluteValue","L1"]:
                 Jb = 0.
                 Jo = numpy.sum( numpy.abs(Y - _HX) )
-                J = float( Jb ) + float( Jo )
             elif QualityMeasure in ["MaximumError","ME"]:
                 Jb = 0.
                 Jo = numpy.max( numpy.abs(Y - _HX) )
-                J = float( Jb ) + float( Jo )
             #
+            J = float( Jb ) + float( Jo )
+            #
+            if self._parameters["StoreInternalVariables"]:
+                self.StoredVariables["CurrentState"].store( _X )
+            self.StoredVariables["CostFunctionJb"].store( Jb )
+            self.StoredVariables["CostFunctionJo"].store( Jo )
+            self.StoredVariables["CostFunctionJ" ].store( J )
             return J
         #
         # Point de démarrage de l'optimisation : Xini = Xb
@@ -183,18 +185,19 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         qBest = CostFunction(Best,self._parameters["QualityCriterion"])
         #
         for i in range(self._parameters["NumberOfInsects"]):
-            insect = numpy.array(PosInsect[:,i].A1)
+            insect = numpy.ravel(PosInsect[:,i])
             quality = CostFunction(insect,self._parameters["QualityCriterion"])
             qBestPosInsect.append(quality)
             if quality < qBest:
-                Best = insect
-                qBest = quality
+                Best = copy.copy( insect )
+                qBest = copy.copy( quality )
+        logging.debug("%s Initialisation, Insecte = %s, Qualité = %s"%(self._name, str(Best), str(qBest)))
         #
         # Minimisation de la fonctionnelle
         # --------------------------------
         for n in range(self._parameters["MaximumNumberOfSteps"]):
             for i in range(self._parameters["NumberOfInsects"]) :
-                insect = PosInsect[:,i]
+                insect = numpy.ravel(PosInsect[:,i])
                 rp = numpy.random.uniform(size=nbparam)
                 rg = numpy.random.uniform(size=nbparam)
                 for j in range(nbparam) :
@@ -202,10 +205,12 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
                     PosInsect[j,i] = PosInsect[j,i]+VelocityInsect[j,i]
                 quality = CostFunction(insect,self._parameters["QualityCriterion"])
                 if quality < qBestPosInsect[i]:
-                    BestPosInsect[:,i] = numpy.ravel( insect )
+                    BestPosInsect[:,i] = copy.copy( insect )
+                    qBestPosInsect[i] = copy.copy( quality )
                     if quality < qBest :
-                        Best = numpy.ravel( insect )
-                        qBest = quality
+                        Best = copy.copy( insect )
+                        qBest = copy.copy( quality )
+            logging.debug("%s Etape %i, Insecte = %s, Qualité = %s"%(self._name, n, str(Best), str(qBest)))
         #
         if self._parameters["StoreInternalVariables"]:
             self.StoredVariables["CurrentState"].store( Best )
-- 
2.39.2
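
Note on the AdjointTest docstring corrected above: the property it states, R(Alpha) = | < TangentF_X(dX) , Y > - < dX , AdjointF_X(Y) > |, must stay at the precision of the computation for every Alpha. Below is a minimal standalone NumPy sketch of that check, assuming a purely linear operator F(x) = A x so that the tangent is A and the adjoint is A.T; the names A, X, dX0 and Alpha are local to the example and are not taken from the ADAO sources.

    # Minimal sketch of the adjoint property printed by AdjointTest, for a
    # linear operator F(x) = A x whose tangent is A and whose adjoint is A.T.
    import numpy

    numpy.random.seed(1234)
    A   = numpy.random.rand(5, 3)                   # stand-in for the calculation code F
    X   = numpy.random.rand(3)                      # nominal point
    Y   = numpy.dot(A, X)                           # default choice Y = F(X)
    dX0 = X * numpy.random.normal(0., 1., X.shape)  # one reading of "dX0 = Normal(0,X)"

    for Alpha in (1.e0, 1.e-4, 1.e-8):
        dX = Alpha * dX0
        # R(Alpha) = | < TangentF_X(dX) , Y > - < dX , AdjointF_X(Y) > |
        R  = abs( numpy.dot(numpy.dot(A, dX), Y) - numpy.dot(dX, numpy.dot(A.T, Y)) )
        print("Alpha = %.1e   R(Alpha) = %.3e"%(Alpha, R))

In double precision R(Alpha) stays at the level of machine precision for every Alpha, which is the behaviour the corrected sentence of the docstring describes for the tangent and adjoint operators supplied to the algorithm.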