From: Jean-Philippe ARGAUD
Date: Mon, 25 Jun 2012 14:51:05 +0000 (+0200)
Subject: Adding adjoint checker and internal tests
X-Git-Tag: V6_6_0~47
X-Git-Url: http://git.salome-platform.org/gitweb/?a=commitdiff_plain;h=8c2f7801545dda628ef4f1a726744a998e472466;p=modules%2Fadao.git

Adding adjoint checker and internal tests
---

diff --git a/examples/daSalome/test009_ADAO_Simple_GradientTest.comm b/examples/daSalome/test009_ADAO_Simple_GradientTest.comm
new file mode 100644
index 0000000..dddb3e6
--- /dev/null
+++ b/examples/daSalome/test009_ADAO_Simple_GradientTest.comm
@@ -0,0 +1,13 @@
+
+CHECKING_STUDY(Study_name='Test',
+               Debug=0,
+               Algorithm='GradientTest',
+               CheckingPoint=_F(Stored=0,
+                                INPUT_TYPE='Vector',
+                                data=_F(FROM='String',
+                                        STRING='1 1 1',),),
+               ObservationOperator=_F(Stored=0,
+                                      INPUT_TYPE='Matrix',
+                                      data=_F(FROM='String',
+                                              STRING='1 0 0 ; 0 1 0 ; 0 0 1 ; 1 0 0 ; 0 1 0 ; 0 0 1',),),);
+#CHECKSUM:68032af53e12f34f4d709332ef02b294 -:FIN CHECKSUM
\ No newline at end of file
diff --git a/examples/daSalome/test010_ADAO_Simple_AdjointTest.comm b/examples/daSalome/test010_ADAO_Simple_AdjointTest.comm
new file mode 100644
index 0000000..aac3dc0
--- /dev/null
+++ b/examples/daSalome/test010_ADAO_Simple_AdjointTest.comm
@@ -0,0 +1,13 @@
+
+CHECKING_STUDY(Study_name='Test',
+               Debug=0,
+               Algorithm='AdjointTest',
+               CheckingPoint=_F(Stored=0,
+                                INPUT_TYPE='Vector',
+                                data=_F(FROM='String',
+                                        STRING='1 1 1',),),
+               ObservationOperator=_F(Stored=0,
+                                      INPUT_TYPE='Matrix',
+                                      data=_F(FROM='String',
+                                              STRING='1 0 0 ; 0 1 0 ; 0 0 1 ; 1 0 0 ; 0 1 0 ; 0 0 1',),),);
+#CHECKSUM:f57c937e2c8bbc8e3f96a4b0ab10b907 -:FIN CHECKSUM
\ No newline at end of file
diff --git a/src/daComposant/daAlgorithms/AdjointTest.py b/src/daComposant/daAlgorithms/AdjointTest.py
new file mode 100644
index 0000000..2eeceab
--- /dev/null
+++ b/src/daComposant/daAlgorithms/AdjointTest.py
@@ -0,0 +1,178 @@
+#-*-coding:iso-8859-1-*-
+#
+# Copyright (C) 2008-2012 EDF R&D
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+
+import logging
+from daCore import BasicObjects, PlatformInfo
+m = PlatformInfo.SystemUsage()
+
+import numpy
+
+# ==============================================================================
+class ElementaryAlgorithm(BasicObjects.Algorithm):
+    def __init__(self):
+        BasicObjects.Algorithm.__init__(self, "ADJOINTTEST")
+        self.defineRequiredParameter(
+            name     = "ResiduFormula",
+            default  = "ScalarProduct",
+            typecast = str,
+            message  = "Formule de résidu utilisée",
+            listval  = ["ScalarProduct"],
+            )
+        self.defineRequiredParameter(
+            name     = "EpsilonMinimumExponent",
+            default  = -8,
+            typecast = int,
+            message  = "Exposant minimal en puissance de 10 pour le multiplicateur d'incrément",
+            minval   = -20,
+            maxval   = 0,
+            )
+        self.defineRequiredParameter(
+            name     = "InitialDirection",
+            default  = [],
+            typecast = list,
+            message  = "Direction initiale de la dérivée directionnelle autour du point nominal",
+            )
+        self.defineRequiredParameter(
+            name     = "AmplitudeOfInitialDirection",
+            default  = 1.,
+            typecast = float,
+            message  = "Amplitude de la direction initiale de la dérivée directionnelle autour du point nominal",
+            )
+        self.defineRequiredParameter(
+            name     = "SetSeed",
+            typecast = numpy.random.seed,
+            message  = "Graine fixée pour le générateur aléatoire",
+            )
+        self.defineRequiredParameter(
+            name     = "ResultTitle",
+            default  = "",
+            typecast = str,
+            message  = "Titre du tableau et de la figure",
+            )
+
+    def run(self, Xb=None, Y=None, H=None, M=None, R=None, B=None, Q=None, Parameters=None):
+        logging.debug("%s Lancement"%self._name)
+        logging.debug("%s Taille mémoire utilisée de %.1f Mo"%(self._name, m.getUsedMemory("Mo")))
+        #
+        # Paramètres de pilotage
+        # ----------------------
+        self.setParameters(Parameters)
+        #
+        # Opérateur d'observation
+        # -----------------------
+        Hm = H["Direct"].appliedTo
+        Ht = H["Tangent"].appliedInXTo
+        Ha = H["Adjoint"].appliedInXTo
+        #
+        # Construction des perturbations
+        # ------------------------------
+        Perturbations = [ 10**i for i in xrange(self._parameters["EpsilonMinimumExponent"],1) ]
+        Perturbations.reverse()
+        #
+        # Calcul du point courant
+        # -----------------------
+        X = numpy.asmatrix(Xb).flatten().T
+        NormeX = numpy.linalg.norm( X )
+        if Y is None:
+            Y = numpy.asmatrix( Hm( X ) ).flatten().T
+        Y = numpy.asmatrix(Y).flatten().T
+        NormeY = numpy.linalg.norm( Y )
+        #
+        # Fabrication de la direction de l'incrément dX
+        # ----------------------------------------------
+        if len(self._parameters["InitialDirection"]) == 0:
+            dX0 = []
+            for v in X.A1:
+                if abs(v) > 1.e-8:
+                    dX0.append( numpy.random.normal(0.,abs(v)) )
+                else:
+                    dX0.append( numpy.random.normal(0.,abs(X.mean())) )
+        else:
+            dX0 = numpy.asmatrix(self._parameters["InitialDirection"]).flatten()
+        #
+        dX0 = float(self._parameters["AmplitudeOfInitialDirection"]) * numpy.matrix( dX0 ).T
+        #
+        # Utilisation de F(X) si aucune observation n'est donnee
+        # ------------------------------------------------------
+        #
+        # Entete des resultats
+        # --------------------
+        if self._parameters["ResiduFormula"] == "ScalarProduct":
+            __doc__ = """
+            On observe le residu qui est la difference de deux produits scalaires :
+
+              R(Alpha) = | < TangentF_X(dX) , Y > - < dX , AdjointF_X(Y) > |
+
+            qui doit rester constamment egal a zero a la precision du calcul.
+            On prend dX0 = Normal(0,X) et dX = Alpha*dX0. F est le code de calcul.
+            Y doit etre dans l'image de F. S'il n'est pas donne, on prend Y = F(X).
+            """
+        else:
+            __doc__ = ""
+        #
+        msgs  = " ====" + "="*len(self._parameters["ResultTitle"]) + "====\n"
+        msgs += " " + self._parameters["ResultTitle"] + "\n"
+        msgs += " ====" + "="*len(self._parameters["ResultTitle"]) + "====\n"
+        msgs += __doc__
+        #
+        msg = "  i    Alpha      ||X||      ||Y||      ||dX||     R(Alpha)  "
+        nbtirets = len(msg)
+        msgs += "\n" + "-"*nbtirets
+        msgs += "\n" + msg
+        msgs += "\n" + "-"*nbtirets
+        #
+        Normalisation = -1
+        #
+        # Boucle sur les perturbations
+        # ----------------------------
+        for i,amplitude in enumerate(Perturbations):
+            logging.debug("%s Etape de calcul numéro %i, avec la perturbation %8.3e"%(self._name, i, amplitude))
+            #
+            dX = amplitude * dX0
+            NormedX = numpy.linalg.norm( dX )
+            #
+            TangentFXdX = numpy.asmatrix( Ht( (X,dX) ) )
+            AdjointFXY  = numpy.asmatrix( Ha( (X,Y) ) )
+            #
+            Residu = abs(float(numpy.dot( TangentFXdX.A1 , Y.A1 ) - numpy.dot( dX.A1 , AdjointFXY.A1 )))
+            #
+            msg = "  %2i  %5.0e   %9.3e  %9.3e  %9.3e  |  %9.3e"%(i,amplitude,NormeX,NormeY,NormedX,Residu)
+            msgs += "\n" + msg
+            #
+            self.StoredVariables["CostFunctionJ"].store( Residu )
+        msgs += "\n" + "-"*nbtirets
+        msgs += "\n"
+        #
+        # Sorties eventuelles
+        # -------------------
+        logging.debug("%s Résultats :\n%s"%(self._name, msgs))
+        print
+        print "Results of adjoint stability check:"
+        print msgs
+        #
+        logging.debug("%s Taille mémoire utilisée de %.1f Mo"%(self._name, m.getUsedMemory("Mo")))
+        logging.debug("%s Terminé"%self._name)
+        #
+        return 0
+
+# ==============================================================================
+if __name__ == "__main__":
+    print '\n AUTODIAGNOSTIC \n'
diff --git a/src/daComposant/daAlgorithms/GradientTest.py b/src/daComposant/daAlgorithms/GradientTest.py
index 03b0680..00a22f6 100644
--- a/src/daComposant/daAlgorithms/GradientTest.py
+++ b/src/daComposant/daAlgorithms/GradientTest.py
@@ -69,7 +69,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             )
         self.defineRequiredParameter(
             name     = "ResultFile",
-            default  = "gradient_result_file",
+            default  = self._name+"_result_file",
             typecast = str,
             message  = "Nom de base (hors extension) des fichiers de sauvegarde des résultats",
             )
@@ -137,23 +137,23 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         # --------------------
         if self._parameters["ResiduFormula"] is "Taylor":
             __doc__ = """
-            On observe le residu issu du développement de Taylor de la fonction H :
+            On observe le residu issu du développement de Taylor de la fonction F :
 
-              R(Alpha) = || H(x+Alpha*dx) - H(x) - Alpha * TangentH_x(dx) ||
+              R(Alpha) = || F(X+Alpha*dX) - F(X) - Alpha * TangentF_X(dX) ||
 
             Ce résidu doit décroître en Alpha**2 selon Alpha.
-            On prend dX0 = Normal(0,X) et dX = Alpha*dX0. H est le code de calcul.
+            On prend dX0 = Normal(0,X) et dX = Alpha*dX0. F est le code de calcul.
             """
         elif self._parameters["ResiduFormula"] is "Norm":
             __doc__ = """
            On observe le residu, qui est une approximation du gradient :
 
-                         || H(X+Alpha*dX) - H(X) ||
+                         || F(X+Alpha*dX) - F(X) ||
              R(Alpha) = ---------------------------
                                    Alpha
 
            qui doit rester constant jusqu'à ce qu'on atteigne la précision du calcul.
-           On prend dX0 = Normal(0,X) et dX = Alpha*dX0. H est le code de calcul.
+           On prend dX0 = Normal(0,X) et dX = Alpha*dX0. F est le code de calcul.
""" else: __doc__ = "" @@ -163,7 +163,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): msgs += " ====" + "="*len(self._parameters["ResultTitle"]) + "====\n" msgs += __doc__ # - msg = " i Alpha ||X|| ||H(X)|| ||H(X+dX)|| ||dX|| ||H(X+dX)-H(X)|| ||H(X+dX)-H(X)||/||dX|| R(Alpha) " + msg = " i Alpha ||X|| ||F(X)|| ||F(X+dX)|| ||dX|| ||F(X+dX)-F(X)|| ||F(X+dX)-F(X)||/||dX|| R(Alpha) " nbtirets = len(msg) msgs += "\n" + "-"*nbtirets msgs += "\n" + msg @@ -214,7 +214,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): Residu = NormedFXsAm if Normalisation < 0 : Normalisation = Residu # - msg = " %2i %5.0e %8.3e %8.3e %8.3e %8.3e %8.3e | %8.3e | %8.3e"%(i,amplitude,NormeX,NormeFX,NormeFXdX,NormedX,NormedFX,NormedFXsdX,Residu) + msg = " %2i %5.0e %9.3e %9.3e %9.3e %9.3e %9.3e | %9.3e | %9.3e"%(i,amplitude,NormeX,NormeFX,NormeFXdX,NormedX,NormedFX,NormedFXsdX,Residu) msgs += "\n" + msg # self.StoredVariables["CostFunctionJ"].store( Residu ) diff --git a/src/daSalome/daYacsSchemaCreator/infos_daComposant.py b/src/daSalome/daYacsSchemaCreator/infos_daComposant.py index 95e3698..04e14af 100644 --- a/src/daSalome/daYacsSchemaCreator/infos_daComposant.py +++ b/src/daSalome/daYacsSchemaCreator/infos_daComposant.py @@ -108,6 +108,11 @@ AlgoDataRequirements["GradientTest"] = [ "ObservationOperator", ] +AlgoDataRequirements["AdjointTest"] = [ + "CheckingPoint", + "ObservationOperator", + ] + AlgoType = {} AlgoType["3DVAR"] = "Optim" AlgoType["Blue"] = "Optim"