From: Jean-Philippe ARGAUD
Date: Mon, 24 Jun 2013 12:46:23 +0000 (+0200)
Subject: Improving test outputs
X-Git-Tag: V7_3_0~26
X-Git-Url: http://git.salome-platform.org/gitweb/?a=commitdiff_plain;h=f4ef5db3097b62e3efcacf824caa906b2e8f1500;p=modules%2Fadao.git

Improving test outputs
---

diff --git a/src/daComposant/daAlgorithms/AdjointTest.py b/src/daComposant/daAlgorithms/AdjointTest.py
index 9734bb9..66f25fb 100644
--- a/src/daComposant/daAlgorithms/AdjointTest.py
+++ b/src/daComposant/daAlgorithms/AdjointTest.py
@@ -73,23 +73,16 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         logging.debug("%s Lancement"%self._name)
         logging.debug("%s Taille mémoire utilisée de %.1f Mo"%(self._name, m.getUsedMemory("M")))
         #
-        # Paramètres de pilotage
-        # ----------------------
         self.setParameters(Parameters)
         #
-        # Opérateurs
-        # ----------
         Hm = HO["Direct"].appliedTo
         Ht = HO["Tangent"].appliedInXTo
         Ha = HO["Adjoint"].appliedInXTo
         #
-        # Construction des perturbations
-        # ------------------------------
+        # ----------
         Perturbations = [ 10**i for i in xrange(self._parameters["EpsilonMinimumExponent"],1) ]
         Perturbations.reverse()
         #
-        # Calcul du point courant
-        # -----------------------
         X      = numpy.asmatrix(numpy.ravel( Xb )).T
         NormeX = numpy.linalg.norm( X )
         if Y is None:
@@ -97,8 +90,6 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         Y = numpy.asmatrix(numpy.ravel( Y )).T
         NormeY = numpy.linalg.norm( Y )
         #
-        # Fabrication de la direction de l'incrément dX
-        # ----------------------------------------------
         if len(self._parameters["InitialDirection"]) == 0:
             dX0 = []
             for v in X.A1:
@@ -111,11 +102,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         #
         dX0 = float(self._parameters["AmplitudeOfInitialDirection"]) * numpy.matrix( dX0 ).T
         #
-        # Utilisation de F(X) si aucune observation n'est donnee
-        # ------------------------------------------------------
-        #
-        # Entete des resultats
-        # --------------------
+        # ----------
         if self._parameters["ResiduFormula"] is "ScalarProduct":
             __doc__ = """
             On observe le residu qui est la difference de deux produits scalaires :
@@ -145,8 +132,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         #
             Normalisation= -1
         #
-        # Boucle sur les perturbations
-        # ----------------------------
+        # ----------
         for i,amplitude in enumerate(Perturbations):
             dX      = amplitude * dX0
             NormedX = numpy.linalg.norm( dX )
@@ -163,8 +149,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             msgs += "\n" + "-"*nbtirets
             msgs += "\n"
         #
-        # Sorties eventuelles
-        # -------------------
+        # ----------
         print
         print "Results of adjoint stability check:"
         print msgs
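
For reference, the check that AdjointTest automates above is the classical scalar-product identity: for the tangent TangentF_X and the adjoint AdjointF_X of an operator F, the residue R = | < TangentF_X(dX), Y > - < dX, AdjointF_X(Y) > | must vanish up to rounding. The following minimal sketch, not part of this commit, shows the residue for a plain linear operator; the matrix M and the vectors dX and Y are made-up examples:

    import numpy

    M  = numpy.array([[2., 0.], [1., 3.]])   # hypothetical linear operator F
    dX = numpy.array([0.5, -0.2])            # increment direction
    Y  = numpy.array([0.3, 0.7])             # observation-like vector

    TdX = M.dot(dX)     # TangentF_X(dX); equal to F itself for a linear operator
    AY  = M.T.dot(Y)    # AdjointF_X(Y), here simply the transpose

    # Residue R = | < TangentF_X(dX), Y > - < dX, AdjointF_X(Y) > |
    R = abs(numpy.dot(TdX, Y) - numpy.dot(dX, AY))
    print("Adjoint residue: %.2e (zero up to rounding when the adjoint is exact)"%R)
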
diff --git a/src/daComposant/daAlgorithms/FunctionTest.py b/src/daComposant/daAlgorithms/FunctionTest.py
index fd66e56..f19ec16 100644
--- a/src/daComposant/daAlgorithms/FunctionTest.py
+++ b/src/daComposant/daAlgorithms/FunctionTest.py
@@ -36,25 +36,24 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             typecast = str,
             message  = "Titre du tableau et de la figure",
             )
+        self.defineRequiredParameter(
+            name     = "SetDebug",
+            default  = True,
+            typecast = bool,
+            message  = "Activation du mode debug lors de l'exécution",
+            )

     def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
         logging.debug("%s Lancement"%self._name)
         logging.debug("%s Taille mémoire utilisée de %.1f Mo"%(self._name, m.getUsedMemory("M")))
         #
-        # Paramètres de pilotage
-        # ----------------------
         self.setParameters(Parameters)
         #
-        # Opérateur
-        # ---------
         Hm = HO["Direct"].appliedTo
         #
-        # Calcul du point nominal
-        # -----------------------
         Xn = numpy.asmatrix(numpy.ravel( Xb )).T
         #
-        # Test
-        # ----
+        # ----------
         if len(self._parameters["ResultTitle"]) > 0:
             msg  = "     ====" + "="*len(self._parameters["ResultTitle"]) + "====\n"
             msg += "        " + self._parameters["ResultTitle"] + "\n"
@@ -73,12 +72,17 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             msg += "          L2 norm of vector..: %.5e\n"%numpy.linalg.norm( Xn )
         print(msg)
         #
-        CUR_LEVEL = logging.getLogger().getEffectiveLevel()
-        logging.getLogger().setLevel(logging.DEBUG)
-        print( "===> Launching direct operator evaluation, activating debug\n")
+        if self._parameters["SetDebug"]:
+            CUR_LEVEL = logging.getLogger().getEffectiveLevel()
+            logging.getLogger().setLevel(logging.DEBUG)
+            print("===> Beginning of evaluation, activating debug\n")
+        else:
+            print("===> Beginning of evaluation, without activating debug\n")
+        print("     %s\n"%("-"*75,))
+        #
+        print("===> Launching direct operator evaluation\n")
         Y = Hm( Xn )
-        print("\n===> End of direct operator evaluation, deactivating debug\n")
-        logging.getLogger().setLevel(CUR_LEVEL)
+        print("\n===> End of direct operator evaluation\n")
         #
         msg  = "===> Information after launching:\n"
         msg += "     ----------------------------\n"
@@ -92,6 +96,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         msg += "          L2 norm of vector..: %.5e\n"%numpy.linalg.norm( Y )
         print(msg)
         #
+        print("     %s\n"%("-"*75,))
+        if self._parameters["SetDebug"]:
+            print("===> End evaluation, deactivating debug if necessary\n")
+            logging.getLogger().setLevel(CUR_LEVEL)
+        #
         logging.debug("%s Taille mémoire utilisée de %.1f Mo"%(self._name, m.getUsedMemory("M")))
         logging.debug("%s Terminé"%self._name)
         #
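
The new SetDebug parameter above wraps the operator evaluation in a save/raise/restore of the root logger level, using only logging.getLogger().getEffectiveLevel() and setLevel(). A minimal standalone sketch of that pattern, condensed into a hypothetical helper (the function name and arguments are illustrative, not part of the commit):

    import logging

    def evaluate_with_optional_debug(operator, x, set_debug=True):
        # Mirror the SetDebug switch: raise the root logger to DEBUG
        # around the evaluation, then restore the caller's level.
        if set_debug:
            cur_level = logging.getLogger().getEffectiveLevel()
            logging.getLogger().setLevel(logging.DEBUG)
        y = operator(x)
        if set_debug:
            logging.getLogger().setLevel(cur_level)
        return y
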
diff --git a/src/daComposant/daAlgorithms/GradientTest.py b/src/daComposant/daAlgorithms/GradientTest.py
index 84d0dfa..2e6b728 100644
--- a/src/daComposant/daAlgorithms/GradientTest.py
+++ b/src/daComposant/daAlgorithms/GradientTest.py
@@ -92,30 +92,21 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         logging.debug("%s Lancement"%self._name)
         logging.debug("%s Taille mémoire utilisée de %.1f Mo"%(self._name, m.getUsedMemory("M")))
         #
-        # Paramètres de pilotage
-        # ----------------------
         self.setParameters(Parameters)
         #
-        # Opérateurs
-        # ----------
         Hm = HO["Direct"].appliedTo
         if self._parameters["ResiduFormula"] is "Taylor":
             Ht = HO["Tangent"].appliedInXTo
         #
-        # Construction des perturbations
-        # ------------------------------
+        # ----------
         Perturbations = [ 10**i for i in xrange(self._parameters["EpsilonMinimumExponent"],1) ]
         Perturbations.reverse()
         #
-        # Calcul du point courant
-        # -----------------------
         X       = numpy.asmatrix(numpy.ravel( Xb )).T
         FX      = numpy.asmatrix(numpy.ravel( Hm( X ) )).T
         NormeX  = numpy.linalg.norm( X )
         NormeFX = numpy.linalg.norm( FX )
         #
-        # Fabrication de la direction de l'incrément dX
-        # ----------------------------------------------
         if len(self._parameters["InitialDirection"]) == 0:
             dX0 = []
             for v in X.A1:
@@ -128,14 +119,11 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         #
         dX0 = float(self._parameters["AmplitudeOfInitialDirection"]) * numpy.matrix( dX0 ).T
         #
-        # Calcul du gradient au point courant X pour l'incrément dX
-        # ---------------------------------------------------------
         if self._parameters["ResiduFormula"] is "Taylor":
             GradFxdX = Ht( (X, dX0) )
             GradFxdX = numpy.asmatrix(numpy.ravel( GradFxdX )).T
         #
-        # Entete des resultats
-        # --------------------
+        # ----------
         if self._parameters["ResiduFormula"] is "Taylor":
             __doc__ = """
             On observe le residu issu du développement de Taylor de la fonction F,
@@ -192,8 +180,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
         NormesdFXsAm = []
         NormesdFXGdX = []
         #
-        # Boucle sur les perturbations
-        # ----------------------------
+        # ----------
         for i,amplitude in enumerate(Perturbations):
             dX = amplitude * dX0
             #
@@ -234,8 +221,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm):
             msgs += "\n" + "-"*nbtirets
             msgs += "\n"
         #
-        # Sorties eventuelles
-        # -------------------
+        # ----------
         print
         print "Results of gradient stability check:"
         print msgs
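
For reference, the Taylor residue observed by GradientTest above is R(Amplitude) = || F(X + Amplitude*dX) - F(X) - Amplitude * GradientF_X(dX) ||, which decreases like Amplitude**2 when the tangent is exact. A minimal standalone sketch of the perturbation loop, not part of this commit and with a made-up operator F and its Jacobian:

    import numpy

    def F(x):                        # hypothetical nonlinear operator
        return numpy.array([ x[0]**2, x[0]*x[1] ])

    def TangentF(x, dx):             # its exact tangent: Jacobian applied to dx
        J = numpy.array([[ 2.*x[0], 0. ], [ x[1], x[0] ]])
        return J.dot(dx)

    X, dX0   = numpy.array([1., 2.]), numpy.array([0.3, -0.1])
    FX       = F(X)
    GradFxdX = TangentF(X, dX0)
    for amplitude in [ 10.**(-i) for i in range(1, 6) ]:
        Residu = numpy.linalg.norm( F(X + amplitude*dX0) - FX - amplitude*GradFxdX )
        # Residu/Amplitude**2 stays roughly constant when the tangent is correct
        print("Amplitude %.0e : Residu %.3e (Residu/Amplitude**2 = %.3e)"%(amplitude, Residu, Residu/amplitude**2))

Note that the surrounding code selects the formula with `self._parameters["ResiduFormula"] is "Taylor"`; the `is` identity test on strings only works when CPython happens to intern both literals, so an equality test with `==` is the robust form.
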