From: Jean-Philippe ARGAUD
Date: Mon, 24 Jun 2013 13:51:43 +0000 (+0200)
Subject: Adding function repetition algorithm
X-Git-Tag: V7_3_0~25
X-Git-Url: http://git.salome-platform.org/gitweb/?a=commitdiff_plain;h=9184fbf9ad6469ecd9b1f2cc556d333203e9c60c;p=modules%2Fadao.git

Adding function repetition algorithm
---

diff --git a/doc/en/reference.rst b/doc/en/reference.rst
index 731e12c..f2261fb 100644
--- a/doc/en/reference.rst
+++ b/doc/en/reference.rst
@@ -669,6 +669,7 @@ Options and required commands for checking algorithms
 .. index:: single: FunctionTest
 .. index:: single: GradientTest
 .. index:: single: LinearityTest
+.. index:: single: FunctionRepetitionTest
 .. index:: single: AlgorithmParameters
 .. index:: single: AmplitudeOfInitialDirection
@@ -728,7 +729,23 @@ each algorithm, the required commands/keywords are given, being described in
     *"CheckingPoint", "ObservationOperator"*
 
-  No option
+  SetDebug
+    This key indicates whether to activate the debug mode during the function
+    evaluation. The default is True, and the choices are True or False.
+
+**"FunctionRepetitionTest"**
+
+  *Required commands*
+    *"CheckingPoint",
+    "ObservationOperator"*
+
+  NumberOfRepetition
+    This key indicates the number of times the function evaluation is repeated.
+    The default is 2.
+
+  SetDebug
+    This key indicates whether to activate the debug mode during the function
+    evaluation. The default is True, and the choices are True or False.
 
 **"GradientTest"**
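
For illustration only, and not part of the committed patch: with the two keys documented above, the new check could be parameterized through an "AlgorithmParameters" Python dictionary such as the following sketch (the key names come from the documentation hunk above, the values themselves are arbitrary):

    # Hypothetical parameters dictionary for the FunctionRepetitionTest check
    AlgorithmParameters = {
        "NumberOfRepetition" : 5,     # repeat the function evaluation 5 times
        "SetDebug"           : False, # keep the standard logging level
        }
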
diff --git a/src/daComposant/daAlgorithms/FunctionRepetitionTest.py b/src/daComposant/daAlgorithms/FunctionRepetitionTest.py
new file mode 100644
index 0000000..53eefef
--- /dev/null
+++ b/src/daComposant/daAlgorithms/FunctionRepetitionTest.py
@@ -0,0 +1,123 @@
+#-*-coding:iso-8859-1-*-
+#
+# Copyright (C) 2008-2013 EDF R&D
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
+
+import logging
+from daCore import BasicObjects, PlatformInfo
+m = PlatformInfo.SystemUsage()
+
+import numpy
+
+# ==============================================================================
+class ElementaryAlgorithm(BasicObjects.Algorithm):
+    def __init__(self):
+        BasicObjects.Algorithm.__init__(self, "FUNCTIONREPETITIONTEST")
+        self.defineRequiredParameter(
+            name     = "NumberOfRepetition",
+            default  = 2,
+            typecast = int,
+            message  = "Nombre de fois où l'exécution de la fonction est répétée",
+            minval   = 1,
+            )
+        self.defineRequiredParameter(
+            name     = "ResultTitle",
+            default  = "",
+            typecast = str,
+            message  = "Titre du tableau et de la figure",
+            )
+        self.defineRequiredParameter(
+            name     = "SetDebug",
+            default  = True,
+            typecast = bool,
+            message  = "Activation du mode debug lors de l'exécution",
+            )
+
+    def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
+        logging.debug("%s Lancement"%self._name)
+        logging.debug("%s Taille mémoire utilisée de %.1f Mo"%(self._name, m.getUsedMemory("M")))
+        #
+        self.setParameters(Parameters)
+        #
+        Hm = HO["Direct"].appliedTo
+        #
+        Xn = numpy.asmatrix(numpy.ravel( Xb )).T
+        #
+        # ----------
+        if len(self._parameters["ResultTitle"]) > 0:
+            msg  = "     ====" + "="*len(self._parameters["ResultTitle"]) + "====\n"
+            msg += "     " + self._parameters["ResultTitle"] + "\n"
+            msg += "     ====" + "="*len(self._parameters["ResultTitle"]) + "====\n"
+            print("%s"%msg)
+        #
+        msg  = "===> Information before launching:\n"
+        msg += "     -----------------------------\n"
+        msg += "     Characteristics of input parameter X, internally converted:\n"
+        msg += "       Type...............: %s\n"%type( Xn )
+        msg += "       Length of vector...: %i\n"%max(numpy.matrix( Xn ).shape)
+        msg += "       Minimum value......: %.5e\n"%numpy.min( Xn )
+        msg += "       Maximum value......: %.5e\n"%numpy.max( Xn )
+        msg += "       Mean of vector.....: %.5e\n"%numpy.mean( Xn )
+        msg += "       Standard deviation.: %.5e\n"%numpy.std( Xn )
+        msg += "       L2 norm of vector..: %.5e\n"%numpy.linalg.norm( Xn )
+        print(msg)
+        #
+        if self._parameters["SetDebug"]:
+            CUR_LEVEL = logging.getLogger().getEffectiveLevel()
+            logging.getLogger().setLevel(logging.DEBUG)
+            print("===> Beginning of evaluation, activating debug\n")
+        else:
+            print("===> Beginning of evaluation, without activating debug\n")
+        #
+        # ----------
+        for i in range(self._parameters["NumberOfRepetition"]):
+            print("     %s\n"%("-"*75,))
+            print("===> Repetition step number %i on a total of %i\n"%(i+1,self._parameters["NumberOfRepetition"]))
+            print("===> Launching direct operator evaluation\n")
+            #
+            Y = Hm( Xn )
+            #
+            print("\n===> End of direct operator evaluation\n")
+            #
+            msg  = "===> Information after launching:\n"
+            msg += "     ----------------------------\n"
+            msg += "     Characteristics of output parameter Y, to compare to other calculations:\n"
+            msg += "       Type...............: %s\n"%type( Y )
+            msg += "       Length of vector...: %i\n"%max(numpy.matrix( Y ).shape)
+            msg += "       Minimum value......: %.5e\n"%numpy.min( Y )
+            msg += "       Maximum value......: %.5e\n"%numpy.max( Y )
+            msg += "       Mean of vector.....: %.5e\n"%numpy.mean( Y )
+            msg += "       Standard deviation.: %.5e\n"%numpy.std( Y )
+            msg += "       L2 norm of vector..: %.5e\n"%numpy.linalg.norm( Y )
+            print(msg)
+        #
+        print("     %s\n"%("-"*75,))
+        if self._parameters["SetDebug"]:
+            print("===> End evaluation, deactivating debug if necessary\n")
+            logging.getLogger().setLevel(CUR_LEVEL)
+        #
+        logging.debug("%s Taille mémoire utilisée de %.1f Mo"%(self._name, m.getUsedMemory("M")))
+        logging.debug("%s Terminé"%self._name)
+        #
+        return 0
+
+# ==============================================================================
+if __name__ == "__main__":
+    print("\n AUTODIAGNOSTIC \n")
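
As a further illustrative sketch, also outside the committed patch: assuming a SALOME/ADAO environment where the daCore and daAlgorithms packages are importable (the import path and the _ToyOperator wrapper below are assumptions, not part of the ADAO API), the new algorithm can be exercised directly by wrapping any callable behind the "appliedTo" interface expected for HO["Direct"]:

    import numpy
    # Assumed import path; in the source tree the module lives under
    # src/daComposant/daAlgorithms/FunctionRepetitionTest.py
    from daAlgorithms.FunctionRepetitionTest import ElementaryAlgorithm

    class _ToyOperator(object):
        "Hypothetical stand-in exposing the appliedTo method used as HO['Direct']"
        def appliedTo(self, X):
            return numpy.ravel(X)**2  # purely illustrative quadratic operator

    algo = ElementaryAlgorithm()
    algo.run(
        Xb         = numpy.array([1., 2., 3.]),
        HO         = {"Direct" : _ToyOperator()},
        Parameters = {"NumberOfRepetition" : 3, "SetDebug" : False},
        )

With these settings the direct operator is evaluated three times on the same checking point, and the printed statistics of Y should then be directly comparable from one repetition to the next.
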
%.5e\n"%numpy.std( Y ) + msg += " L2 norm of vector..: %.5e\n"%numpy.linalg.norm( Y ) + print(msg) + # + print(" %s\n"%("-"*75,)) + if self._parameters["SetDebug"]: + print("===> End evaluation, deactivating debug if necessary\n") + logging.getLogger().setLevel(CUR_LEVEL) + # + logging.debug("%s Taille mémoire utilisée de %.1f Mo"%(self._name, m.getUsedMemory("M"))) + logging.debug("%s Terminé"%self._name) + # + return 0 + +# ============================================================================== +if __name__ == "__main__": + print '\n AUTODIAGNOSTIC \n' diff --git a/src/daSalome/daYacsSchemaCreator/infos_daComposant.py b/src/daSalome/daYacsSchemaCreator/infos_daComposant.py index 8b6d787..2b3d6b7 100644 --- a/src/daSalome/daYacsSchemaCreator/infos_daComposant.py +++ b/src/daSalome/daYacsSchemaCreator/infos_daComposant.py @@ -73,6 +73,7 @@ CheckAlgos = [ "LinearityTest", "GradientTest", "AdjointTest", + "FunctionRepetitionTest", ] AlgoDataRequirements = {} @@ -140,6 +141,10 @@ AlgoDataRequirements["AdjointTest"] = [ "CheckingPoint", "ObservationOperator", ] +AlgoDataRequirements["FunctionRepetitionTest"] = [ + "CheckingPoint", + "ObservationOperator", + ] AlgoType = {} AlgoType["3DVAR"] = "Optim"