From: Jean-Philippe ARGAUD Date: Wed, 16 Nov 2016 21:23:28 +0000 (+0100) Subject: Management of mean/std computation precision and messages X-Git-Tag: V8_2_0~7 X-Git-Url: http://git.salome-platform.org/gitweb/?a=commitdiff_plain;h=18f090640980c7e74adbf5fb4721fc3c86da5d17;p=modules%2Fadao.git Management of mean/std computation precision and messages --- diff --git a/bin/AdaoCatalogGenerator.py b/bin/AdaoCatalogGenerator.py index 7893cee..b996a5a 100644 --- a/bin/AdaoCatalogGenerator.py +++ b/bin/AdaoCatalogGenerator.py @@ -406,12 +406,12 @@ for assim_data_input_name in infos.AssimDataDict.keys(): decl_choices = "" decl_opts = "" if infos.AssimDataDefaultDict[assim_data_input_name] in infos.StoredAssimData: - storage = " Stored = SIMP(statut=\"o\", typ = \"I\", into=(0, 1), defaut=0, fr=\"Choix de stockage interne ou non du concept parent\", ang=\"Choice of the storage or not of the parent concept\")," + storage = " Stored = SIMP(statut=\"f\", typ = \"I\", into=(0, 1), defaut=0, fr=\"Choix de stockage interne ou non du concept parent\", ang=\"Choice of the storage or not of the parent concept\")," for choice in infos.AssimDataDict[assim_data_input_name]: choices += "\"" + choice + "\", " decl_choices += assim_data_choice.substitute(choice_name = choice) if choice in infos.StoredAssimData: - storage = " Stored = SIMP(statut=\"o\", typ = \"I\", into=(0, 1), defaut=0, fr=\"Choix de stockage interne ou non du concept parent\", ang=\"Choice of the storage or not of the parent concept\")," + storage = " Stored = SIMP(statut=\"f\", typ = \"I\", into=(0, 1), defaut=0, fr=\"Choix de stockage interne ou non du concept parent\", ang=\"Choice of the storage or not of the parent concept\")," default_choice = "\"" + infos.AssimDataDefaultDict[assim_data_input_name] + "\"" mem_file.write(assim_data_method.substitute(assim_name = assim_data_input_name, diff --git a/src/daComposant/daAlgorithms/AdjointTest.py b/src/daComposant/daAlgorithms/AdjointTest.py index 89cd6b2..8e772cb 100644 --- a/src/daComposant/daAlgorithms/AdjointTest.py +++ b/src/daComposant/daAlgorithms/AdjointTest.py @@ -123,6 +123,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): qui doit rester constamment egal a zero a la precision du calcul. On prend dX0 = Normal(0,X) et dX = Alpha*dX0. F est le code de calcul. Y doit etre dans l'image de F. S'il n'est pas donne, on prend Y = F(X). + + Remarque : les nombres inferieurs a 1.e-16 (environ) representent un zero + a la precision machine. 
""" # if len(self._parameters["ResultTitle"]) > 0: diff --git a/src/daComposant/daAlgorithms/FunctionTest.py b/src/daComposant/daAlgorithms/FunctionTest.py index 9d08a9e..58f3f3d 100644 --- a/src/daComposant/daAlgorithms/FunctionTest.py +++ b/src/daComposant/daAlgorithms/FunctionTest.py @@ -21,8 +21,9 @@ # Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D import logging -from daCore import BasicObjects +from daCore import BasicObjects, PlatformInfo import numpy, copy +mfp = PlatformInfo.PlatformInfo().MaximumPrecision() # ============================================================================== class ElementaryAlgorithm(BasicObjects.Algorithm): @@ -86,8 +87,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): msg += (" Lenght of vector...: %i\n")%max(numpy.matrix( Xn ).shape) msg += (" Minimum value......: %."+str(_p)+"e\n")%numpy.min( Xn ) msg += (" Maximum value......: %."+str(_p)+"e\n")%numpy.max( Xn ) - msg += (" Mean of vector.....: %."+str(_p)+"e\n")%numpy.mean( Xn ) - msg += (" Standard error.....: %."+str(_p)+"e\n")%numpy.std( Xn ) + msg += (" Mean of vector.....: %."+str(_p)+"e\n")%numpy.mean( Xn, dtype=mfp ) + msg += (" Standard error.....: %."+str(_p)+"e\n")%numpy.std( Xn, dtype=mfp ) msg += (" L2 norm of vector..: %."+str(_p)+"e\n")%numpy.linalg.norm( Xn ) print(msg) # @@ -120,8 +121,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): msg += (" Lenght of vector...: %i\n")%max(numpy.matrix( Yn ).shape) msg += (" Minimum value......: %."+str(_p)+"e\n")%numpy.min( Yn ) msg += (" Maximum value......: %."+str(_p)+"e\n")%numpy.max( Yn ) - msg += (" Mean of vector.....: %."+str(_p)+"e\n")%numpy.mean( Yn ) - msg += (" Standard error.....: %."+str(_p)+"e\n")%numpy.std( Yn ) + msg += (" Mean of vector.....: %."+str(_p)+"e\n")%numpy.mean( Yn, dtype=mfp ) + msg += (" Standard error.....: %."+str(_p)+"e\n")%numpy.std( Yn, dtype=mfp ) msg += (" L2 norm of vector..: %."+str(_p)+"e\n")%numpy.linalg.norm( Yn ) print(msg) if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]: @@ -142,27 +143,28 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): if self._parameters["NumberOfRepetition"] > 1: msg = (" %s\n"%("-"*75,)) msg += ("\n===> Statistical analysis of the outputs obtained throught repeated evaluations\n") + msg += ("\n (Remark: numbers that are (about) under 1.e-16 represent 0 to machine precision)\n") Yy = numpy.array( Ys ) msg += ("\n Characteristics of the whole set of outputs Y:\n") msg += (" Number of evaluations.........................: %i\n")%len( Ys ) msg += (" Minimum value of the whole set of outputs.....: %."+str(_p)+"e\n")%numpy.min( Yy ) msg += (" Maximum value of the whole set of outputs.....: %."+str(_p)+"e\n")%numpy.max( Yy ) - msg += (" Mean of vector of the whole set of outputs....: %."+str(_p)+"e\n")%numpy.mean( Yy ) - msg += (" Standard error of the whole set of outputs....: %."+str(_p)+"e\n")%numpy.std( Yy ) - Ym = numpy.mean( numpy.array( Ys ), axis=0 ) + msg += (" Mean of vector of the whole set of outputs....: %."+str(_p)+"e\n")%numpy.mean( Yy, dtype=mfp ) + msg += (" Standard error of the whole set of outputs....: %."+str(_p)+"e\n")%numpy.std( Yy, dtype=mfp ) + Ym = numpy.mean( numpy.array( Ys ), axis=0, dtype=mfp ) msg += ("\n Characteristics of the vector Ym, mean of the outputs Y:\n") msg += (" Size of the mean of the outputs...............: %i\n")%Ym.size msg += (" Minimum value of the mean of the outputs......: %."+str(_p)+"e\n")%numpy.min( Ym ) msg += (" Maximum value of the mean of the 
outputs......: %."+str(_p)+"e\n")%numpy.max( Ym ) - msg += (" Mean of the mean of the outputs...............: %."+str(_p)+"e\n")%numpy.mean( Ym ) - msg += (" Standard error of the mean of the outputs.....: %."+str(_p)+"e\n")%numpy.std( Ym ) - Ye = numpy.mean( numpy.array( Ys ) - Ym, axis=0 ) + msg += (" Mean of the mean of the outputs...............: %."+str(_p)+"e\n")%numpy.mean( Ym, dtype=mfp ) + msg += (" Standard error of the mean of the outputs.....: %."+str(_p)+"e\n")%numpy.std( Ym, dtype=mfp ) + Ye = numpy.mean( numpy.array( Ys ) - Ym, axis=0, dtype=mfp ) msg += "\n Characteristics of the mean of the differences between the outputs Y and their mean Ym:\n" msg += (" Size of the mean of the differences...........: %i\n")%Ym.size msg += (" Minimum value of the mean of the differences..: %."+str(_p)+"e\n")%numpy.min( Ye ) msg += (" Maximum value of the mean of the differences..: %."+str(_p)+"e\n")%numpy.max( Ye ) - msg += (" Mean of the mean of the differences...........: %."+str(_p)+"e\n")%numpy.mean( Ye ) - msg += (" Standard error of the mean of the differences.: %."+str(_p)+"e\n")%numpy.std( Ye ) + msg += (" Mean of the mean of the differences...........: %."+str(_p)+"e\n")%numpy.mean( Ye, dtype=mfp ) + msg += (" Standard error of the mean of the differences.: %."+str(_p)+"e\n")%numpy.std( Ye, dtype=mfp ) msg += ("\n %s\n"%("-"*75,)) print(msg) # diff --git a/src/daComposant/daAlgorithms/GradientTest.py b/src/daComposant/daAlgorithms/GradientTest.py index 24c5f00..e4e2fcb 100644 --- a/src/daComposant/daAlgorithms/GradientTest.py +++ b/src/daComposant/daAlgorithms/GradientTest.py @@ -163,6 +163,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): faite dans le calcul du terme GradientF_X. On prend dX0 = Normal(0,X) et dX = Alpha*dX0. F est le code de calcul. + + Remarque : les nombres inferieurs a 1.e-16 (environ) representent un zero + a la precision machine. """ if self._parameters["ResiduFormula"] == "TaylorOnNorm": __entete = " i Alpha ||X|| ||F(X)|| ||F(X+dX)|| ||dX|| ||F(X+dX)-F(X)|| ||F(X+dX)-F(X)||/||dX|| R(Alpha) log( R ) " @@ -188,6 +191,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): grandeur de ||F(X)||. On prend dX0 = Normal(0,X) et dX = Alpha*dX0. F est le code de calcul. + + Remarque : les nombres inferieurs a 1.e-16 (environ) representent un zero + a la precision machine. """ if self._parameters["ResiduFormula"] == "Norm": __entete = " i Alpha ||X|| ||F(X)|| ||F(X+dX)|| ||dX|| ||F(X+dX)-F(X)|| ||F(X+dX)-F(X)||/||dX|| R(Alpha) log( R ) " @@ -201,6 +207,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): qui doit rester constant jusqu'à ce que l'on atteigne la précision du calcul. On prend dX0 = Normal(0,X) et dX = Alpha*dX0. F est le code de calcul. + + Remarque : les nombres inferieurs a 1.e-16 (environ) representent un zero + a la precision machine. """ # if len(self._parameters["ResultTitle"]) > 0: diff --git a/src/daComposant/daAlgorithms/TangentTest.py b/src/daComposant/daAlgorithms/TangentTest.py index 1803ecc..8ff1f8e 100644 --- a/src/daComposant/daAlgorithms/TangentTest.py +++ b/src/daComposant/daAlgorithms/TangentTest.py @@ -158,6 +158,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): l'on atteigne la précision du calcul. On prend dX0 = Normal(0,X) et dX = Alpha*dX0. F est le code de calcul. + + Remarque : les nombres inferieurs a 1.e-16 (environ) representent un zero + a la precision machine. 
""" # if len(self._parameters["ResultTitle"]) > 0: diff --git a/src/daComposant/daCore/AssimilationStudy.py b/src/daComposant/daCore/AssimilationStudy.py index c325445..412e988 100644 --- a/src/daComposant/daCore/AssimilationStudy.py +++ b/src/daComposant/daCore/AssimilationStudy.py @@ -41,7 +41,7 @@ except ImportError: logging.debug("Fail initial import of scipy.optimize") import Persistence from BasicObjects import Operator, Covariance -from PlatformInfo import uniq +import PlatformInfo # ============================================================================== class AssimilationStudy: @@ -97,7 +97,7 @@ class AssimilationStudy: # qui est activée dans Persistence) self.__parent = os.path.abspath(os.path.join(os.path.dirname(__file__),"..")) sys.path.insert(0, self.__parent) - sys.path = uniq( sys.path ) # Conserve en unique exemplaire chaque chemin + sys.path = PlatformInfo.uniq( sys.path ) # Conserve en unique exemplaire chaque chemin # --------------------------------------------------------- def setBackground(self, @@ -465,12 +465,7 @@ class AssimilationStudy: # ----------------------------------------------------------- def setControlModel(self, - asFunction = {"Direct":None, "Tangent":None, "Adjoint":None, - "useApproximatedDerivatives":False, - "withCenteredDF" :False, - "withIncrement" :0.01, - "withdX" :None, - }, + asFunction = None, asMatrix = None, Scheduler = None, toBeStored = False, @@ -945,7 +940,7 @@ class AssimilationStudy: if not os.path.isfile(os.path.join(asPath,"daAlgorithms","__init__.py")): raise ValueError("The given \""+asPath+"/daAlgorithms\" path must contain a file named \"__init__.py\"") sys.path.insert(0, os.path.abspath(asPath)) - sys.path = uniq( sys.path ) # Conserve en unique exemplaire chaque chemin + sys.path = PlatformInfo.uniq( sys.path ) # Conserve en unique exemplaire chaque chemin return 1 def get_diagnostics_main_path(self): @@ -970,7 +965,7 @@ class AssimilationStudy: if not os.path.isfile(os.path.join(asPath,"daDiagnostics","__init__.py")): raise ValueError("The given \""+asPath+"/daDiagnostics\" path must contain a file named \"__init__.py\"") sys.path.insert(0, os.path.abspath(asPath)) - sys.path = uniq( sys.path ) # Conserve en unique exemplaire chaque chemin + sys.path = PlatformInfo.uniq( sys.path ) # Conserve en unique exemplaire chaque chemin return 1 # ----------------------------------------------------------- diff --git a/src/daComposant/daCore/PlatformInfo.py b/src/daComposant/daCore/PlatformInfo.py index 052e463..57438e3 100644 --- a/src/daComposant/daCore/PlatformInfo.py +++ b/src/daComposant/daCore/PlatformInfo.py @@ -108,6 +108,16 @@ class PlatformInfo(object): "Retourne la taille mémoire courante utilisée" return 1 + def MaximumPrecision(self): + "Retourne la precision maximale flottante pour Numpy" + import numpy + try: + x = numpy.array([1.,], dtype='float128') + mfp = 'float128' + except: + mfp = 'float64' + return mfp + def __str__(self): import version as dav return "%s %s (%s)"%(dav.name,dav.version,dav.date) diff --git a/src/daEficas/configuration_ADAO.py b/src/daEficas/configuration_ADAO.py index fee72b5..d23cb8c 100644 --- a/src/daEficas/configuration_ADAO.py +++ b/src/daEficas/configuration_ADAO.py @@ -45,7 +45,8 @@ class CONFIG(configuration.CONFIG_BASE): def __init__(self,appli,repIni): - self.labels_eficas = ['lang'] + # self.labels_eficas = ['lang'] + self.labels_eficas = ['lang','rep_cata','catalogues','closeAutreCommande','closeFrameRechercheCommande','closeEntete','taille'] 
configuration.CONFIG_BASE.__init__(self,appli,repIni) self.rep_user = os.environ["HOME"] diff --git a/src/daEficas/prefs_ADAO.py.in b/src/daEficas/prefs_ADAO.py.in index f91576c..041379d 100644 --- a/src/daEficas/prefs_ADAO.py.in +++ b/src/daEficas/prefs_ADAO.py.in @@ -39,3 +39,8 @@ initialdir=os.environ["PWD"] encoding='iso-8859-1' # lang indique la langue utilisee pour les chaines d'aide : fr ou ang lang='fr' +closeAutreCommande = True +closeFrameRechercheCommande = True +closeEntete = True +taille=800 + diff --git a/src/daSalome/daYacsSchemaCreator/methods.py b/src/daSalome/daYacsSchemaCreator/methods.py index 8dc97e1..b0c2f29 100644 --- a/src/daSalome/daYacsSchemaCreator/methods.py +++ b/src/daSalome/daYacsSchemaCreator/methods.py @@ -206,7 +206,10 @@ def create_yacs_proc(study_config): ADAO_Case.edAddChild(back_node) # Set content of the node back_node_script = back_node.getScript() - back_node_script += "stored = " + str(data_config["Stored"]) + "\n" + if "Stored" in data_config: + back_node_script += "stored = " + str(data_config["Stored"]) + "\n" + else: + back_node_script += "stored = 0\n" if key in init_config["Target"]: # Connect node with InitUserData back_node_script += "__builtins__[\"init_data\"] = init_data\n" @@ -237,7 +240,10 @@ def create_yacs_proc(study_config): ADAO_Case.edAddDFLink(init_node.getOutputPort("init_data"), back_node.getInputPort("init_data")) back_node_script += "# Import script and get data\n__import__(module_name)\nuser_script_module = sys.modules[module_name]\n\n" back_node_script += key + " = user_script_module." + key + "\n" - back_node_script += "stored = " + str(data_config["Stored"]) + "\n" + if "Stored" in data_config: + back_node_script += "stored = " + str(data_config["Stored"]) + "\n" + else: + back_node_script += "stored = 0\n" back_node.setScript(back_node_script) # Connect node with CreateAssimilationStudy CAS_node.edAddInputPort(key, t_pyobj) @@ -255,7 +261,10 @@ def create_yacs_proc(study_config): ADAO_Case.edAddChild(back_node) # Set content of the node back_node_script = back_node.getScript() - back_node_script += "stored = " + str(data_config["Stored"]) + "\n" + if "Stored" in data_config: + back_node_script += "stored = " + str(data_config["Stored"]) + "\n" + else: + back_node_script += "stored = 0\n" if key in init_config["Target"]: # Connect node with InitUserData back_node_script += "__builtins__[\"init_data\"] = init_data\n" + back_node_script @@ -286,7 +295,10 @@ def create_yacs_proc(study_config): ADAO_Case.edAddDFLink(init_node.getOutputPort("init_data"), back_node.getInputPort("init_data")) back_node_script += "# Import script and get data\n__import__(module_name)\nuser_script_module = sys.modules[module_name]\n\n" back_node_script += key + " = user_script_module." 
+ key + "\n" - back_node_script += "stored = " + str(data_config["Stored"]) + "\n" + if "Stored" in data_config: + back_node_script += "stored = " + str(data_config["Stored"]) + "\n" + else: + back_node_script += "stored = 0\n" back_node.setScript(back_node_script) # Connect node with CreateAssimilationStudy CAS_node.edAddInputPort(key, t_pyobj) @@ -304,7 +316,10 @@ def create_yacs_proc(study_config): ADAO_Case.edAddChild(back_node) # Set content of the node back_node_script = back_node.getScript() - back_node_script += "stored = " + str(data_config["Stored"]) + "\n" + if "Stored" in data_config: + back_node_script += "stored = " + str(data_config["Stored"]) + "\n" + else: + back_node_script += "stored = 0\n" if key in init_config["Target"]: # Connect node with InitUserData back_node_script += "__builtins__[\"init_data\"] = init_data\n" @@ -328,7 +343,10 @@ def create_yacs_proc(study_config): ADAO_Case.edAddChild(back_node) # Set content of the node back_node_script = back_node.getScript() - back_node_script += "stored = " + str(data_config["Stored"]) + "\n" + if "Stored" in data_config: + back_node_script += "stored = " + str(data_config["Stored"]) + "\n" + else: + back_node_script += "stored = 0\n" if key in init_config["Target"]: # Connect node with InitUserData back_node_script += "__builtins__[\"init_data\"] = init_data\n"
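
Illustration (hypothetical sketch, not part of the patch): the FunctionTest.py and PlatformInfo.py hunks above work together. PlatformInfo.MaximumPrecision() probes whether numpy accepts the 'float128' dtype on the current platform and returns its name, and FunctionTest.py passes that name (mfp) as the accumulator dtype of numpy.mean/numpy.std so that the printed statistics suffer less from accumulation rounding. The sketch below only mirrors that mechanism under assumed inputs: the helper name maximum_precision, the float32 sample vector and the printed labels are illustrative and do not come from the patch.

    import numpy

    def maximum_precision():
        "Return the widest float dtype usable with numpy on this platform."
        try:
            numpy.array([1.], dtype='float128')  # raises TypeError where float128 is unavailable
            return 'float128'
        except Exception:
            return 'float64'

    mfp = maximum_precision()
    x = numpy.random.normal(size=100000).astype('float32')
    # Without an explicit dtype, numpy accumulates in the input precision (float32 here);
    # with dtype=mfp the sums are accumulated in the widest float available.
    print("accumulator dtype..: %s" % mfp)
    print("mean (default).....: %.9e" % numpy.mean(x))
    print("mean (dtype=mfp)...: %.9e" % numpy.mean(x, dtype=mfp))
    print("std (dtype=mfp)....: %.9e" % numpy.std(x, dtype=mfp))

The "Remarque" added to the AdjointTest, GradientTest and TangentTest docstrings is consistent with this: values below roughly 1.e-16 are on the order of the double-precision machine epsilon (about 2.2e-16), which is why the test reports now describe them as zero at machine precision.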
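
Similarly, because the AdaoCatalogGenerator.py hunk relaxes the Stored keyword from mandatory (statut="o") to optional (statut="f"), the daYacsSchemaCreator hunks stop assuming that data_config["Stored"] exists and fall back to 0 when the key is absent. A minimal sketch of that defaulting logic, where append_stored_flag is a hypothetical helper name and data_config stands in for the study configuration dictionary; the if/else body is taken verbatim from the patch:

    def append_stored_flag(back_node_script, data_config):
        "Append a 'stored = ...' line to the node script, defaulting to 0 when Stored is absent."
        if "Stored" in data_config:
            back_node_script += "stored = " + str(data_config["Stored"]) + "\n"
        else:
            back_node_script += "stored = 0\n"
        return back_node_script

    print(append_stored_flag("", {"Stored": 1}))  # -> stored = 1
    print(append_stored_flag("", {}))             # -> stored = 0

An equivalent one-liner would be back_node_script += "stored = %s\n" % data_config.get("Stored", 0); the patch uses the explicit branch shown above.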