decl_choices = ""
decl_opts = ""
if infos.AssimDataDefaultDict[assim_data_input_name] in infos.StoredAssimData:
- storage = " Stored = SIMP(statut=\"o\", typ = \"I\", into=(0, 1), defaut=0, fr=\"Choix de stockage interne ou non du concept parent\", ang=\"Choice of the storage or not of the parent concept\"),"
+ storage = " Stored = SIMP(statut=\"f\", typ = \"I\", into=(0, 1), defaut=0, fr=\"Choix de stockage interne ou non du concept parent\", ang=\"Choice of the storage or not of the parent concept\"),"
for choice in infos.AssimDataDict[assim_data_input_name]:
choices += "\"" + choice + "\", "
decl_choices += assim_data_choice.substitute(choice_name = choice)
if choice in infos.StoredAssimData:
- storage = " Stored = SIMP(statut=\"o\", typ = \"I\", into=(0, 1), defaut=0, fr=\"Choix de stockage interne ou non du concept parent\", ang=\"Choice of the storage or not of the parent concept\"),"
+ storage = " Stored = SIMP(statut=\"f\", typ = \"I\", into=(0, 1), defaut=0, fr=\"Choix de stockage interne ou non du concept parent\", ang=\"Choice of the storage or not of the parent concept\"),"
default_choice = "\"" + infos.AssimDataDefaultDict[assim_data_input_name] + "\""
mem_file.write(assim_data_method.substitute(assim_name = assim_data_input_name,
qui doit rester constamment egal a zero a la precision du calcul.
On prend dX0 = Normal(0,X) et dX = Alpha*dX0. F est le code de calcul.
Y doit etre dans l'image de F. S'il n'est pas donne, on prend Y = F(X).
+
+ Remarque : les nombres inferieurs a 1.e-16 (environ) representent un zero
+ a la precision machine.
"""
#
if len(self._parameters["ResultTitle"]) > 0:
# Author: Jean-Philippe Argaud, jean-philippe.argaud@edf.fr, EDF R&D
import logging
-from daCore import BasicObjects
+from daCore import BasicObjects, PlatformInfo
import numpy, copy
+mfp = PlatformInfo.PlatformInfo().MaximumPrecision()
# ==============================================================================
class ElementaryAlgorithm(BasicObjects.Algorithm):
msg += (" Lenght of vector...: %i\n")%max(numpy.matrix( Xn ).shape)
msg += (" Minimum value......: %."+str(_p)+"e\n")%numpy.min( Xn )
msg += (" Maximum value......: %."+str(_p)+"e\n")%numpy.max( Xn )
- msg += (" Mean of vector.....: %."+str(_p)+"e\n")%numpy.mean( Xn )
- msg += (" Standard error.....: %."+str(_p)+"e\n")%numpy.std( Xn )
+ msg += (" Mean of vector.....: %."+str(_p)+"e\n")%numpy.mean( Xn, dtype=mfp )
+ msg += (" Standard error.....: %."+str(_p)+"e\n")%numpy.std( Xn, dtype=mfp )
msg += (" L2 norm of vector..: %."+str(_p)+"e\n")%numpy.linalg.norm( Xn )
print(msg)
#
msg += (" Lenght of vector...: %i\n")%max(numpy.matrix( Yn ).shape)
msg += (" Minimum value......: %."+str(_p)+"e\n")%numpy.min( Yn )
msg += (" Maximum value......: %."+str(_p)+"e\n")%numpy.max( Yn )
- msg += (" Mean of vector.....: %."+str(_p)+"e\n")%numpy.mean( Yn )
- msg += (" Standard error.....: %."+str(_p)+"e\n")%numpy.std( Yn )
+ msg += (" Mean of vector.....: %."+str(_p)+"e\n")%numpy.mean( Yn, dtype=mfp )
+ msg += (" Standard error.....: %."+str(_p)+"e\n")%numpy.std( Yn, dtype=mfp )
msg += (" L2 norm of vector..: %."+str(_p)+"e\n")%numpy.linalg.norm( Yn )
print(msg)
if "SimulatedObservationAtCurrentState" in self._parameters["StoreSupplementaryCalculations"]:
if self._parameters["NumberOfRepetition"] > 1:
msg = (" %s\n"%("-"*75,))
msg += ("\n===> Statistical analysis of the outputs obtained throught repeated evaluations\n")
+ msg += ("\n (Remark: numbers that are (about) under 1.e-16 represent 0 to machine precision)\n")
Yy = numpy.array( Ys )
msg += ("\n Characteristics of the whole set of outputs Y:\n")
msg += (" Number of evaluations.........................: %i\n")%len( Ys )
msg += (" Minimum value of the whole set of outputs.....: %."+str(_p)+"e\n")%numpy.min( Yy )
msg += (" Maximum value of the whole set of outputs.....: %."+str(_p)+"e\n")%numpy.max( Yy )
- msg += (" Mean of vector of the whole set of outputs....: %."+str(_p)+"e\n")%numpy.mean( Yy )
- msg += (" Standard error of the whole set of outputs....: %."+str(_p)+"e\n")%numpy.std( Yy )
- Ym = numpy.mean( numpy.array( Ys ), axis=0 )
+ msg += (" Mean of vector of the whole set of outputs....: %."+str(_p)+"e\n")%numpy.mean( Yy, dtype=mfp )
+ msg += (" Standard error of the whole set of outputs....: %."+str(_p)+"e\n")%numpy.std( Yy, dtype=mfp )
+ Ym = numpy.mean( numpy.array( Ys ), axis=0, dtype=mfp )
msg += ("\n Characteristics of the vector Ym, mean of the outputs Y:\n")
msg += (" Size of the mean of the outputs...............: %i\n")%Ym.size
msg += (" Minimum value of the mean of the outputs......: %."+str(_p)+"e\n")%numpy.min( Ym )
msg += (" Maximum value of the mean of the outputs......: %."+str(_p)+"e\n")%numpy.max( Ym )
- msg += (" Mean of the mean of the outputs...............: %."+str(_p)+"e\n")%numpy.mean( Ym )
- msg += (" Standard error of the mean of the outputs.....: %."+str(_p)+"e\n")%numpy.std( Ym )
- Ye = numpy.mean( numpy.array( Ys ) - Ym, axis=0 )
+ msg += (" Mean of the mean of the outputs...............: %."+str(_p)+"e\n")%numpy.mean( Ym, dtype=mfp )
+ msg += (" Standard error of the mean of the outputs.....: %."+str(_p)+"e\n")%numpy.std( Ym, dtype=mfp )
+ Ye = numpy.mean( numpy.array( Ys ) - Ym, axis=0, dtype=mfp )
msg += "\n Characteristics of the mean of the differences between the outputs Y and their mean Ym:\n"
msg += (" Size of the mean of the differences...........: %i\n")%Ym.size
msg += (" Minimum value of the mean of the differences..: %."+str(_p)+"e\n")%numpy.min( Ye )
msg += (" Maximum value of the mean of the differences..: %."+str(_p)+"e\n")%numpy.max( Ye )
- msg += (" Mean of the mean of the differences...........: %."+str(_p)+"e\n")%numpy.mean( Ye )
- msg += (" Standard error of the mean of the differences.: %."+str(_p)+"e\n")%numpy.std( Ye )
+ msg += (" Mean of the mean of the differences...........: %."+str(_p)+"e\n")%numpy.mean( Ye, dtype=mfp )
+ msg += (" Standard error of the mean of the differences.: %."+str(_p)+"e\n")%numpy.std( Ye, dtype=mfp )
msg += ("\n %s\n"%("-"*75,))
print(msg)
#
faite dans le calcul du terme GradientF_X.
On prend dX0 = Normal(0,X) et dX = Alpha*dX0. F est le code de calcul.
+
+ Remarque : les nombres inferieurs a 1.e-16 (environ) representent un zero
+ a la precision machine.
"""
if self._parameters["ResiduFormula"] == "TaylorOnNorm":
__entete = " i Alpha ||X|| ||F(X)|| ||F(X+dX)|| ||dX|| ||F(X+dX)-F(X)|| ||F(X+dX)-F(X)||/||dX|| R(Alpha) log( R ) "
grandeur de ||F(X)||.
On prend dX0 = Normal(0,X) et dX = Alpha*dX0. F est le code de calcul.
+
+ Remarque : les nombres inferieurs a 1.e-16 (environ) representent un zero
+ a la precision machine.
"""
if self._parameters["ResiduFormula"] == "Norm":
__entete = " i Alpha ||X|| ||F(X)|| ||F(X+dX)|| ||dX|| ||F(X+dX)-F(X)|| ||F(X+dX)-F(X)||/||dX|| R(Alpha) log( R ) "
qui doit rester constant jusqu'à ce que l'on atteigne la précision du calcul.
On prend dX0 = Normal(0,X) et dX = Alpha*dX0. F est le code de calcul.
+
+ Remarque : les nombres inferieurs a 1.e-16 (environ) representent un zero
+ a la precision machine.
"""
#
if len(self._parameters["ResultTitle"]) > 0:
l'on atteigne la précision du calcul.
On prend dX0 = Normal(0,X) et dX = Alpha*dX0. F est le code de calcul.
+
+ Remarque : les nombres inferieurs a 1.e-16 (environ) representent un zero
+ a la precision machine.
"""
#
if len(self._parameters["ResultTitle"]) > 0:
logging.debug("Fail initial import of scipy.optimize")
import Persistence
from BasicObjects import Operator, Covariance
-from PlatformInfo import uniq
+import PlatformInfo
# ==============================================================================
class AssimilationStudy:
# qui est activée dans Persistence)
self.__parent = os.path.abspath(os.path.join(os.path.dirname(__file__),".."))
sys.path.insert(0, self.__parent)
- sys.path = uniq( sys.path ) # Conserve en unique exemplaire chaque chemin
+ sys.path = PlatformInfo.uniq( sys.path ) # Conserve en unique exemplaire chaque chemin
# ---------------------------------------------------------
def setBackground(self,
# -----------------------------------------------------------
def setControlModel(self,
- asFunction = {"Direct":None, "Tangent":None, "Adjoint":None,
- "useApproximatedDerivatives":False,
- "withCenteredDF" :False,
- "withIncrement" :0.01,
- "withdX" :None,
- },
+ asFunction = None,
asMatrix = None,
Scheduler = None,
toBeStored = False,
if not os.path.isfile(os.path.join(asPath,"daAlgorithms","__init__.py")):
raise ValueError("The given \""+asPath+"/daAlgorithms\" path must contain a file named \"__init__.py\"")
sys.path.insert(0, os.path.abspath(asPath))
- sys.path = uniq( sys.path ) # Conserve en unique exemplaire chaque chemin
+ sys.path = PlatformInfo.uniq( sys.path ) # Conserve en unique exemplaire chaque chemin
return 1
def get_diagnostics_main_path(self):
if not os.path.isfile(os.path.join(asPath,"daDiagnostics","__init__.py")):
raise ValueError("The given \""+asPath+"/daDiagnostics\" path must contain a file named \"__init__.py\"")
sys.path.insert(0, os.path.abspath(asPath))
- sys.path = uniq( sys.path ) # Conserve en unique exemplaire chaque chemin
+ sys.path = PlatformInfo.uniq( sys.path ) # Conserve en unique exemplaire chaque chemin
return 1
# -----------------------------------------------------------
"Retourne la taille mémoire courante utilisée"
return 1
+    def MaximumPrecision(self):
+        "Retourne la precision maximale flottante pour Numpy"
+        # Probe once whether this platform's numpy build provides extended
+        # precision: 'float128' is absent on some platforms (the dtype
+        # lookup then raises), in which case we fall back to 'float64'.
+        import numpy
+        try:
+            numpy.array([1.,], dtype='float128')
+            mfp = 'float128'
+        except Exception:  # narrowed from bare "except:"; only the dtype probe may fail
+            mfp = 'float64'
+        return mfp
+
def __str__(self):
import version as dav
return "%s %s (%s)"%(dav.name,dav.version,dav.date)
def __init__(self,appli,repIni):
- self.labels_eficas = ['lang']
+ # self.labels_eficas = ['lang']
+ self.labels_eficas = ['lang','rep_cata','catalogues','closeAutreCommande','closeFrameRechercheCommande','closeEntete','taille']
configuration.CONFIG_BASE.__init__(self,appli,repIni)
self.rep_user = os.environ["HOME"]
encoding='iso-8859-1'
# lang indique la langue utilisee pour les chaines d'aide : fr ou ang
lang='fr'
+closeAutreCommande = True
+closeFrameRechercheCommande = True
+closeEntete = True
+taille=800
+
ADAO_Case.edAddChild(back_node)
# Set content of the node
back_node_script = back_node.getScript()
-        back_node_script += "stored = " + str(data_config["Stored"]) + "\n"
+        # "Stored" is optional in data_config: default to 0 (not stored)
+        back_node_script += "stored = " + str(data_config.get("Stored", 0)) + "\n"
if key in init_config["Target"]:
# Connect node with InitUserData
back_node_script += "__builtins__[\"init_data\"] = init_data\n"
ADAO_Case.edAddDFLink(init_node.getOutputPort("init_data"), back_node.getInputPort("init_data"))
back_node_script += "# Import script and get data\n__import__(module_name)\nuser_script_module = sys.modules[module_name]\n\n"
back_node_script += key + " = user_script_module." + key + "\n"
-        back_node_script += "stored = " + str(data_config["Stored"]) + "\n"
+        # "Stored" is optional in data_config: default to 0 (not stored)
+        back_node_script += "stored = " + str(data_config.get("Stored", 0)) + "\n"
back_node.setScript(back_node_script)
# Connect node with CreateAssimilationStudy
CAS_node.edAddInputPort(key, t_pyobj)
ADAO_Case.edAddChild(back_node)
# Set content of the node
back_node_script = back_node.getScript()
-        back_node_script += "stored = " + str(data_config["Stored"]) + "\n"
+        # "Stored" is optional in data_config: default to 0 (not stored)
+        back_node_script += "stored = " + str(data_config.get("Stored", 0)) + "\n"
if key in init_config["Target"]:
# Connect node with InitUserData
back_node_script += "__builtins__[\"init_data\"] = init_data\n" + back_node_script
ADAO_Case.edAddDFLink(init_node.getOutputPort("init_data"), back_node.getInputPort("init_data"))
back_node_script += "# Import script and get data\n__import__(module_name)\nuser_script_module = sys.modules[module_name]\n\n"
back_node_script += key + " = user_script_module." + key + "\n"
-        back_node_script += "stored = " + str(data_config["Stored"]) + "\n"
+        # "Stored" is optional in data_config: default to 0 (not stored)
+        back_node_script += "stored = " + str(data_config.get("Stored", 0)) + "\n"
back_node.setScript(back_node_script)
# Connect node with CreateAssimilationStudy
CAS_node.edAddInputPort(key, t_pyobj)
ADAO_Case.edAddChild(back_node)
# Set content of the node
back_node_script = back_node.getScript()
-        back_node_script += "stored = " + str(data_config["Stored"]) + "\n"
+        # "Stored" is optional in data_config: default to 0 (not stored)
+        back_node_script += "stored = " + str(data_config.get("Stored", 0)) + "\n"
if key in init_config["Target"]:
# Connect node with InitUserData
back_node_script += "__builtins__[\"init_data\"] = init_data\n"
ADAO_Case.edAddChild(back_node)
# Set content of the node
back_node_script = back_node.getScript()
-        back_node_script += "stored = " + str(data_config["Stored"]) + "\n"
+        # "Stored" is optional in data_config: default to 0 (not stored)
+        back_node_script += "stored = " + str(data_config.get("Stored", 0)) + "\n"
if key in init_config["Target"]:
# Connect node with InitUserData
back_node_script += "__builtins__[\"init_data\"] = init_data\n"