dnl Author: André Ribes, andre.ribes@edf.fr, EDF R&D
AC_DEFUN([CHECK_SCIPY],[
-AC_REQUIRE([CHECK_PYTHON])dnl
+AC_REQUIRE([CHECK_PYTHON3])dnl
scipy_ok=no
-scipydir=`$PYTHON -c "import scipy;print scipy.get_include()" 2>/dev/null`
+scipydir=`$PYTHON -c "import scipy;print(scipy.get_include())" 2>/dev/null`
if test -d "$scipydir"; then
scipy_ok=yes
import traceback
import sys
import string
-import StringIO
+import io
import module_version
AdjointOperatorInNS.info = u"The Python file has to contain explicitly an \\"AdjointOperator\\" function definition with only one pair of vectors as argument."
"""%(module_version.name,module_version.version)
-# Important : validators=[...] pour que les conditions soient traitées simultanément, en "ET", et pas en "OU" (choisi dans le cas du tuple à la place de la liste)
+# Important : validators=[...] pour que les conditions soient traitées simultanément, en "ET", et pas en "OU" (choisi dans le cas du tuple à la place de la liste)
# validators=[OnlyStr(), FileExtVal('py'), FunctionVal(fv)]
data_method = """
def F_${data_name}(statut, fv=NoCheckInNS) : return FACT(
statut = statut,
FROM = SIMP(statut = "o", typ = "TXM", into=(${data_into}), defaut=${data_default}),
SCRIPT_DATA = BLOC ( condition = " FROM in ( 'Script', ) ",
- SCRIPT_FILE = SIMP(statut = "o", typ = "FichierNoAbs", validators=[OnlyStr(), FileExtVal('py'), FunctionVal(fv)], fr="En attente d'un nom de fichier script, avec ou sans le chemin complet pour le trouver, contenant si nécessaire la définition d'une variable interne de même nom que le concept parent", ang="Waiting for a script file name, with or without the full path to find it, containing if necessary the definition of an internal variable of the same name as the parent concept"),
+ SCRIPT_FILE = SIMP(statut = "o", typ = "FichierNoAbs", validators=[OnlyStr(), FileExtVal('py'), FunctionVal(fv)], fr="En attente d'un nom de fichier script, avec ou sans le chemin complet pour le trouver, contenant si nécessaire la définition d'une variable interne de même nom que le concept parent", ang="Waiting for a script file name, with or without the full path to find it, containing if necessary the definition of an internal variable of the same name as the parent concept"),
),
STRING_DATA = BLOC ( condition = " FROM in ( 'String', ) ",
- STRING = SIMP(statut = "o", typ = "TXM",${ms_default} fr="En attente d'une chaine de caractères entre guillements. Pour construire un vecteur ou une matrice, ce doit être une suite de nombres, utilisant un espace ou une virgule pour séparer deux éléments et un point-virgule pour séparer deux lignes", ang="Waiting for a string in quotes. To build a vector or a matrix, it has to be a float serie, using a space or comma to separate two elements in a line, a semi-colon to separate rows"),
+ STRING = SIMP(statut = "o", typ = "TXM",${ms_default} fr="En attente d'une chaine de caractères entre guillements. Pour construire un vecteur ou une matrice, ce doit être une suite de nombres, utilisant un espace ou une virgule pour séparer deux éléments et un point-virgule pour séparer deux lignes", ang="Waiting for a string in quotes. To build a vector or a matrix, it has to be a float serie, using a space or comma to separate two elements in a line, a semi-colon to separate rows"),
),
SCRIPTWITHFUNCTIONS_DATA = BLOC ( condition = " FROM in ( 'ScriptWithFunctions', ) ",
- SCRIPTWITHFUNCTIONS_FILE = SIMP(statut = "o", typ = "FichierNoAbs", validators=[OnlyStr(), FileExtVal('py'), FunctionVal(DirectOperatorInNS), FunctionVal(TangentOperatorInNS), FunctionVal(AdjointOperatorInNS)], fr="En attente d'un nom de fichier script, avec ou sans le chemin complet pour le trouver, contenant en variables internes trois fonctions de calcul nommées DirectOperator, TangentOperator et AdjointOperator", ang="Waiting for a script file name, with or without the full path to find it, containing as internal variables three computation functions named DirectOperator, TangentOperator and AdjointOperator"),
+ SCRIPTWITHFUNCTIONS_FILE = SIMP(statut = "o", typ = "FichierNoAbs", validators=[OnlyStr(), FileExtVal('py'), FunctionVal(DirectOperatorInNS), FunctionVal(TangentOperatorInNS), FunctionVal(AdjointOperatorInNS)], fr="En attente d'un nom de fichier script, avec ou sans le chemin complet pour le trouver, contenant en variables internes trois fonctions de calcul nommées DirectOperator, TangentOperator et AdjointOperator", ang="Waiting for a script file name, with or without the full path to find it, containing as internal variables three computation functions named DirectOperator, TangentOperator and AdjointOperator"),
),
SCRIPTWITHONEFUNCTION_DATA = BLOC ( condition = " FROM in ( 'ScriptWithOneFunction', ) ",
- SCRIPTWITHONEFUNCTION_FILE = SIMP(statut = "o", typ = "FichierNoAbs", validators=[OnlyStr(), FileExtVal('py'), FunctionVal(DirectOperatorInNS)], fr="En attente d'un nom de fichier script, avec ou sans le chemin complet pour le trouver, contenant en variable interne une seule fonction de calcul nommée DirectOperator", ang="Waiting for a script file name, with or without the full path to find it, containing as internal variable only one function named DirectOperator"),
- DifferentialIncrement = SIMP(statut="o", typ = "R", val_min=0, val_max=1, defaut=0.01, fr="Incrément de la perturbation dX pour calculer la dérivée, construite en multipliant X par l'incrément en évitant les valeurs nulles", ang="Increment of dX perturbation to calculate the derivative, build multiplying X by the increment avoiding null values"),
- CenteredFiniteDifference = SIMP(statut="o", typ = "I", into=(0, 1), defaut=0, fr="Formulation centrée (1) ou décentrée (0) pour la méthode des différences finies", ang="Centered (1) or uncentered (0) formulation for the finite differences method"),
- EnableMultiProcessing = SIMP(statut="f", typ = "I", into=(0, 1), defaut=0, fr="Calculs élémentaires effectués en séquentiel (0) ou en parallèle (1) dans la méthode des différences finies", ang="Elementary calculations done sequentially (0) or in parallel (1) in the finite differences method"),
- NumberOfProcesses = SIMP(statut="f", typ = "I", val_min=0, defaut=0, fr="Nombre de processus parallèles, 0 pour un contrôle automatique", ang="Number of parallel processes, 0 for automatic control"),
+ SCRIPTWITHONEFUNCTION_FILE = SIMP(statut = "o", typ = "FichierNoAbs", validators=[OnlyStr(), FileExtVal('py'), FunctionVal(DirectOperatorInNS)], fr="En attente d'un nom de fichier script, avec ou sans le chemin complet pour le trouver, contenant en variable interne une seule fonction de calcul nommée DirectOperator", ang="Waiting for a script file name, with or without the full path to find it, containing as internal variable only one function named DirectOperator"),
+ DifferentialIncrement = SIMP(statut="o", typ = "R", val_min=0, val_max=1, defaut=0.01, fr="Incrément de la perturbation dX pour calculer la dérivée, construite en multipliant X par l'incrément en évitant les valeurs nulles", ang="Increment of dX perturbation to calculate the derivative, build multiplying X by the increment avoiding null values"),
+ CenteredFiniteDifference = SIMP(statut="o", typ = "I", into=(0, 1), defaut=0, fr="Formulation centrée (1) ou décentrée (0) pour la méthode des différences finies", ang="Centered (1) or uncentered (0) formulation for the finite differences method"),
+ EnableMultiProcessing = SIMP(statut="f", typ = "I", into=(0, 1), defaut=0, fr="Calculs élémentaires effectués en séquentiel (0) ou en parallèle (1) dans la méthode des différences finies", ang="Elementary calculations done sequentially (0) or in parallel (1) in the finite differences method"),
+ NumberOfProcesses = SIMP(statut="f", typ = "I", val_min=0, defaut=0, fr="Nombre de processus parallèles, 0 pour un contrôle automatique", ang="Number of parallel processes, 0 for automatic control"),
),
SCRIPTWITHSWITCH_DATA = BLOC ( condition = " FROM in ( 'ScriptWithSwitch', ) ",
SCRIPTWITHSWITCH_FILE = SIMP(statut = "o", typ = "FichierNoAbs", validators=[OnlyStr(), FileExtVal('py')], fr="En attente d'un nom de fichier script, avec ou sans le chemin complet pour le trouver, contenant un switch pour les calculs direct, tangent et adjoint", ang="Waiting for a script file name, with or without the full path to find it, containing a switch for direct, tangent and adjoint computations"),
#----------- Begin generation script -----------#
-print "-- Starting AdaoCalatogGenerator.py --"
+print("-- Starting AdaoCatalogGenerator.py --")
try:
import daEficas
catalog_name = args[1]
# Generates into a string
-mem_file = StringIO.StringIO()
+mem_file = io.StringIO()
# Start file
from time import strftime
# Step 1: A partir des infos, on cree les fonctions qui vont permettre
# d'entrer les donnees utilisateur
-for data_input_name in infos.DataTypeDict.keys():
+for data_input_name in infos.DataTypeDict:
logging.debug('A data input Type is found: ' + data_input_name)
data_name = data_input_name
data_into = ""
# On choisit le default
data_default = "\"" + infos.DataTypeDefaultDict[data_input_name] + "\""
- if infos.DataSValueDefaultDict.has_key(data_input_name):
+ if data_input_name in infos.DataSValueDefaultDict:
ms_default = " defaut=\"" + infos.DataSValueDefaultDict[data_input_name] + "\","
mem_file.write(data_method.substitute(data_name = data_name,
algos_names = algos_names+check_names))
# Step 2: On cree les fonctions qui permettent de rentrer les donnees des algorithmes
-for assim_data_input_name in infos.AssimDataDict.keys():
+for assim_data_input_name in infos.AssimDataDict:
logging.debug("An input function data input is found: " + assim_data_input_name)
- # assim_name = assim_data_input_name
+ # assim_name = assim_data_input_name
storage = ""
choices = ""
default_choice = ""
default_choice = default_choice))
# Step 3: On ajoute les fonctions representant les options possibles
-for opt_name in infos.OptDict.keys():
+for opt_name in infos.OptDict:
logging.debug("An optional node is found: " + opt_name)
data_name = opt_name
data_into = ""
# On choisit le default
data_default = "\"" + infos.OptDefaultDict[opt_name] + "\""
- if infos.DataSValueDefaultDict.has_key(opt_name):
+ if opt_name in infos.DataSValueDefaultDict:
ms_default = " defaut=\"" + infos.DataSValueDefaultDict[opt_name] + "\","
mem_file.write(data_method.substitute(data_name = data_name,
assim_study_object = daCore.AssimilationStudy.AssimilationStudy()
assim_study_object.setAlgorithm(choice=algo)
par_dict = assim_study_object.getAlgorithmParameters(False)
- par_keys = par_dict.keys()
- par_keys.sort()
+ par_keys = sorted(par_dict.keys())
algo_parameters = ""
for pn in par_keys:
if pn in ("StoreInternalVariables", "PlotAndSave", "ResultFile", "ResultTitle", "ResultLabel"): continue # Cles a supprimer
pt = par_dict[pn]["typecast"]
pd = par_dict[pn]["default"]
pm = par_dict[pn]["message"]
- if par_dict[pn].has_key("minval") and par_dict[pn]["minval"] is not None:
+ if "minval" in par_dict[pn] and par_dict[pn]["minval"] is not None:
vi = ", val_min=%s"%par_dict[pn]["minval"]
else:
vi = ""
- if par_dict[pn].has_key("minval") and par_dict[pn]["maxval"] is not None:
+ if "maxval" in par_dict[pn] and par_dict[pn]["maxval"] is not None:
vs = ", val_max=%s"%par_dict[pn]["maxval"]
else:
vs = ""
algo_parameters += """ %s = SIMP(statut="f", typ="R"%s%s, min=1, max=1, defaut=%s, fr="%s"),\n"""%(pn,vi,vs,float(pd),pm)
elif pt is bool:
algo_parameters += """ %s = SIMP(statut="f", typ="I", min=1, max=1, defaut=%s, fr="%s"),\n"""%(pn,int(pd),pm)
- elif pt is str and par_dict[pn].has_key("listval"):
+ elif pt is str and "listval" in par_dict[pn]:
algo_parameters += """ %s = SIMP(statut="f", typ="TXM", min=1, max=1, defaut="%s", into=%s, fr="%s"),\n"""%(pn,str(pd),par_dict[pn]["listval"],pm)
- elif pt is tuple and par_dict[pn].has_key("listval"):
+ elif pt is tuple and "listval" in par_dict[pn]:
algo_parameters += """ %s = SIMP(statut="f", typ="TXM", max="**", into=%s, fr="%s"),\n"""%(pn,par_dict[pn]["listval"],pm)
else:
algo_parameters += """ %s = SIMP(statut="f", typ="TXM", fr="%s"),\n"""%(pn,pm)
AC_PROG_CC
AC_PROG_CXX
+AM_PATH_PYTHON([3.4])
CHECK_KERNEL
CHECK_OMNIORB
CHECK_EFICAS
master_doc = 'index'
# General information about the project.
-project = u'%s'%module_version.name
-copyright = u'2008-%s, Jean-Philippe ARGAUD'%module_version.year
+project = '%s'%module_version.name
+copyright = '2008-%s, Jean-Philippe ARGAUD'%module_version.year
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
- ('index', 'ADAO.tex', u'ADAO documentation',
- u'Jean-Philippe ARGAUD', 'manual'),
+ ('index', 'ADAO.tex', 'ADAO documentation',
+ 'Jean-Philippe ARGAUD', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# Bibliographic Dublin Core info.
epub_title = project
-epub_author = u'Jean-Philippe ARGAUD'
-epub_publisher = u'Jean-Philippe ARGAUD'
+epub_author = 'Jean-Philippe ARGAUD'
+epub_publisher = 'Jean-Philippe ARGAUD'
epub_copyright = copyright
# The language of the text. It defaults to the language option
# would mean that specific document would be compressed
# regardless of the global pdf_compressed setting.
pdf_documents = [
- ('contents', u'ADAO', u'ADAO', u'Jean-Philippe ARGAUD', dict(pdf_compressed = True)),
+ ('contents', 'ADAO', 'ADAO', 'Jean-Philippe ARGAUD', dict(pdf_compressed = True)),
]
# A comma-separated list of custom stylesheets. Example:
pdf_stylesheets = ['sphinx','kerning','a4']
master_doc = 'index'
# General information about the project.
-project = u'%s'%module_version.name
-copyright = u'2008-%s, Jean-Philippe ARGAUD'%module_version.year
+project = '%s'%module_version.name
+copyright = '2008-%s, Jean-Philippe ARGAUD'%module_version.year
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
- ('index', 'ADAO.tex', u'Documentation ADAO',
- u'Jean-Philippe ARGAUD', 'manual'),
+ ('index', 'ADAO.tex', 'Documentation ADAO',
+ 'Jean-Philippe ARGAUD', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# Bibliographic Dublin Core info.
epub_title = project
-epub_author = u'Jean-Philippe ARGAUD'
-epub_publisher = u'Jean-Philippe ARGAUD'
+epub_author = 'Jean-Philippe ARGAUD'
+epub_publisher = 'Jean-Philippe ARGAUD'
epub_copyright = copyright
# The language of the text. It defaults to the language option
# would mean that specific document would be compressed
# regardless of the global pdf_compressed setting.
pdf_documents = [
- ('contents', u'ADAO', u'ADAO', u'Jean-Philippe ARGAUD', dict(pdf_compressed = True)),
+ ('contents', 'ADAO', 'ADAO', 'Jean-Philippe ARGAUD', dict(pdf_compressed = True)),
]
# A comma-separated list of custom stylesheets. Example:
pdf_stylesheets = ['sphinx','kerning','a4']
def FunctionH( X ):
return H * X
#
-def AdjointH( (X, Y) ):
+def AdjointH( pair ):
+    (X, Y) = pair
return H.T * Y
#
# The possible computations
time.sleep(1)
return H * X
#
-def AdjointH( (X, Y) ):
+def AdjointH( pair ):
+    (X, Y) = pair
return H.T * Y
#
# The possible computations
# ----------------------------------------------------------
ObservationError = R
-print xb
-print B
-print yo
-print R
+print(xb)
+print(B)
+print(yo)
+print(R)
#
# Definition of the init_data dictionnary
-print " ---> observerState"
-print " var =",var[-1]
-print " info =",info
+print(" ---> observerState")
+print(" var =",var[-1])
+print(" info =",info)
#
import Gnuplot
import os
gp.plot( Gnuplot.Data( var[-1] ) )
filename = os.path.join("/tmp", "imageState_%02i.ps"%numero)
-print " imageState \"%s\""%filename
+print(" imageState \"%s\""%filename)
gp.hardcopy(filename=filename, color=1)
numero += 1
# ==============================================================================
if __name__ == "__main__":
- print
- print "AUTODIAGNOSTIC"
- print "=============="
+ print()
+ print("AUTODIAGNOSTIC")
+ print("==============")
- print
- print "True_state = ", True_state()
- print
- print "B or R =\n",Simple_Matrix(3)
- print
- print "B or R =\n",Simple_Matrix(4, diagonal=numpy.arange(4,dtype=float))
- print
+ print()
+ print("True_state = ", True_state())
+ print()
+ print("B or R =\n",Simple_Matrix(3))
+ print()
+ print("B or R =\n",Simple_Matrix(4, diagonal=numpy.arange(4,dtype=float)))
+ print()
""" Direct non-linear simulation operator """
#
# --------------------------------------> EXAMPLE TO BE REMOVED
- if type(XX) is type(numpy.matrix([])): # EXAMPLE TO BE REMOVED
+ if isinstance(XX, type(numpy.matrix([]))): # EXAMPLE TO BE REMOVED
HX = XX.A1.tolist() # EXAMPLE TO BE REMOVED
- elif type(XX) is type(numpy.array([])): # EXAMPLE TO BE REMOVED
+ elif isinstance(XX, type(numpy.array([]))): # EXAMPLE TO BE REMOVED
HX = numpy.matrix(XX).A1.tolist() # EXAMPLE TO BE REMOVED
else: # EXAMPLE TO BE REMOVED
HX = XX # EXAMPLE TO BE REMOVED
# ==============================================================================
if __name__ == "__main__":
- print
- print "AUTODIAGNOSTIC"
- print "=============="
+ print()
+ print("AUTODIAGNOSTIC")
+ print("==============")
from Physical_data_and_covariance_matrices import True_state
X0, noms = True_state()
FX = DirectOperator( X0 )
- print "FX =", FX
- print
+ print("FX =", FX)
+ print()
#
# Verifying the results by printing
# ---------------------------------
-print
-print "obs = [%s]"%(", ".join(["%.4f"%v for v in ADD.get("Observation").A1]))
-print
-print "xb = [%s]"%(", ".join(["%.4f"%v for v in ADD.get("Background").A1]))
-print "xt = [%s]"%(", ".join(["%.4f"%v for v in numpy.array(xt)]))
-print "xa = [%s]"%(", ".join(["%.4f"%v for v in numpy.array(xa)]))
-print
+print()
+print("obs = [%s]"%(", ".join(["%.4f"%v for v in ADD.get("Observation").A1])))
+print()
+print("xb = [%s]"%(", ".join(["%.4f"%v for v in ADD.get("Background").A1])))
+print("xt = [%s]"%(", ".join(["%.4f"%v for v in numpy.array(xt)])))
+print("xa = [%s]"%(", ".join(["%.4f"%v for v in numpy.array(xa)])))
+print()
for i in range( len(x_series) ):
- print "Step %2i : J = %.4e X = [%s]"%(i, J[i], ", ".join(["%.4f"%v for v in x_series[i]]))
-print
+ print("Step %2i : J = %.4e X = [%s]"%(i, J[i], ", ".join(["%.4f"%v for v in x_series[i]])))
+print()
#
# ==============================================================================
if "MahalanobisConsistency" in self._parameters["StoreSupplementaryCalculations"]:
self.StoredVariables["MahalanobisConsistency"].store( float( 2.*MinJ/d.size ) )
if "SimulationQuantiles" in self._parameters["StoreSupplementaryCalculations"]:
- Qtls = map(float, self._parameters["Quantiles"])
+ Qtls = list(map(float, self._parameters["Quantiles"]))
nech = self._parameters["NumberOfSamplesForQuantiles"]
HXa = numpy.matrix(numpy.ravel( HXa )).T
YfQ = None
#
# Point de démarrage de l'optimisation : Xini = Xb
# ------------------------------------
- if type(Xb) is type(numpy.matrix([])):
+ if isinstance(Xb, type(numpy.matrix([]))):
Xini = Xb.A1.tolist()
else:
Xini = list(Xb)
#
# Sorties eventuelles
# -------------------
- print("\nResults of adjoint check by \"%s\" formula:"%self._parameters["ResiduFormula"])
+ print(("\nResults of adjoint check by \"%s\" formula:"%self._parameters["ResiduFormula"]))
print(msgs)
#
self._post_run(HO)
if "MahalanobisConsistency" in self._parameters["StoreSupplementaryCalculations"]:
self.StoredVariables["MahalanobisConsistency"].store( float( 2.*J/d.size ) )
if "SimulationQuantiles" in self._parameters["StoreSupplementaryCalculations"]:
- Qtls = map(float, self._parameters["Quantiles"])
+ Qtls = list(map(float, self._parameters["Quantiles"]))
nech = self._parameters["NumberOfSamplesForQuantiles"]
YfQ = None
for i in range(nech):
lb = lub[:,0] ; lb[numpy.isnan(lb)] = -float('inf')
ub = lub[:,1] ; ub[numpy.isnan(ub)] = +float('inf')
if self._parameters["optdisp"]:
- print("%s: upper bounds %s"%(opt.get_algorithm_name(),ub))
- print("%s: lower bounds %s"%(opt.get_algorithm_name(),lb))
+ print(("%s: upper bounds %s"%(opt.get_algorithm_name(),ub)))
+ print(("%s: lower bounds %s"%(opt.get_algorithm_name(),lb)))
opt.set_upper_bounds(ub)
opt.set_lower_bounds(lb)
opt.set_ftol_rel(self._parameters["CostDecrementTolerance"])
opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
Minimum = opt.optimize( Xini )
if self._parameters["optdisp"]:
- print("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum))
- print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value()))
- print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result()))
+ print(("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum)))
+ print(("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value())))
+ print(("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result())))
elif self._parameters["Minimizer"] == "SIMPLEX" and not PlatformInfo.has_nlopt:
Minimum, J_optimal, niter, nfeval, rc = scipy.optimize.fmin(
func = CostFunction,
lb = lub[:,0] ; lb[numpy.isnan(lb)] = -float('inf')
ub = lub[:,1] ; ub[numpy.isnan(ub)] = +float('inf')
if self._parameters["optdisp"]:
- print("%s: upper bounds %s"%(opt.get_algorithm_name(),ub))
- print("%s: lower bounds %s"%(opt.get_algorithm_name(),lb))
+ print(("%s: upper bounds %s"%(opt.get_algorithm_name(),ub)))
+ print(("%s: lower bounds %s"%(opt.get_algorithm_name(),lb)))
opt.set_upper_bounds(ub)
opt.set_lower_bounds(lb)
opt.set_ftol_rel(self._parameters["CostDecrementTolerance"])
opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
Minimum = opt.optimize( Xini )
if self._parameters["optdisp"]:
- print("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum))
- print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value()))
- print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result()))
+ print(("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum)))
+ print(("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value())))
+ print(("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result())))
elif self._parameters["Minimizer"] == "BOBYQA" and PlatformInfo.has_nlopt:
import nlopt
opt = nlopt.opt(nlopt.LN_BOBYQA, Xini.size)
lb = lub[:,0] ; lb[numpy.isnan(lb)] = -float('inf')
ub = lub[:,1] ; ub[numpy.isnan(ub)] = +float('inf')
if self._parameters["optdisp"]:
- print("%s: upper bounds %s"%(opt.get_algorithm_name(),ub))
- print("%s: lower bounds %s"%(opt.get_algorithm_name(),lb))
+ print(("%s: upper bounds %s"%(opt.get_algorithm_name(),ub)))
+ print(("%s: lower bounds %s"%(opt.get_algorithm_name(),lb)))
opt.set_upper_bounds(ub)
opt.set_lower_bounds(lb)
opt.set_ftol_rel(self._parameters["CostDecrementTolerance"])
opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
Minimum = opt.optimize( Xini )
if self._parameters["optdisp"]:
- print("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum))
- print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value()))
- print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result()))
+ print(("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum)))
+ print(("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value())))
+ print(("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result())))
elif self._parameters["Minimizer"] == "NEWUOA" and PlatformInfo.has_nlopt:
import nlopt
opt = nlopt.opt(nlopt.LN_NEWUOA, Xini.size)
lb = lub[:,0] ; lb[numpy.isnan(lb)] = -float('inf')
ub = lub[:,1] ; ub[numpy.isnan(ub)] = +float('inf')
if self._parameters["optdisp"]:
- print("%s: upper bounds %s"%(opt.get_algorithm_name(),ub))
- print("%s: lower bounds %s"%(opt.get_algorithm_name(),lb))
+ print(("%s: upper bounds %s"%(opt.get_algorithm_name(),ub)))
+ print(("%s: lower bounds %s"%(opt.get_algorithm_name(),lb)))
opt.set_upper_bounds(ub)
opt.set_lower_bounds(lb)
opt.set_ftol_rel(self._parameters["CostDecrementTolerance"])
opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
Minimum = opt.optimize( Xini )
if self._parameters["optdisp"]:
- print("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum))
- print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value()))
- print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result()))
+ print(("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum)))
+ print(("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value())))
+ print(("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result())))
elif self._parameters["Minimizer"] == "SUBPLEX" and PlatformInfo.has_nlopt:
import nlopt
opt = nlopt.opt(nlopt.LN_SBPLX, Xini.size)
lb = lub[:,0] ; lb[numpy.isnan(lb)] = -float('inf')
ub = lub[:,1] ; ub[numpy.isnan(ub)] = +float('inf')
if self._parameters["optdisp"]:
- print("%s: upper bounds %s"%(opt.get_algorithm_name(),ub))
- print("%s: lower bounds %s"%(opt.get_algorithm_name(),lb))
+ print(("%s: upper bounds %s"%(opt.get_algorithm_name(),ub)))
+ print(("%s: lower bounds %s"%(opt.get_algorithm_name(),lb)))
opt.set_upper_bounds(ub)
opt.set_lower_bounds(lb)
opt.set_ftol_rel(self._parameters["CostDecrementTolerance"])
opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"])
Minimum = opt.optimize( Xini )
if self._parameters["optdisp"]:
- print("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum))
- print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value()))
- print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result()))
+ print(("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum)))
+ print(("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value())))
+ print(("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result())))
else:
raise ValueError("Error in Minimizer name: %s"%self._parameters["Minimizer"])
#
if "MahalanobisConsistency" in self._parameters["StoreSupplementaryCalculations"]:
self.StoredVariables["MahalanobisConsistency"].store( float( 2.*J/d.size ) )
if "SimulationQuantiles" in self._parameters["StoreSupplementaryCalculations"]:
- Qtls = map(float, self._parameters["Quantiles"])
+ Qtls = list(map(float, self._parameters["Quantiles"]))
nech = self._parameters["NumberOfSamplesForQuantiles"]
HtM = HO["Tangent"].asMatrix(ValueForMethodForm = Xa)
HtM = HtM.reshape(Y.size,Xa.size) # ADAO & check shape
msg = " ====" + "="*len(self._parameters["ResultTitle"]) + "====\n"
msg += " " + self._parameters["ResultTitle"] + "\n"
msg += " ====" + "="*len(self._parameters["ResultTitle"]) + "====\n"
- print("%s"%msg)
+ print(("%s"%msg))
#
msg = ("===> Information before launching:\n")
msg += (" -----------------------------\n")
for i in range(self._parameters["NumberOfRepetition"]):
if "CurrentState" in self._parameters["StoreSupplementaryCalculations"]:
self.StoredVariables["CurrentState"].store( numpy.ravel(Xn) )
- print(" %s\n"%("-"*75,))
+ print((" %s\n"%("-"*75,)))
if self._parameters["NumberOfRepetition"] > 1:
- print("===> Repetition step number %i on a total of %i\n"%(i+1,self._parameters["NumberOfRepetition"]))
+ print(("===> Repetition step number %i on a total of %i\n"%(i+1,self._parameters["NumberOfRepetition"])))
print("===> Launching direct operator evaluation\n")
#
Yn = Hm( Xn )
HO["Direct"].enableAvoidingRedundancy()
# ----------
#
- print(" %s\n"%("-"*75,))
+ print((" %s\n"%("-"*75,)))
if self._parameters["SetDebug"]:
print("===> End evaluation, deactivating debug if necessary\n")
logging.getLogger().setLevel(CUR_LEVEL)
msgs += "\n"
#
# ----------
- print("\nResults of gradient check by \"%s\" formula:"%self._parameters["ResiduFormula"])
+ print(("\nResults of gradient check by \"%s\" formula:"%self._parameters["ResiduFormula"]))
print(msgs)
#
if self._parameters["PlotAndSave"]:
if filename != "":
__g.hardcopy( filename, color=1)
if pause:
- raw_input('Please press return to continue...\n')
+ input('Please press return to continue...\n')
# ==============================================================================
if __name__ == "__main__":
#
# Sorties eventuelles
# -------------------
- print("\nResults of linearity check by \"%s\" formula:"%self._parameters["ResiduFormula"])
+ print(("\nResults of linearity check by \"%s\" formula:"%self._parameters["ResiduFormula"]))
print(msgs)
#
self._post_run(HO)
def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None):
self._pre_run(Parameters)
#
- if "BoxBounds" in self._parameters and (type(self._parameters["BoxBounds"]) is type([]) or type(self._parameters["BoxBounds"]) is type(())) and (len(self._parameters["BoxBounds"]) > 0):
+ if "BoxBounds" in self._parameters and (isinstance(self._parameters["BoxBounds"], type([])) or isinstance(self._parameters["BoxBounds"], type(()))) and (len(self._parameters["BoxBounds"]) > 0):
BoxBounds = self._parameters["BoxBounds"]
logging.debug("%s Prise en compte des bornes d'incréments de paramètres effectuee"%(self._name,))
else:
#
# Point de démarrage de l'optimisation : Xini = Xb
# ------------------------------------
- if type(Xb) is type(numpy.matrix([])):
+ if isinstance(Xb, type(numpy.matrix([]))):
Xini = Xb.A1.tolist()
elif Xb is not None:
Xini = list(Xb)
#
# Point de démarrage de l'optimisation : Xini = Xb
# ------------------------------------
- if type(Xb) is type(numpy.matrix([])):
+ if isinstance(Xb, type(numpy.matrix([]))):
Xini = Xb.A1.tolist()
else:
Xini = list(Xb)
CUR_LEVEL = logging.getLogger().getEffectiveLevel()
logging.getLogger().setLevel(logging.DEBUG)
print("===> Beginning of evaluation, activating debug\n")
- print(" %s\n"%("-"*75,))
+ print((" %s\n"%("-"*75,)))
#
# ----------
for i,Xx in enumerate(sampleList):
if self._parameters["SetDebug"]:
- print("===> Launching evaluation for state %i"%i)
+ print(("===> Launching evaluation for state %i"%i))
__Xn = numpy.asmatrix(numpy.ravel( Xx )).T
try:
Yn = Hm( __Xn )
# ----------
#
if self._parameters["SetDebug"]:
- print("\n %s\n"%("-"*75,))
+ print(("\n %s\n"%("-"*75,)))
print("===> End evaluation, deactivating debug if necessary\n")
logging.getLogger().setLevel(CUR_LEVEL)
#
#
# Sorties eventuelles
# -------------------
- print("\nResults of tangent check by \"%s\" formula:"%self._parameters["ResiduFormula"])
+ print(("\nResults of tangent check by \"%s\" formula:"%self._parameters["ResiduFormula"]))
print(msgs)
#
self._post_run(HO)
#
if appliedToX is not None:
self.__HO["AppliedToX"] = {}
- if type(appliedToX) is not dict:
+ if not isinstance(appliedToX, dict):
raise ValueError("Error: observation operator defined by \"appliedToX\" need a dictionary as argument.")
for key in list(appliedToX.keys()):
- if type( appliedToX[key] ) is type( numpy.matrix([]) ):
+ if isinstance(appliedToX[key], type( numpy.matrix([]) )):
# Pour le cas où l'on a une vraie matrice
self.__HO["AppliedToX"][key] = numpy.matrix( appliedToX[key].A1, numpy.float ).T
- elif type( appliedToX[key] ) is type( numpy.array([]) ) and len(appliedToX[key].shape) > 1:
+ elif isinstance(appliedToX[key], type( numpy.array([]) )) and len(appliedToX[key].shape) > 1:
# Pour le cas où l'on a un vecteur représenté en array avec 2 dimensions
self.__HO["AppliedToX"][key] = numpy.matrix( appliedToX[key].reshape(len(appliedToX[key]),), numpy.float ).T
else:
if not( min(__EM_shape) == max(__EM_shape) ):
raise ValueError("Shape characteristic of evolution operator (EM) is incorrect: \"%s\"."%(__EM_shape,))
#
- if len(self.__HO) > 0 and not(type(self.__HO) is type({})) and not( __HO_shape[1] == max(__Xb_shape) ):
+ if len(self.__HO) > 0 and not(isinstance(self.__HO, type({}))) and not( __HO_shape[1] == max(__Xb_shape) ):
raise ValueError("Shape characteristic of observation operator (H) \"%s\" and state (X) \"%s\" are incompatible."%(__HO_shape,__Xb_shape))
- if len(self.__HO) > 0 and not(type(self.__HO) is type({})) and not( __HO_shape[0] == max(__Y_shape) ):
+ if len(self.__HO) > 0 and not(isinstance(self.__HO, type({}))) and not( __HO_shape[0] == max(__Y_shape) ):
raise ValueError("Shape characteristic of observation operator (H) \"%s\" and observation (Y) \"%s\" are incompatible."%(__HO_shape,__Y_shape))
- if len(self.__HO) > 0 and not(type(self.__HO) is type({})) and len(self.__B) > 0 and not( __HO_shape[1] == __B_shape[0] ):
+ if len(self.__HO) > 0 and not(isinstance(self.__HO, type({}))) and len(self.__B) > 0 and not( __HO_shape[1] == __B_shape[0] ):
raise ValueError("Shape characteristic of observation operator (H) \"%s\" and a priori errors covariance matrix (B) \"%s\" are incompatible."%(__HO_shape,__B_shape))
- if len(self.__HO) > 0 and not(type(self.__HO) is type({})) and len(self.__R) > 0 and not( __HO_shape[0] == __R_shape[1] ):
+ if len(self.__HO) > 0 and not(isinstance(self.__HO, type({}))) and len(self.__R) > 0 and not( __HO_shape[0] == __R_shape[1] ):
raise ValueError("Shape characteristic of observation operator (H) \"%s\" and observation errors covariance matrix (R) \"%s\" are incompatible."%(__HO_shape,__R_shape))
#
if self.__B is not None and len(self.__B) > 0 and not( __B_shape[1] == max(__Xb_shape) ):
if self.__R is not None and len(self.__R) > 0 and not( __R_shape[1] == max(__Y_shape) ):
raise ValueError("Shape characteristic of observation errors covariance matrix (R) \"%s\" and observation (Y) \"%s\" are incompatible."%(__R_shape,__Y_shape))
#
- if self.__EM is not None and len(self.__EM) > 0 and not(type(self.__EM) is type({})) and not( __EM_shape[1] == max(__Xb_shape) ):
+ if self.__EM is not None and len(self.__EM) > 0 and not(isinstance(self.__EM, type({}))) and not( __EM_shape[1] == max(__Xb_shape) ):
raise ValueError("Shape characteristic of evolution model (EM) \"%s\" and state (X) \"%s\" are incompatible."%(__EM_shape,__Xb_shape))
#
- if self.__CM is not None and len(self.__CM) > 0 and not(type(self.__CM) is type({})) and not( __CM_shape[1] == max(__U_shape) ):
+ if self.__CM is not None and len(self.__CM) > 0 and not(isinstance(self.__CM, type({}))) and not( __CM_shape[1] == max(__U_shape) ):
raise ValueError("Shape characteristic of control model (CM) \"%s\" and control (U) \"%s\" are incompatible."%(__CM_shape,__U_shape))
#
if ("AlgorithmParameters" in self.__StoredInputs) \
self.__setParameters(Parameters)
#
# Corrections et complements
- if "Bounds" in self._parameters and (type(self._parameters["Bounds"]) is type([]) or type(self._parameters["Bounds"]) is type(())) and (len(self._parameters["Bounds"]) > 0):
+ if "Bounds" in self._parameters and (isinstance(self._parameters["Bounds"], type([])) or isinstance(self._parameters["Bounds"], type(()))) and (len(self._parameters["Bounds"]) > 0):
logging.debug("%s Prise en compte des bornes effectuee"%(self._name,))
else:
self._parameters["Bounds"] = None
def keys(self):
"D.keys() -> list of D's keys"
- return self.StoredVariables.keys()
+ return list(self.StoredVariables.keys())
def run(self, Xb=None, Y=None, H=None, M=None, R=None, B=None, Q=None, Parameters=None):
"""
dictionnaire des paramètres requis.
"""
if noDetails:
- ks = list(self.__required_parameters.keys())
- ks.sort()
+ ks = sorted(self.__required_parameters.keys())
return ks
else:
return self.__required_parameters
Permet de stocker les paramètres reçus dans le dictionnaire interne.
"""
self._parameters.update( fromDico )
for k in self.__required_parameters.keys():
- if k in fromDico.keys():
+ if k in fromDico:
self._parameters[k] = self.setParameterValue(k,fromDico[k])
else:
self._parameters[k] = self.setParameterValue(k)
# ---------------------------------------------------------
def __filteredIndexes(self, **kwargs):
"Function interne filtrant les index"
- __indexOfFilteredItems = range(len(self.__tags))
- __filteringKwTags = kwargs.keys()
+ __indexOfFilteredItems = list(range(len(self.__tags)))
+ __filteringKwTags = list(kwargs.keys())
if len(__filteringKwTags) > 0:
for tagKey in __filteringKwTags:
__tmp = []
"D.tagkeys() -> list of D's tag keys"
__allKeys = []
for dicotags in self.__tags:
- __allKeys.extend( dicotags.keys() )
- __allKeys = list(set(__allKeys))
- __allKeys.sort()
+ __allKeys.extend( list(dicotags.keys()) )
+ __allKeys = sorted(set(__allKeys))
return __allKeys
# def valueserie(self, item=None, allSteps=True, **kwargs):
if outputTag is not None and isinstance(outputTag,str) :
outputValues = []
for index in __indexOfFilteredItems:
- if outputTag in self.__tags[index].keys():
+ if outputTag in self.__tags[index]:
outputValues.append( self.__tags[index][outputTag] )
- outputValues = list(set(outputValues))
- outputValues.sort()
+ outputValues = sorted(set(outputValues))
return outputValues
#
# Dans le cas où la sortie donne les tags satisfaisants aux conditions
allTags = {}
for index in __indexOfFilteredItems:
allTags.update( self.__tags[index] )
- allKeys = list(allTags.keys())
- allKeys.sort()
+ allKeys = sorted(allTags.keys())
return allKeys
# ---------------------------------------------------------
elif item is not None and item < len(self.__values):
indexes.append(item)
else:
- indexes = indexes + range(len(self.__values))
+ indexes = indexes + list(range(len(self.__values)))
#
i = -1
for index in indexes:
if isinstance(steps,list) or isinstance(steps,numpy.ndarray):
Steps = list(steps)
else:
- Steps = range(len(self.__values[index]))
+ Steps = list(range(len(self.__values[index])))
#
self.__g.plot( self.__gnuplot.Data( Steps, self.__values[index], title=ltitle ) )
#
raise ValueError("Error: a file with this name \"%s\" already exists."%stepfilename)
self.__g.hardcopy(filename=stepfilename, color=1)
if self.__pause:
- raw_input('Please press return to continue...\n')
+ input('Please press return to continue...\n')
def __replots(self):
"""
if self.__dynamic and len(self.__values) < 2: return 0
#
- self.__g('set title "'+str(self.__title).encode('ascii','replace'))
+ self.__g('set title "'+str(self.__title))
- Steps = range(len(self.__values))
+ Steps = list(range(len(self.__values)))
self.__g.plot( self.__gnuplot.Data( Steps, self.__values, title=self.__ltitle ) )
#
if self.__pause:
- raw_input('Please press return to continue...\n')
+ input('Please press return to continue...\n')
# ---------------------------------------------------------
def mean(self):
if isinstance(steps,list) or isinstance(steps, numpy.ndarray):
Steps = list(steps)
else:
- Steps = range(len(self.__values[0]))
+ Steps = list(range(len(self.__values[0])))
self.__g = self.__gnuplot.Gnuplot() # persist=1
self.__g('set terminal '+self.__gnuplot.GnuplotOpts.default_term)
self.__g('set style data lines')
if filename != "":
self.__g.hardcopy(filename=filename, color=1)
if pause:
- raw_input('Please press return to continue...\n')
+ input('Please press return to continue...\n')
# ---------------------------------------------------------
def setDataObserver(self, HookFunction = None, HookParameters = None, Scheduler = None):
maxiter = int( 1e9 )
if sys.version.split()[0] < '3':
if isinstance(Scheduler,int): # Considéré comme une fréquence à partir de 0
- Schedulers = xrange( 0, maxiter, int(Scheduler) )
+ Schedulers = list(range( 0, maxiter, int(Scheduler)))
elif isinstance(Scheduler,xrange): # Considéré comme un itérateur
Schedulers = Scheduler
elif isinstance(Scheduler,(list,tuple)): # Considéré comme des index explicites
- Schedulers = [long(i) for i in Scheduler] # map( long, Scheduler )
+ Schedulers = [int(i) for i in Scheduler] # map( long, Scheduler )
else: # Dans tous les autres cas, activé par défaut
- Schedulers = xrange( 0, maxiter )
+ Schedulers = list(range( 0, maxiter))
else:
if isinstance(Scheduler,int): # Considéré comme une fréquence à partir de 0
- Schedulers = range( 0, maxiter, int(Scheduler) )
+ Schedulers = list(range( 0, maxiter, int(Scheduler)))
elif sys.version.split()[0] > '3' and isinstance(Scheduler,range): # Considéré comme un itérateur
Schedulers = Scheduler
elif isinstance(Scheduler,(list,tuple)): # Considéré comme des index explicites
Schedulers = [int(i) for i in Scheduler] # map( int, Scheduler )
else: # Dans tous les autres cas, activé par défaut
- Schedulers = range( 0, maxiter )
+ Schedulers = list(range( 0, maxiter))
#
# Stockage interne de l'observer dans la variable
# -----------------------------------------------
définition, ou un simple string qui est le nom de la fonction.
"""
if hasattr(HookFunction,"func_name"):
- name = str( HookFunction.func_name )
+ name = str( HookFunction.__name__ )
elif isinstance(HookFunction,str):
name = str( HookFunction )
else:
Stockage d'une valeur "value" pour le "step" dans la variable "name".
"""
if name is None: raise ValueError("Storable object name is required for storage.")
- if name not in self.__StoredObjects.keys():
+ if name not in self.__StoredObjects:
raise ValueError("No such name '%s' exists in storable objects."%name)
self.__StoredObjects[name].store( value=value, **kwargs )
type de Persistence et son type de base à chaque pas.
"""
if name is None: raise ValueError("Object name is required for adding an object.")
- if name in self.__StoredObjects.keys():
+ if name in self.__StoredObjects:
raise ValueError("An object with the same name '%s' already exists in storable objects. Choose another one."%name)
if basetype is None:
self.__StoredObjects[name] = persistenceType( name=str(name) )
Renvoie l'objet de type Persistence qui porte le nom demandé.
"""
if name is None: raise ValueError("Object name is required for retrieving an object.")
- if name not in self.__StoredObjects.keys():
+ if name not in self.__StoredObjects:
raise ValueError("No such name '%s' exists in stored objects."%name)
return self.__StoredObjects[name]
fonctionne.
"""
if name is None: raise ValueError("Object name is required for setting an object.")
- if name in self.__StoredObjects.keys():
+ if name in self.__StoredObjects:
raise ValueError("An object with the same name '%s' already exists in storable objects. Choose another one."%name)
self.__StoredObjects[name] = objet
Supprime un objet de la liste des objets stockables.
"""
if name is None: raise ValueError("Object name is required for retrieving an object.")
- if name not in self.__StoredObjects.keys():
+ if name not in self.__StoredObjects:
raise ValueError("No such name '%s' exists in stored objects."%name)
del self.__StoredObjects[name]
def values(self):
"D.values() -> list of D's values"
- return self.__StoredObjects.values()
+ return list(self.__StoredObjects.values())
def items(self):
"D.items() -> list of D's (key, value) pairs, as 2-tuples"
- return self.__StoredObjects.items()
+ return list(self.__StoredObjects.items())
# ---------------------------------------------------------
def get_stored_objects(self, hideVoidObjects = False):
"Renvoie la liste des objets présents"
- objs = self.__StoredObjects.keys()
+ objs = list(self.__StoredObjects.keys())
if hideVoidObjects:
usedObjs = []
for k in objs:
finally:
pass
objs = usedObjs
- objs = list(objs)
- objs.sort()
+ objs = sorted(objs)
return objs
# ---------------------------------------------------------
filename = os.path.abspath( filename )
#
if sys.version.split()[0] < '3':
- import cPickle as lPickle
+ import pickle as lPickle
else:
import pickle as lPickle
if mode == "pickle":
filename = os.path.abspath( filename )
#
if sys.version.split()[0] < '3':
- import cPickle as lPickle
+ import pickle as lPickle
else:
import pickle as lPickle
if mode == "pickle":
else:
pkl_file = open(filename, 'rb')
output = lPickle.load(pkl_file)
- for k in output.keys():
+ for k in list(output.keys()):
self[k] = output[k]
else:
raise ValueError("Load mode '%s' unknown. Choose another one."%mode)
self.__paths["daMatrices"] = os.path.join(parent,"daMatrices")
self.__paths["daNumerics"] = os.path.join(parent,"daNumerics")
#
- for v in self.__paths.values():
+ for v in list(self.__paths.values()):
sys.path.insert(0, v )
#
# Conserve en unique exemplaire chaque chemin
def keys(self):
"D.keys() -> list of D's keys"
- __keys = list(self.__values.keys())
- __keys.sort()
+ __keys = sorted(self.__values.keys())
return __keys
# def has_key(self, name):
def keys_in_presentation_order(self):
"D.keys_in_presentation_order() -> list of D's keys in presentation order"
__orders = []
- for k in self.keys():
+ for k in list(self.keys()):
__orders.append( self.__values[k]['order'] )
__reorder = numpy.array(__orders).argsort()
- return list(numpy.array(self.keys())[__reorder])
+ return list(numpy.array(list(self.keys()))[__reorder])
# ==============================================================================
ObserverTemplates = TemplateStorage()
if filename != "":
self.__g.hardcopy(filename=filename, color=1)
if pause:
- raw_input('Please press return to continue...\n')
+ input('Please press return to continue...\n')
#
return 1
if Vector.size < 1:
raise ValueError("The given vector must not be empty")
if steps is None:
- Steps = range(len( vector ))
- elif not ( type(steps) is type([]) or type(steps) is not type(numpy.array([])) ):
+ Steps = list(range(len( vector )))
+ elif not isinstance(steps, (list, tuple, numpy.ndarray)):
raise ValueError("The steps must be given as a list/tuple.")
else:
Steps = list(steps)
if filename != "":
self.__g.hardcopy(filename=filename, color=1)
if pause:
- raw_input('Please press return to continue...\n')
+ input('Please press return to continue...\n')
#
return 1
"""
if vector is None:
raise ValueError("One vector must be given to plot it.")
- if type(vector) is not type([]) and type(vector) is not type(()):
+ if not isinstance(vector, (list, tuple)):
raise ValueError("The vector(s) must be given as a list/tuple.")
if ltitle is None or len(ltitle) != len(vector):
ltitle = ["" for i in range(len(vector))]
if VectorList[-1].size < 1:
raise ValueError("Each given vector must not be empty.")
if steps is None:
- Steps = range(len(vector[0]))
- elif not ( type(steps) is type([]) or type(steps) is not type(numpy.array([])) ):
+ Steps = list(range(len(vector[0])))
+ elif not isinstance(steps, (list, tuple, numpy.ndarray)):
raise ValueError("The steps must be given as a list/tuple.")
else:
Steps = list(steps)
vect2 = [0,0,0,0,0,0,0,0,0,0]
D.calculate(vect1,vect2)
print(" Les valeurs de RMS attendues sont les suivantes : [1.0, 1.0, 1.0, 3.0, 0.53162016515553656, 0.73784217096601323]")
- print(" Les RMS obtenues................................: %s"%(D[:],))
- print(" La moyenne......................................: %s"%(D.mean(),))
+ print((" Les RMS obtenues................................: %s"%(D[:],)))
+ print((" La moyenne......................................: %s"%(D.mean(),)))
print("")
# ----------------------
x1 = numpy.matrix(([3. , 4., 5. ]))
x2 = numpy.matrix(([1.5, 2., 2.5]))
- print(" L'écart entre les observations et l'ébauche est OMB : %s"%(x1,))
- print(" La moyenne de OMB (i.e. le biais) est de............: %s"%(x1.mean(),))
- print(" La variance de OMB est de...........................: %s"%(x1.var(),))
- print(" L'écart entre les observations et l'analyse est OMA : %s"%(x2,))
- print(" La moyenne de OMA (i.e. le biais) est de............: %s"%(x2.mean(),))
- print(" La variance de OMA est de...........................: %s"%(x2.var(),))
+ print((" L'écart entre les observations et l'ébauche est OMB : %s"%(x1,)))
+ print((" La moyenne de OMB (i.e. le biais) est de............: %s"%(x1.mean(),)))
+ print((" La variance de OMB est de...........................: %s"%(x1.var(),)))
+ print((" L'écart entre les observations et l'analyse est OMA : %s"%(x2,)))
+ print((" La moyenne de OMA (i.e. le biais) est de............: %s"%(x2.mean(),)))
+ print((" La variance de OMA est de...........................: %s"%(x2.var(),)))
#
D.calculate( vectorOMB = x1, vectorOMA = x2)
if not D[0] :
#
# Vecteur de type array
# ---------------------
- x1 = numpy.array(range(11))
- x2 = numpy.matrix(range(-10,12,2))
- print(" L'écart entre les observations et l'ébauche est OMB : %s"%(x1,))
- print(" La moyenne de OMB (i.e. le biais) est de............: %s"%(x1.mean(),))
- print(" La variance de OMB est de...........................: %s"%(x1.var(),))
- print(" L'écart entre les observations et l'analyse est OMA : %s"%(x2,))
- print(" La moyenne de OMA (i.e. le biais) est de............: %s"%(x2.mean(),))
- print(" La variance de OMA est de...........................: %s"%(x2.var(),))
+ x1 = numpy.array(list(range(11)))
+ x2 = numpy.matrix(list(range(-10,12,2)))
+ print((" L'écart entre les observations et l'ébauche est OMB : %s"%(x1,)))
+ print((" La moyenne de OMB (i.e. le biais) est de............: %s"%(x1.mean(),)))
+ print((" La variance de OMB est de...........................: %s"%(x1.var(),)))
+ print((" L'écart entre les observations et l'analyse est OMA : %s"%(x2,)))
+ print((" La moyenne de OMA (i.e. le biais) est de............: %s"%(x2.mean(),)))
+ print((" La variance de OMA est de...........................: %s"%(x2.var(),)))
#
D.calculate( vectorOMB = x1, vectorOMA = x2)
if not D[1] :
try:
mod = os.path.join(Function.__globals__['filepath'],Function.__globals__['filename'])
except:
- mod = os.path.abspath(Function.im_func.__globals__['__file__'])
+ mod = os.path.abspath(Function.__func__.__globals__['__file__'])
if not os.path.isfile(mod):
raise ImportError("No user defined function or method found with the name %s"%(mod,))
self.__userFunction__modl = os.path.basename(mod).replace('.pyc','').replace('.pyo','').replace('.py','')
#
# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
#
-# Author: André Ribes, andre.ribes@edf.fr, EDF R&D
+# Author: André Ribes, andre.ribes@edf.fr, EDF R&D
"""
- Ce module sert pour charger les paramètres de configuration d'EFICAS
+ Ce module sert pour charger les paramètres de configuration d'EFICAS
"""
# Modules Python
# print "passage dans la surcharge de configuration pour Adao"
def gener(self,obj,format='brut',config=None,appli=None):
self.logger.debug("method gener called")
self.text_comm = PythonGenerator.gener(self, obj, format, config)
- for key, value in self.dictMCVal.iteritems():
+ for key, value in list(self.dictMCVal.items()):
self.logger.debug("dictMCVAl %s %s" % (key,value))
try :
self.logger.debug("EFICAS case is not valid, python command file for YACS schema generation cannot be created")
self.logger.debug(self.text_da)
self.dictMCVal = {}
- # traceback.print_exc()
+ # traceback.print_exc()
return self.text_comm
def writeDefault(self, fn):
def generate_da(self):
- if "__CHECKING_STUDY__StudyName" in self.dictMCVal.keys():
+ if "__CHECKING_STUDY__StudyName" in list(self.dictMCVal.keys()):
self.type_of_study = "CHECKING_STUDY"
else:
self.type_of_study = "ASSIMILATION_STUDY"
# Extraction de StudyName
self.text_da += "study_config['Name'] = '" + self.dictMCVal["__"+self.type_of_study+"__StudyName"] + "'\n"
# Extraction de Debug
- if "__"+self.type_of_study+"__Debug" in self.dictMCVal.keys():
+ if "__"+self.type_of_study+"__Debug" in list(self.dictMCVal.keys()):
self.text_da += "study_config['Debug'] = '" + str(self.dictMCVal["__"+self.type_of_study+"__Debug"]) + "'\n"
else:
self.text_da += "study_config['Debug'] = '0'\n"
# Extraction de Algorithm et de ses parametres
- if "__"+self.type_of_study+"__AlgorithmParameters__Algorithm" in self.dictMCVal.keys():
+ if "__"+self.type_of_study+"__AlgorithmParameters__Algorithm" in list(self.dictMCVal.keys()):
self.text_da += "study_config['Algorithm'] = '" + self.dictMCVal["__"+self.type_of_study+"__AlgorithmParameters__Algorithm"] + "'\n"
self.add_AlgorithmParameters()
- elif "__"+self.type_of_study+"__Algorithm" in self.dictMCVal.keys():
+ elif "__"+self.type_of_study+"__Algorithm" in list(self.dictMCVal.keys()):
self.text_da += "study_config['Algorithm'] = '" + self.dictMCVal["__"+self.type_of_study+"__Algorithm"] + "'\n"
- if "__"+self.type_of_study+"__Background__INPUT_TYPE" in self.dictMCVal.keys():
+ if "__"+self.type_of_study+"__Background__INPUT_TYPE" in list(self.dictMCVal.keys()):
self.add_data("Background")
- if "__"+self.type_of_study+"__BackgroundError__INPUT_TYPE" in self.dictMCVal.keys():
+ if "__"+self.type_of_study+"__BackgroundError__INPUT_TYPE" in list(self.dictMCVal.keys()):
self.add_data("BackgroundError")
- if "__"+self.type_of_study+"__Observation__INPUT_TYPE" in self.dictMCVal.keys():
+ if "__"+self.type_of_study+"__Observation__INPUT_TYPE" in list(self.dictMCVal.keys()):
self.add_data("Observation")
- if "__"+self.type_of_study+"__ObservationError__INPUT_TYPE" in self.dictMCVal.keys():
+ if "__"+self.type_of_study+"__ObservationError__INPUT_TYPE" in list(self.dictMCVal.keys()):
self.add_data("ObservationError")
- if "__"+self.type_of_study+"__CheckingPoint__INPUT_TYPE" in self.dictMCVal.keys():
+ if "__"+self.type_of_study+"__CheckingPoint__INPUT_TYPE" in list(self.dictMCVal.keys()):
self.add_data("CheckingPoint")
- if "__"+self.type_of_study+"__ObservationOperator__INPUT_TYPE" in self.dictMCVal.keys():
+ if "__"+self.type_of_study+"__ObservationOperator__INPUT_TYPE" in list(self.dictMCVal.keys()):
self.add_data("ObservationOperator")
- if "__"+self.type_of_study+"__EvolutionModel__INPUT_TYPE" in self.dictMCVal.keys():
+ if "__"+self.type_of_study+"__EvolutionModel__INPUT_TYPE" in list(self.dictMCVal.keys()):
self.add_data("EvolutionModel")
- if "__"+self.type_of_study+"__EvolutionError__INPUT_TYPE" in self.dictMCVal.keys():
+ if "__"+self.type_of_study+"__EvolutionError__INPUT_TYPE" in list(self.dictMCVal.keys()):
self.add_data("EvolutionError")
- if "__"+self.type_of_study+"__ControlInput__INPUT_TYPE" in self.dictMCVal.keys():
+ if "__"+self.type_of_study+"__ControlInput__INPUT_TYPE" in list(self.dictMCVal.keys()):
self.add_data("ControlInput")
self.add_variables()
# Parametres optionnels
# Extraction du StudyRepertory
- if "__"+self.type_of_study+"__StudyRepertory" in self.dictMCVal.keys():
+ if "__"+self.type_of_study+"__StudyRepertory" in list(self.dictMCVal.keys()):
self.text_da += "study_config['Repertory'] = '" + self.dictMCVal["__"+self.type_of_study+"__StudyRepertory"] + "'\n"
# Extraction de UserPostAnalysis
- if "__"+self.type_of_study+"__UserPostAnalysis__FROM" in self.dictMCVal.keys():
+ if "__"+self.type_of_study+"__UserPostAnalysis__FROM" in list(self.dictMCVal.keys()):
self.add_UserPostAnalysis()
- if "__"+self.type_of_study+"__UserDataInit__INIT_FILE" in self.dictMCVal.keys():
+ if "__"+self.type_of_study+"__UserDataInit__INIT_FILE" in list(self.dictMCVal.keys()):
self.add_init()
- if "__"+self.type_of_study+"__Observers__SELECTION" in self.dictMCVal.keys():
+ if "__"+self.type_of_study+"__Observers__SELECTION" in list(self.dictMCVal.keys()):
self.add_observers()
def add_data(self, data_name):
self.text_da += data_name + "_config['Type'] = '" + data_type + "'\n"
self.text_da += data_name + "_config['From'] = '" + from_type + "'\n"
self.text_da += data_name + "_config['Data'] = '" + data + "'\n"
- if search_text+"Stored" in self.dictMCVal.keys():
+ if search_text+"Stored" in list(self.dictMCVal.keys()):
self.text_da += data_name + "_config['Stored'] = '" + str(self.dictMCVal[search_text+"Stored"]) + "'\n"
self.text_da += "study_config['" + data_name + "'] = " + data_name + "_config\n"
self.text_da += data_name + "_ScriptWithOneFunction['Script']['Adjoint'] = '" + data + "'\n"
self.text_da += data_name + "_ScriptWithOneFunction['DifferentialIncrement'] = " + str(float(self.dictMCVal[search_type + "SCRIPTWITHONEFUNCTION_DATA__DifferentialIncrement"])) + "\n"
self.text_da += data_name + "_ScriptWithOneFunction['CenteredFiniteDifference'] = " + str(self.dictMCVal[search_type + "SCRIPTWITHONEFUNCTION_DATA__CenteredFiniteDifference"]) + "\n"
- if search_type + "SCRIPTWITHONEFUNCTION_DATA__EnableMultiProcessing" in self.dictMCVal.keys():
+ if search_type + "SCRIPTWITHONEFUNCTION_DATA__EnableMultiProcessing" in list(self.dictMCVal.keys()):
self.text_da += data_name + "_ScriptWithOneFunction['EnableMultiProcessing'] = " + str(self.dictMCVal[search_type + "SCRIPTWITHONEFUNCTION_DATA__EnableMultiProcessing"]) + "\n"
- if search_type + "SCRIPTWITHONEFUNCTION_DATA__NumberOfProcesses" in self.dictMCVal.keys():
+ if search_type + "SCRIPTWITHONEFUNCTION_DATA__NumberOfProcesses" in list(self.dictMCVal.keys()):
self.text_da += data_name + "_ScriptWithOneFunction['NumberOfProcesses'] = " + str(self.dictMCVal[search_type + "SCRIPTWITHONEFUNCTION_DATA__NumberOfProcesses"]) + "\n"
self.text_da += data_name + "_config = {}\n"
self.text_da += data_name + "_config['Type'] = 'Function'\n"
self.text_da += "Init_config['From'] = 'Script'\n"
self.text_da += "Init_config['Data'] = '" + init_file_data + "'\n"
self.text_da += "Init_config['Target'] = ["
- if type(init_target_list) is type("str"):
+ if isinstance(init_target_list, str):
self.text_da += "'" + init_target_list + "',"
else:
for target in init_target_list:
def add_AlgorithmParameters(self):
- if not self.dictMCVal.has_key("__"+self.type_of_study+"__AlgorithmParameters__Parameters"): return
+ if "__"+self.type_of_study+"__AlgorithmParameters__Parameters" not in self.dictMCVal: return
data_name = "AlgorithmParameters"
data_type = "Dict"
self.text_da += "study_config['" + data_name + "'] = " + data_name + "_config\n"
elif from_type == "Defaults":
base = "__"+self.type_of_study+"__AlgorithmParameters__Parameters"
- keys = [k for k in self.dictMCVal.keys() if base in k]
+ keys = [k for k in list(self.dictMCVal.keys()) if base in k]
keys.remove(base)
keys = [k.replace(base,'') for k in keys]
data = '{'
def add_variables(self):
# Input variables
- if "__"+self.type_of_study+"__InputVariables__NAMES" in self.dictMCVal.keys():
+ if "__"+self.type_of_study+"__InputVariables__NAMES" in list(self.dictMCVal.keys()):
names = []
sizes = []
if isinstance(self.dictMCVal["__"+self.type_of_study+"__InputVariables__NAMES"], type("")):
self.text_da += "study_config['InputVariables'] = inputvariables_config\n"
# Output variables
- if "__"+self.type_of_study+"__OutputVariables__NAMES" in self.dictMCVal.keys():
+ if "__"+self.type_of_study+"__OutputVariables__NAMES" in list(self.dictMCVal.keys()):
names = []
sizes = []
if isinstance(self.dictMCVal["__"+self.type_of_study+"__OutputVariables__NAMES"], type("")):
# Write observers in the python command file
number = 2
self.text_da += "observers = {}\n"
- for observer in observers.keys():
+ for observer in list(observers.keys()):
number += 1
self.text_da += "observers[\"" + observer + "\"] = {}\n"
self.text_da += "observers[\"" + observer + "\"][\"number\"] = " + str(number) + "\n"
self.text_da += "observers[\"" + observer + "\"][\"Template\"] = \"\"\"" + observers[observer]["template"] + "\"\"\"\n"
else:
self.text_da += "observers[\"" + observer + "\"][\"Script\"] = \"" + observers[observer]["file"] + "\"\n"
- if "scheduler" in observers[observer].keys():
+ if "scheduler" in list(observers[observer].keys()):
self.text_da += "observers[\"" + observer + "\"][\"scheduler\"] = \"\"\"" + observers[observer]["scheduler"] + "\"\"\"\n"
- if "info" in observers[observer].keys():
+ if "info" in list(observers[observer].keys()):
self.text_da += "observers[\"" + observer + "\"][\"info\"] = \"\"\"" + observers[observer]["info"] + "\"\"\"\n"
self.text_da += "study_config['Observers'] = observers\n"
# Scheduler
scheduler_key_name = observer_eficas_name + "Scheduler"
- if scheduler_key_name in self.dictMCVal.keys():
+ if scheduler_key_name in list(self.dictMCVal.keys()):
observers[observer]["scheduler"] = self.dictMCVal[scheduler_key_name]
# Info
info_key_name = observer_eficas_name + "Info"
- if info_key_name in self.dictMCVal.keys():
+ if info_key_name in list(self.dictMCVal.keys()):
observers[observer]["info"] = self.dictMCVal[info_key_name]
options, args = parser.parse_args()
if len(options.infile) == 0:
- print
+ print()
parser.print_help()
- print
+ print()
sys.exit(1)
traduc(options.infile,options.outfile)
options, args = parser.parse_args()
if len(options.infile) == 0:
- print
+ print()
parser.print_help()
- print
+ print()
sys.exit(1)
traduc(options.infile,options.outfile)
options, args = parser.parse_args()
if len(options.infile) == 0:
- print
+ print()
parser.print_help()
- print
+ print()
sys.exit(1)
traduc(options.infile,options.outfile)
options, args = parser.parse_args()
if len(options.infile) == 0:
- print
+ print()
parser.print_help()
- print
+ print()
sys.exit(1)
traduc(options.infile,options.outfile)
options, args = parser.parse_args()
if len(options.infile) == 0:
- print
+ print()
parser.print_help()
- print
+ print()
sys.exit(1)
traduc(options.infile,options.outfile)
options, args = parser.parse_args()
if len(options.infile) == 0:
- print
+ print()
parser.print_help()
- print
+ print()
sys.exit(1)
traduc(options.infile,options.outfile)
options, args = parser.parse_args()
if len(options.infile) == 0:
- print
+ print()
parser.print_help()
- print
+ print()
sys.exit(1)
traduc(options.infile,options.outfile)
options, args = parser.parse_args()
if len(options.infile) == 0:
- print
+ print()
parser.print_help()
- print
+ print()
sys.exit(1)
traduc(options.infile,options.outfile)
options, args = parser.parse_args()
if len(options.infile) == 0:
- print
+ print()
parser.print_help()
- print
+ print()
sys.exit(1)
traduc(options.infile,options.outfile)
debug("tabChanged " + str(index))
# This signal is also emit when a new case is created/added
# On regarde que le dictionnaire contient l'index
- if index in self.viewmanager.dict_editors.keys():
+ if index in list(self.viewmanager.dict_editors.keys()):
self.notifyObserver(EficasEvent.EVENT_TYPES.TABCHANGED, callbackId=self.viewmanager.dict_editors[index])
#######
def selectCase(self, editor):
rtn = False
- for indexEditor in self.viewmanager.dict_editors.keys():
+ for indexEditor in list(self.viewmanager.dict_editors.keys()):
if editor is self.viewmanager.dict_editors[indexEditor]:
self.viewmanager.myQtab.setCurrentIndex(indexEditor)
rtn = True
def _setContext( studyID ):
global __study2context__, __current_context__
QApplication.processEvents()
- if not __study2context__.has_key(studyID):
+ if studyID not in __study2context__:
__study2context__[studyID] = GUIcontext()
pass
__current_context__ = __study2context__[studyID]
# called when module is initialized
# return list of 2d/3d views to be used ny the module
def views():
- print "views"
+ print("views")
return []
def createPreferences():
import eficasSalome
from Ihm import CONNECTOR
-import adaoGuiHelper
-import adaoStudyEditor
+from . import adaoGuiHelper
+from . import adaoStudyEditor
class AdaoCase:
msg += "case with the ADAO/EFICAS editor."
return msg
- if not os.environ.has_key("ADAO_ROOT_DIR"):
+ if "ADAO_ROOT_DIR" not in os.environ:
return "Please add ADAO_ROOT_DIR to your environnement."
adao_path = os.environ["ADAO_ROOT_DIR"]
def validationReportforJDC(self):
rtn = "<i>Validation report is empty.</i>"
if self.eficas_editor.jdc:
- rtn = u"Validation report for the selected ADAO case:\n\n"
- rtn += unicode( self.eficas_editor.jdc.report())
+ rtn = "Validation report for the selected ADAO case:\n\n"
+ rtn += str( self.eficas_editor.jdc.report())
return rtn
def showTreeAdaoCase(self):
import SalomePyQt
__sgPyQt = SalomePyQt.SalomePyQt()
-import adaoModuleHelper
+from . import adaoModuleHelper
from daUtils.qtversion import useQT5
if useQT5:
from PyQt5 import QtGui, QtCore
from daEficasWrapper.adaoEficasWrapper import AdaoEficasWrapper
from daUtils.adaoEficasEvent import *
-import adaoGuiHelper
-import adaoStudyEditor
+from . import adaoGuiHelper
+from . import adaoStudyEditor
from daUtils import adaoLogger
__cases__ = {}
adaoLogger.debug("currentSelectionChanged")
salomeStudyItem = adaoGuiHelper.getSelectedItem()
if salomeStudyItem is not None:
- for case_editor, adao_case in self.cases.iteritems():
+ for case_editor, adao_case in list(self.cases.items()):
if adao_case.salome_study_item.GetID() == salomeStudyItem.GetID():
self.eficas_manager.selectCase(adao_case.eficas_editor)
break
et la selection dans l'etude SALOME
"""
editor = eficasEvent.callbackId
- for case_editor, adao_case in self.cases.iteritems():
+ for case_editor, adao_case in list(self.cases.items()):
if case_editor is editor:
adaoGuiHelper.selectItem(adao_case.salome_study_item.GetID())
break
editor = self.eficas_manager.getCurrentEditor()
# 2: sync with SALOME GUI is a tab is opened
if editor:
- for case_editor, adao_case in self.cases.iteritems():
+ for case_editor, adao_case in list(self.cases.items()):
if case_editor is editor:
adaoGuiHelper.selectItem(adao_case.salome_study_item.GetID())
break
# dans le GUI d'Eficas
self.harmonizeSelectionFromEficas()
salomeStudyItem = adaoGuiHelper.getSelectedItem()
- for case_name, adao_case in self.cases.iteritems():
+ for case_name, adao_case in list(self.cases.items()):
if adao_case.salome_study_item.GetID() == salomeStudyItem.GetID():
if not adao_case.isOk():
adaoLogger.debug("Cas invalide, donc il est sauvegarde, mais il ne peut pas etre exporte vers YACS ensuite")
# dans le GUI d'Eficas
self.harmonizeSelectionFromEficas()
salomeStudyItem = adaoGuiHelper.getSelectedItem()
- for case_name, adao_case in self.cases.iteritems():
+ for case_name, adao_case in list(self.cases.items()):
if adao_case.salome_study_item.GetID() == salomeStudyItem.GetID():
if not adao_case.isOk():
adaoLogger.debug("Cas invalide, donc il est sauvegarde, mais il ne peut pas etre exporte vers YACS ensuite")
# dans le GUI d'Eficas
self.harmonizeSelectionFromEficas()
salomeStudyItem = adaoGuiHelper.getSelectedItem()
- for case_name, adao_case in self.cases.iteritems():
+ for case_name, adao_case in list(self.cases.items()):
if adao_case.salome_study_item.GetID() == salomeStudyItem.GetID():
self.eficas_manager.adaoFileClose(adao_case)
break
adaoLogger.debug("Validation du cas par un rapport sur le JDC")
self.harmonizeSelectionFromEficas()
salomeStudyItem = adaoGuiHelper.getSelectedItem()
- for case_name, adao_case in self.cases.iteritems():
+ for case_name, adao_case in list(self.cases.items()):
if adao_case.salome_study_item.GetID() == salomeStudyItem.GetID():
msg = adao_case.validationReportforJDC()
adaoGuiHelper.gui_information(SalomePyQt.SalomePyQt().getDesktop(), msg)
adaoLogger.debug("Validation du cas par un rapport sur le JDC")
self.harmonizeSelectionFromEficas()
salomeStudyItem = adaoGuiHelper.getSelectedItem()
- for case_name, adao_case in self.cases.iteritems():
+ for case_name, adao_case in list(self.cases.items()):
if adao_case.salome_study_item.GetID() == salomeStudyItem.GetID():
msg = adao_case.showTreeAdaoCase()
break
# dans le GUI d'Eficas
self.harmonizeSelectionFromEficas()
salomeStudyItem = adaoGuiHelper.getSelectedItem()
- for case_name, adao_case in self.cases.iteritems():
+ for case_name, adao_case in list(self.cases.items()):
if adao_case.salome_study_item.GetID() == salomeStudyItem.GetID():
if adao_case.isOk():
msg = adao_case.exportCaseToYACS()
"""
Main switch function for ui actions processing
"""
- if ACTIONS_MAP.has_key(actionId):
+ if actionId in ACTIONS_MAP:
try:
functionName = ACTIONS_MAP[actionId]
getattr(self,functionName)()
from salome.kernel import studyedit
-import adaoModuleHelper
+from . import adaoModuleHelper
#
# ==============================================================================
# Interface of an eficas observer (for implementing the subject/observer pattern)
# ==============================================================================
#
-from enumerate import Enumerate
+from .enumerate import Enumerate
class EficasObserver:
"""
Return true if this enumerate contains the specified key
@key a key to test
"""
- return (key in self._dict_keynumbers.keys())
+ return (key in list(self._dict_keynumbers.keys()))
def isValid(self, value):
- return (value in self._dict_keynumbers.values())
+ return (value in list(self._dict_keynumbers.values()))
def listkeys(self):
- list = self._dict_keynumbers.keys()
- list.sort()
+ list = sorted(self._dict_keynumbers.keys())
return list
def listvalues(self):
- list = self._dict_keynumbers.values()
- list.sort()
+ list = sorted(self._dict_keynumbers.values())
return list
#
'SEP',
'OTHER'
])
- print TYPES_LIST.listvalues()
+ print(TYPES_LIST.listvalues())
return True
def TEST_createFromList():
'MED',
'SMESH'])
- print codes.KERNEL
- print codes.GEOM
+ print(codes.KERNEL)
+ print(codes.GEOM)
if (codes.KERNEL == 0 and codes.GEOM == 2):
return True
else:
codes = Enumerate(aList.split())
- print codes.KERNEL
- print codes.GEOM
+ print(codes.KERNEL)
+ print(codes.GEOM)
if (codes.KERNEL == 0 and codes.GEOM == 2):
return True
else:
'MED',
'SMESH'])
- print "VISU in enumerate?", codes.contains("VISU")
+ print("VISU in enumerate?", codes.contains("VISU"))
if ( not codes.contains("VISU") ):
return True
else:
'MED',
'SMESH'], offset=20)
- print codes.KERNEL
- print codes.GEOM
+ print(codes.KERNEL)
+ print(codes.GEOM)
if (codes.KERNEL == 20 and codes.GEOM == 22):
return True
else:
'MED',
'SMESH'], offset=20)
- print codes.listvalues()
+ print(codes.listvalues())
if codes.listvalues() != [20,21,22,23,24]:
return False
return True
#
# Author: André Ribes, andre.ribes@edf.fr, EDF R&D
-from daOptimizerLoop import *
+from .daOptimizerLoop import *
import SALOMERuntime
import pilot
-import pickle, cPickle
+import pickle
import numpy
import threading
self.optim_algo.pool.pushInSample(local_counter, sample)
# 3: Wait
- while 1:
+ while True:
#print "waiting"
self.optim_algo.signalMasterAndWait()
#print "signal"
#print "sync false is not yet implemented"
self.optim_algo.setError("sync == false not yet implemented")
- def Tangent(self, (X, dX), sync = 1):
+ def Tangent(self, pair, sync = 1):
# print "Call Tangent OptimizerHooks"
+ (X, dX) = pair
if sync == 1:
# 1: Get a unique sample number
self.optim_algo.counter_lock.acquire()
self.optim_algo.pool.pushInSample(local_counter, sample)
# 3: Wait
- while 1:
+ while True:
self.optim_algo.signalMasterAndWait()
if self.optim_algo.isTerminationRequested():
self.optim_algo.pool.destroyAll()
#print "sync false is not yet implemented"
self.optim_algo.setError("sync == false not yet implemented")
- def Adjoint(self, (X, Y), sync = 1):
+ def Adjoint(self, pair, sync = 1):
# print "Call Adjoint OptimizerHooks"
+ (X, Y) = pair
if sync == 1:
# 1: Get a unique sample number
self.optim_algo.counter_lock.acquire()
self.optim_algo.pool.pushInSample(local_counter, sample)
# 3: Wait
- while 1:
+ while True:
#print "waiting"
self.optim_algo.signalMasterAndWait()
#print "signal"
#print "[Debug] Input is ", input
str_da_study = input.getStringValue()
try:
- self.da_study = cPickle.loads(str_da_study)
+ self.da_study = pickle.loads(str_da_study)
except ValueError as e:
raise ValueError("\n\n Handling internal error in study exchange (message: \"%s\").\n The case is probably too big (bigger than the physical plus the virtual memory available).\n Try if possible to store the covariance matrices in sparse format.\n"%(str(e),))
#print "[Debug] da_study is ", self.da_study
# print "Not setting Hooks for EvolutionModel"
# Set Observers
- for observer_name in self.da_study.observers_dict.keys():
+ for observer_name in list(self.da_study.observers_dict.keys()):
# print "observers %s found" % observer_name
self.has_observer = True
if self.da_study.observers_dict[observer_name]["scheduler"] != "":
self.ADD.setDataObserver(observer_name, HookFunction=self.obs, HookParameters = observer_name)
# Start Assimilation Study
- print "Launching the analysis\n"
+ print("Launching the analysis\n")
try:
self.ADD.analyze()
except Exception as e:
- if type(e) == type(SyntaxError()): msg = "at %s: %s"%(e.offset, e.text)
+ if isinstance(e, SyntaxError): msg = "at %s: %s"%(e.offset, e.text)
else: msg = ""
raise ValueError("during execution, the following error occurs:\n\n%s %s\n\nSee also the potential messages, which can show the origin of the above error, in the YACS GUI or in the launching terminal."%(str(e),msg))
# Remove Data Observer, so you can ...
var.removeDataObserver(self.obs)
# Pickle then ...
- var_str = cPickle.dumps(var)
+ var_str = pickle.dumps(var)
# Add Again Data Observer
if self.da_study.observers_dict[info]["scheduler"] != "":
self.ADD.setDataObserver(info, HookFunction=self.obs, Scheduler = self.da_study.observers_dict[info]["scheduler"], HookParameters = info)
# Wait
import sys, traceback
try:
- while 1:
+ while True:
self.signalMasterAndWait()
if self.isTerminationRequested():
self.pool.destroyAll()
self.counter_lock.release()
break
except:
- print "Exception in user code:"
- print '-'*60
+ print("Exception in user code:")
+ print('-'*60)
traceback.print_exc(file=sys.stdout)
- print '-'*60
+ print('-'*60)
def getAlgoResult(self):
# Remove data observers, required to pickle assimilation study object
- for observer_name in self.da_study.observers_dict.keys():
+ for observer_name in list(self.da_study.observers_dict.keys()):
self.ADD.removeDataObserver(observer_name, self.obs)
self.ADD.prepare_to_pickle()
result = pickle.dumps(self.da_study) # Careful : pickle is mandatory over cPickle !
# Repertory
check_repertory = False
repertory = ""
- if "Repertory" in study_config.keys():
+ if "Repertory" in list(study_config.keys()):
repertory = study_config["Repertory"]
check_repertory = True
if not os.path.isabs(repertory):
# Check if all the data is provided
for key in AlgoDataRequirements[study_config["Algorithm"]]:
- if key not in study_config.keys():
+ if key not in list(study_config.keys()):
raise ValueError("\n\nCannot find " + key + " in your study configuration !" +
"\n This key is mandatory into a study with " + study_config["Algorithm"] + " algorithm." +
"\n " + study_config["Algorithm"] + " requirements are " + str(AlgoDataRequirements[study_config["Algorithm"]]) + "\n")
# Data
- for key in study_config.keys():
+ for key in list(study_config.keys()):
if key in AssimData:
check_data(key, study_config[key], check_repertory, repertory)
# UserDataInit
- if "UserDataInit" in study_config.keys():
+ if "UserDataInit" in list(study_config.keys()):
check_data("UserDataInit", study_config["UserDataInit"], check_repertory, repertory)
# Variables
check_variables("OutputVariables", study_config)
# Analyse
- if "UserPostAnalysis" in study_config.keys():
+ if "UserPostAnalysis" in list(study_config.keys()):
analysis_config = study_config["UserPostAnalysis"]
if "From" not in analysis_config:
raise ValueError("\n\n UserPostAnalysis found but From is not defined \n in the analysis configuration!\n")
" The given user file is:\n %s\n" % check_file_name)
# Check observers
- if "Observers" in study_config.keys():
+ if "Observers" in list(study_config.keys()):
for obs_var in study_config["Observers"]:
# Check du type
if not isinstance(study_config["Observers"][obs_var], type({})):
raise ValueError("\n\n An observer description has to be a Python dictionary\n"+
" Observer is %s\n" % obs_var)
- if "nodetype" not in study_config["Observers"][obs_var].keys():
+ if "nodetype" not in list(study_config["Observers"][obs_var].keys()):
raise ValueError("\n\n An observer description must provide a nodetype\n"+
" Observer is %s\n" % obs_var)
nodetype = study_config["Observers"][obs_var]["nodetype"]
raise ValueError("\n\n An observer nodetype must be equal to 'String' or 'Script'\n"+
" Observer is %s\n" % obs_var)
if nodetype == "String":
- if "String" not in study_config["Observers"][obs_var].keys():
+ if "String" not in list(study_config["Observers"][obs_var].keys()):
raise ValueError("\n\n An observer with nodetype String must provide a String\n"+
" Observer is %s\n" % obs_var)
if not isinstance(study_config["Observers"][obs_var]["String"], type("")):
raise ValueError("\n\n An observer String description must be a string\n"+
" Observer is %s\n" % obs_var)
if nodetype == "Script":
- if "Script" not in study_config["Observers"][obs_var].keys():
+ if "Script" not in list(study_config["Observers"][obs_var].keys()):
raise ValueError("\n\n An observer with nodetype Script provide a Script\n"+
" Observer is %s\n" % obs_var)
if not isinstance(study_config["Observers"][obs_var]["Script"], type("")):
raise ValueError("\n\n An observer Script description must be a string\n"+
" Observer is %s\n" % obs_var)
- if "scheduler" in study_config["Observers"][obs_var].keys():
+ if "scheduler" in list(study_config["Observers"][obs_var].keys()):
if not isinstance(study_config["Observers"][obs_var]["scheduler"], type("")):
raise ValueError("\n\n An observer scheduler description must be a string\n"+
" Observer is %s\n" % obs_var)
def check_variables(name, study_config):
- if name not in study_config.keys():
+ if name not in list(study_config.keys()):
raise ValueError("\n\n %s not found in your study configuration!\n" % name)
variable_config = study_config[name]
- if "Order" not in variable_config.keys():
+ if "Order" not in list(variable_config.keys()):
raise ValueError("\n\n Order not found in the %s configuration!\n" % name)
list_of_variables = variable_config["Order"]
raise ValueError("\n\nOrder should contain one or more names in the %s configuration!\n" % name)
for var in list_of_variables:
- if var not in variable_config.keys():
+ if var not in list(variable_config.keys()):
- raise ValueError("\n\n Variable %s not found in the %s configuration!\n" % name)
+ raise ValueError("\n\n Variable %s not found in the %s configuration!\n" % (var, name))
value = variable_config[var]
try:
t_bool = proc.getTypeCode("bool")
t_param_input = proc.getTypeCode("SALOME_TYPES/ParametricInput")
t_param_output = proc.getTypeCode("SALOME_TYPES/ParametricOutput")
- if "Repertory" in study_config.keys():
+ if "Repertory" in list(study_config.keys()):
base_repertory = study_config["Repertory"]
repertory = True
else:
CAS_node = factory_CAS_node.cloneNode("CreateAssimilationStudy")
CAS_node.getInputPort("Name").edInitPy(study_config["Name"])
CAS_node.getInputPort("Algorithm").edInitPy(study_config["Algorithm"])
- if study_config.has_key("Debug") and study_config["Debug"] == "1":
+ if "Debug" in study_config and study_config["Debug"] == "1":
CAS_node.getInputPort("Debug").edInitPy(True)
else:
CAS_node.getInputPort("Debug").edInitPy(False)
# Adding an observer init node if an user defines some
factory_init_observers_node = catalogAd.getNodeFromNodeMap("SetObserversNode")
init_observers_node = factory_init_observers_node.cloneNode("SetObservers")
- if "Observers" in study_config.keys():
+ if "Observers" in list(study_config.keys()):
node_script = init_observers_node.getScript()
node_script += "has_observers = True\n"
node_script += "observers = " + str(study_config["Observers"]) + "\n"
# Step 0.5: Find if there is a user init node
init_config = {}
init_config["Target"] = []
- if "UserDataInit" in study_config.keys():
+ if "UserDataInit" in list(study_config.keys()):
init_config = study_config["UserDataInit"]
factory_init_node = catalogAd.getNodeFromNodeMap("UserDataInitFromScript")
init_node = factory_init_node.cloneNode("UserDataInit")
# Step 1: get input data from user configuration
- st_keys = study_config.keys()
- st_keys.sort()
+ st_keys = sorted(list(study_config.keys()))
for key in st_keys:
- ad_keys = AssimData
- ad_keys.sort()
+ ad_keys = sorted(AssimData)
if key in ad_keys:
data_config = study_config[key]
node_script += """ Function = DirectOperator,\n"""
node_script += """ increment = %s,\n"""%str(ScriptWithOneFunction['DifferentialIncrement'])
node_script += """ centeredDF = %s,\n"""%str(ScriptWithOneFunction['CenteredFiniteDifference'])
- if 'EnableMultiProcessing' in ScriptWithOneFunction.keys():
+ if 'EnableMultiProcessing' in list(ScriptWithOneFunction.keys()):
node_script += """ mpEnabled = %s,\n"""%str(ScriptWithOneFunction['EnableMultiProcessing'])
- if 'NumberOfProcesses' in ScriptWithOneFunction.keys():
+ if 'NumberOfProcesses' in list(ScriptWithOneFunction.keys()):
node_script += """ mpWorkers = %s,\n"""%str(ScriptWithOneFunction['NumberOfProcesses'])
node_script += """ )\n"""
node_script += """#\n"""
opt_script_nodeOO = factory_opt_script_node.cloneNode("FakeFunctionNode")
# Check if we have a python script for OptimizerLoopNode
- if "EvolutionModel" in study_config.keys():
+ if "EvolutionModel" in list(study_config.keys()):
data_config = study_config["EvolutionModel"]
opt_script_nodeEM = None
if data_config["Type"] == "Function" and (data_config["From"] == "ScriptWithSwitch" or data_config["From"] == "FunctionDict"):
node_script += """ Function = DirectOperator,\n"""
node_script += """ increment = %s,\n"""%str(ScriptWithOneFunction['DifferentialIncrement'])
node_script += """ centeredDF = %s,\n"""%str(ScriptWithOneFunction['CenteredFiniteDifference'])
- if 'EnableMultiProcessing' in ScriptWithOneFunction.keys():
+ if 'EnableMultiProcessing' in list(ScriptWithOneFunction.keys()):
node_script += """ mpEnabled = %s,\n"""%str(ScriptWithOneFunction['EnableMultiProcessing'])
- if 'NumberOfProcesses' in ScriptWithOneFunction.keys():
+ if 'NumberOfProcesses' in list(ScriptWithOneFunction.keys()):
node_script += """ mpWorkers = %s,\n"""%str(ScriptWithOneFunction['NumberOfProcesses'])
node_script += """ )\n"""
node_script += """#\n"""
opt_script_nodeEM = factory_opt_script_node.cloneNode("FakeFunctionNode")
# Add computation bloc
- if "Observers" in study_config.keys():
+ if "Observers" in list(study_config.keys()):
execution_bloc = runtime.createBloc("Execution Bloc")
optimizer_node.edSetNode(execution_bloc)
ADAO_Case.edAddDFLink(opt_script_nodeOO.getOutputPort("result"), optimizer_node.edGetPortForOutPool())
# Second case: evolution bloc
- if "EvolutionModel" in study_config.keys():
+ if "EvolutionModel" in list(study_config.keys()):
computation_blocEM = runtime.createBloc("computation_blocEM")
computation_blocEM.edAddChild(opt_script_nodeEM)
switch_node.edSetNode(2, computation_blocEM)
ADAO_Case.edAddCFLink(observation_node, end_observation_node)
ADAO_Case.edAddDFLink(end_observation_node.getOutputPort("output"), optimizer_node.edGetPortForOutPool())
- elif "EvolutionModel" in study_config.keys():
+ elif "EvolutionModel" in list(study_config.keys()):
execution_bloc = runtime.createBloc("Execution Bloc")
optimizer_node.edSetNode(execution_bloc)
ADAO_Case.edAddDFLink(init_node.getOutputPort("init_data"), opt_script_nodeOO.getInputPort("init_data"))
# Step 4: create post-processing from user configuration
- if "UserPostAnalysis" in study_config.keys():
+ if "UserPostAnalysis" in list(study_config.keys()):
analysis_config = study_config["UserPostAnalysis"]
if analysis_config["From"] == "String":
factory_analysis_node = catalogAd.getNodeFromNodeMap("SimpleUserAnalysis")
# Import config_file
try:
- execfile(config_file)
+ exec(compile(open(config_file).read(), config_file, 'exec'))
except:
raise ValueError("\n\n Exception in loading %s"%config_file)
for s in ['+', 'rc1', 'rc2', 'rc3']:
v1 = v1.replace(s,'',1)
v2 = v2.replace(s,'',1)
- v11,v12,v13 = map(float,v1.split('.'))
- v21,v22,v23 = map(float,v2.split('.'))
+ v11,v12,v13 = list(map(float,v1.split('.')))
+ v21,v22,v23 = list(map(float,v2.split('.')))
lv1 = 1e6*v11 + 1e3*v12 + v13
lv2 = 1e6*v21 + 1e3*v22 + v23
return lv1 >= lv2
def minimalVersion():
"Description"
- print " Les versions minimales attendues sont :"
- print " - Python systeme....: %s"%minimal_python_version
- print " - Numpy.............: %s"%minimal_numpy_version
- print " - Scipy.............: %s"%minimal_scipy_version
- print " - Matplotlib........: %s"%minimal_matplotlib_version
- print
+ print(" Les versions minimales attendues sont :")
+ print(" - Python systeme....: %s"%minimal_python_version)
+ print(" - Numpy.............: %s"%minimal_numpy_version)
+ print(" - Scipy.............: %s"%minimal_scipy_version)
+ print(" - Matplotlib........: %s"%minimal_matplotlib_version)
+ print()
import sys
def testSysteme():
"Test des versions de modules"
- print " Les versions disponibles sont :"
+ print(" Les versions disponibles sont :")
v=sys.version.split()
- print " - Python systeme....: %s"%v[0]
+ print(" - Python systeme....: %s"%v[0])
assert compare_versions(sys.version.split()[0], minimal_python_version)
#
try:
import numpy
- print " - Numpy.............: %s"%numpy.version.version
+ print(" - Numpy.............: %s"%numpy.version.version)
assert compare_versions(numpy.version.version, minimal_numpy_version)
except ImportError:
return 1
#
try:
import scipy
- print " - Scipy.............: %s"%scipy.version.version
+ print(" - Scipy.............: %s"%scipy.version.version)
assert compare_versions(scipy.version.version, minimal_scipy_version)
except ImportError:
return 1
try:
import matplotlib
mplversion = matplotlib.__version__
- print " - Matplotlib........: %s"%mplversion
+ print(" - Matplotlib........: %s"%mplversion)
assert compare_versions(mplversion, minimal_matplotlib_version)
#
- print
+ print()
backends_OK = []
backends_KO = []
backend_now = matplotlib.get_backend()
except ValueError:
backends_KO.append(backend)
#
- print " Backends disponibles pour Matplotlib %s :"%mplversion
- print " Defaut initial......: '%s'"%backend_now
- print " Fonctionnant........:"
+ print(" Backends disponibles pour Matplotlib %s :"%mplversion)
+ print(" Defaut initial......: '%s'"%backend_now)
+ print(" Fonctionnant........:")
for b in backends_OK:
- print " '%s'"%b
- print " Non fonctionnant....:"
+ print(" '%s'"%b)
+ print(" Non fonctionnant....:")
for b in backends_KO:
- print " '%s'"%b
- print " (Le backend 'bidon' n'est ici que pour verifier le test, il n'existe pas)"
+ print(" '%s'"%b)
+ print(" (Le backend 'bidon' n'est ici que pour verifier le test, il n'existe pas)")
except ImportError:
pass
- print
+ print()
#
return 0
# ==============================================================================
if __name__ == "__main__":
- print '\n AUTODIAGNOSTIC \n'
+ print('\n AUTODIAGNOSTIC \n')
minimalVersion()
sys.exit(testSysteme())
numpy.set_printoptions(precision=5)
def testSysteme():
- print " Les caracteristiques des applications et outils systeme :"
- import sys ; v=sys.version.split() ; print " - Python systeme....: %s"%v[0]
- import numpy ; print " - Numpy.............: %s"%numpy.version.version
+ print(" Les caracteristiques des applications et outils systeme :")
+ import sys ; v=sys.version.split() ; print(" - Python systeme....: %s"%v[0])
+ import numpy ; print(" - Numpy.............: %s"%numpy.version.version)
try:
- import scipy ; print " - Scipy.............: %s"%scipy.version.version
+ import scipy ; print(" - Scipy.............: %s"%scipy.version.version)
except:
- print " - Scipy.............: %s"%("absent",)
+ print(" - Scipy.............: %s"%("absent",))
try:
- import numpy.distutils.system_info as sysinfo ; la = sysinfo.get_info('lapack') ; print " - Lapack............: %s/lib%s.so"%(la['library_dirs'][0],la['libraries'][0])
+ import numpy.distutils.system_info as sysinfo ; la = sysinfo.get_info('lapack') ; print(" - Lapack............: %s/lib%s.so"%(la['library_dirs'][0],la['libraries'][0]))
except:
- print " - Lapack............: %s"%("absent",)
- print
+ print(" - Lapack............: %s"%("absent",))
+ print()
return True
def testNumpy01(dimension = 3, precision = 1.e-17, repetitions = 10):
"Test Numpy"
__d = int(dimension)
- print " Taille du test..................................: %.0e"%__d
+ print(" Taille du test..................................: %.0e"%__d)
t_init = time.time()
A = numpy.array([numpy.arange(dimension)+1.,]*__d)
x = numpy.arange(__d)+1.
- print " La duree elapsed moyenne de l'initialisation est: %4.1f s"%(time.time()-t_init)
+ print(" La duree elapsed moyenne de l'initialisation est: %4.1f s"%(time.time()-t_init))
#
t_init = time.time()
for i in range(repetitions):
b = numpy.dot(A,x)
- print " La duree elapsed pour %3i produits est de.......: %4.1f s"%(repetitions, time.time()-t_init)
+ print(" La duree elapsed pour %3i produits est de.......: %4.1f s"%(repetitions, time.time()-t_init))
r = [__d*(__d+1.)*(2.*__d+1.)/6.,]*__d
if max(abs(b-r)) > precision:
raise ValueError("Resultat du test errone (1)")
else:
- print " Test correct, erreur maximale inferieure a %s"%precision
- print
+ print(" Test correct, erreur maximale inferieure a %s"%precision)
+ print()
del A, x, b
def testNumpy02(dimension = 3, precision = 1.e-17, repetitions = 100):
"Test Numpy"
__d = int(dimension)
- print " Taille du test..................................: %.0e"%__d
+ print(" Taille du test..................................: %.0e"%__d)
t_init = time.time()
A = numpy.random.normal(0.,1.,size=(__d,__d))
x = numpy.random.normal(0.,1.,size=(__d,))
- print " La duree elapsed moyenne de l'initialisation est: %4.1f s"%(time.time()-t_init)
+ print(" La duree elapsed moyenne de l'initialisation est: %4.1f s"%(time.time()-t_init))
#
t_init = time.time()
for i in range(repetitions):
b = numpy.dot(A,x)
- print " La duree elapsed pour %3i produits est de.......: %4.1f s"%(repetitions, time.time()-t_init)
- print
+ print(" La duree elapsed pour %3i produits est de.......: %4.1f s"%(repetitions, time.time()-t_init))
+ print()
del A, x, b
# ==============================================================================
if __name__ == "__main__":
- print '\n AUTODIAGNOSTIC \n'
+ print('\n AUTODIAGNOSTIC \n')
testSysteme()
numpy.random.seed(1000)
testNumpy01(dimension = 1.e4)
testNumpy02(dimension = 3.e3)
- print
+ print()
# ==============================================================================
if __name__ == "__main__":
- print '\n AUTODIAGNOSTIC \n'
- print """Exemple de la doc :
+ print('\n AUTODIAGNOSTIC \n')
+ print("""Exemple de la doc :
Un exemple simple de creation d'un cas de calcul TUI ADAO
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++
- """
+ """)
xa = test1()
assertAlmostEqualArrays(xa, [0.25, 0.80, 0.95], places = 5)
def assertAlmostEqualArrays(first, second, places=7, msg=None, delta=None):
"Compare two vectors, like unittest.assertAlmostEqual"
if msg is not None:
- print msg
+ print(msg)
if delta is not None:
if ( (numpy.asarray(first) - numpy.asarray(second)) > float(delta) ).any():
raise AssertionError("%s != %s within %s places"%(first,second,delta))
# ==============================================================================
if __name__ == "__main__":
- print '\n AUTODIAGNOSTIC \n'
- print """Exemple de la doc :
+ print('\n AUTODIAGNOSTIC \n')
+ print("""Exemple de la doc :
Creation detaillee d'un cas de calcul TUI ADAO
++++++++++++++++++++++++++++++++++++++++++++++
Les deux resultats sont testes pour etre identiques.
- """
+ """)
xa1 = test1()
xa2 = test2()
ecart = assertAlmostEqualArrays(xa1, xa2, places = 15)
- print " Difference maximale entre les deux : %.2e"%ecart
+ print(" Difference maximale entre les deux : %.2e"%ecart)
def assertAlmostEqualArrays(first, second, places=7, msg=None, delta=None):
"Compare two vectors, like unittest.assertAlmostEqual"
if msg is not None:
- print msg
+ print(msg)
if delta is not None:
if ( (numpy.asarray(first) - numpy.asarray(second)) > float(delta) ).any():
raise AssertionError("%s != %s within %s places"%(first,second,delta))
Xoptimum = case.get("Analysis")[-1]
FX_at_optimum = case.get("SimulatedObservationAtOptimum")[-1]
J_values = case.get("CostFunctionJ")[:]
- print
- print "Number of internal iterations...: %i"%len(J_values)
- print "Initial state...................:",numpy.ravel(Xbackground)
- print "Optimal state...................:",numpy.ravel(Xoptimum)
- print "Simulation at optimal state.....:",numpy.ravel(FX_at_optimum)
- print
+ print()
+ print("Number of internal iterations...: %i"%len(J_values))
+ print("Initial state...................:",numpy.ravel(Xbackground))
+ print("Optimal state...................:",numpy.ravel(Xoptimum))
+ print("Simulation at optimal state.....:",numpy.ravel(FX_at_optimum))
+ print()
#
return case.get("Analysis")[-1]
# ==============================================================================
if __name__ == "__main__":
- print '\n AUTODIAGNOSTIC \n'
- print """Exemple de la doc :
+ print('\n AUTODIAGNOSTIC \n')
+ print("""Exemple de la doc :
Exploitation independante des resultats d'un cas de calcul
++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
- """
+ """)
xa = test1()
assertAlmostEqualArrays(xa, [ 2., 3., 4.])
def assertAlmostEqualArrays(first, second, places=7, msg=None, delta=None):
"Compare two vectors, like unittest.assertAlmostEqual"
if msg is not None:
- print msg
+ print(msg)
if delta is not None:
if ( (numpy.asarray(first) - numpy.asarray(second)) > float(delta) ).any():
raise AssertionError("%s != %s within %s places"%(first,second,delta))
"""Verification de la disponibilite de l'ensemble des algorithmes\n(Utilisation d'un operateur matriciel)"""
Xa = {}
for algo in ("3DVAR", "Blue", "ExtendedBlue", "LinearLeastSquares", "NonLinearLeastSquares", "DerivativeFreeOptimization"):
- print
+ print()
msg = "Algorithme en test : %s"%algo
- print msg+"\n"+"-"*len(msg)
+ print(msg+"\n"+"-"*len(msg))
#
adaopy = adaoBuilder.New()
adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "Bounds":[[-1,10.],[-1,10.],[-1,10.]]})
del adaopy
#
for algo in ("ExtendedKalmanFilter", "KalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
- print
+ print()
msg = "Algorithme en test : %s"%algo
- print msg+"\n"+"-"*len(msg)
+ print(msg+"\n"+"-"*len(msg))
#
adaopy = adaoBuilder.New()
adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, })
del adaopy
#
for algo in ("ParticleSwarmOptimization", "QuantileRegression", ):
- print
+ print()
msg = "Algorithme en test : %s"%algo
- print msg+"\n"+"-"*len(msg)
+ print(msg+"\n"+"-"*len(msg))
#
adaopy = adaoBuilder.New()
adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"BoxBounds":3*[[-1,3]], "SetSeed":1000, })
del adaopy
#
for algo in ("EnsembleBlue", ):
- print
+ print()
msg = "Algorithme en test : %s"%algo
- print msg+"\n"+"-"*len(msg)
+ print(msg+"\n"+"-"*len(msg))
#
adaopy = adaoBuilder.New()
adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"SetSeed":1000, })
Xa[algo] = adaopy.get("Analysis")[-1]
del adaopy
#
- print
+ print()
msg = "Tests des ecarts attendus :"
- print msg+"\n"+"="*len(msg)
+ print(msg+"\n"+"="*len(msg))
verify_similarity_of_algo_results(("3DVAR", "Blue", "ExtendedBlue", "4DVAR", "DerivativeFreeOptimization"), Xa)
verify_similarity_of_algo_results(("LinearLeastSquares", "NonLinearLeastSquares"), Xa)
verify_similarity_of_algo_results(("ExtendedKalmanFilter", "KalmanFilter", "UnscentedKalmanFilter"), Xa)
- print " Les resultats obtenus sont corrects."
- print
+ print(" Les resultats obtenus sont corrects.")
+ print()
#
return 0
M = numpy.matrix("1 0 0;0 2 0;0 0 3")
def H(x): return M * numpy.asmatrix(numpy.ravel( x )).T
for algo in ("3DVAR", "Blue", "ExtendedBlue", "NonLinearLeastSquares", "DerivativeFreeOptimization"):
- print
+ print()
msg = "Algorithme en test : %s"%algo
- print msg+"\n"+"-"*len(msg)
+ print(msg+"\n"+"-"*len(msg))
#
adaopy = adaoBuilder.New()
adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "Bounds":[[-1,10.],[-1,10.],[-1,10.]]})
M = numpy.matrix("1 0 0;0 2 0;0 0 3")
def H(x): return M * numpy.asmatrix(numpy.ravel( x )).T
for algo in ("ExtendedKalmanFilter", "KalmanFilter", "UnscentedKalmanFilter", "4DVAR"):
- print
+ print()
msg = "Algorithme en test : %s"%algo
- print msg+"\n"+"-"*len(msg)
+ print(msg+"\n"+"-"*len(msg))
#
adaopy = adaoBuilder.New()
adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, })
M = numpy.matrix("1 0 0;0 1 0;0 0 1")
def H(x): return M * numpy.asmatrix(numpy.ravel( x )).T
for algo in ("ParticleSwarmOptimization", "QuantileRegression", ):
- print
+ print()
msg = "Algorithme en test : %s"%algo
- print msg+"\n"+"-"*len(msg)
+ print(msg+"\n"+"-"*len(msg))
#
adaopy = adaoBuilder.New()
adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"BoxBounds":3*[[-1,3]], "SetSeed":1000, })
Xa[algo] = adaopy.get("Analysis")[-1]
del adaopy
#
- print
+ print()
msg = "Tests des ecarts attendus :"
- print msg+"\n"+"="*len(msg)
+ print(msg+"\n"+"="*len(msg))
verify_similarity_of_algo_results(("3DVAR", "Blue", "ExtendedBlue", "4DVAR", "DerivativeFreeOptimization"), Xa)
verify_similarity_of_algo_results(("ExtendedKalmanFilter", "KalmanFilter", "UnscentedKalmanFilter"), Xa)
- print " Les resultats obtenus sont corrects."
- print
+ print(" Les resultats obtenus sont corrects.")
+ print()
#
return 0
def almost_equal_vectors(v1, v2, precision = 1.e-15, msg = ""):
"""Comparaison de deux vecteurs"""
- print " Difference maximale %s: %.2e"%(msg, max(abs(v2 - v1)))
+ print(" Difference maximale %s: %.2e"%(msg, max(abs(v2 - v1))))
return max(abs(v2 - v1)) < precision
def verify_similarity_of_algo_results(serie = [], Xa = {}):
- print " Comparaisons :"
+ print(" Comparaisons :")
for algo1 in serie:
for algo2 in serie:
if algo1 is algo2: break
assert almost_equal_vectors( Xa[algo1], Xa[algo2], 5.e-5, "entre %s et %s "%(algo1, algo2) )
- print " Algorithmes dont les resultats sont similaires : %s\n"%(serie,)
+ print(" Algorithmes dont les resultats sont similaires : %s\n"%(serie,))
#===============================================================================
if __name__ == "__main__":
- print '\n AUTODIAGNOSTIC \n'
+ print('\n AUTODIAGNOSTIC \n')
test1()
test2()
# ==============================================================================
def test1():
for algo in ("AdjointTest", "FunctionTest", "GradientTest", "LinearityTest", "TangentTest"):
- print
+ print()
msg = "Algorithme en test : %s"%algo
- print msg+"\n"+"-"*len(msg)
+ print(msg+"\n"+"-"*len(msg))
#
adaopy = adaoBuilder.New()
adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10,"NumberOfRepetition":2, "SetSeed":1000})
del adaopy
#
for algo in ("ObserverTest", ):
- print
+ print()
msg = "Algorithme en test : %s"%algo
- print msg+"\n"+"-"*len(msg)
+ print(msg+"\n"+"-"*len(msg))
#
adaopy = adaoBuilder.New()
adaopy.setAlgorithmParameters(Algorithm=algo)
del adaopy
#
for algo in ("SamplingTest", ):
- print
+ print()
msg = "Algorithme en test : %s"%algo
- print msg+"\n"+"-"*len(msg)
+ print(msg+"\n"+"-"*len(msg))
#
adaopy = adaoBuilder.New()
adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={
# ==============================================================================
if __name__ == "__main__":
- print '\n AUTODIAGNOSTIC \n'
+ print('\n AUTODIAGNOSTIC \n')
test1()