From: Gilles DAVID Date: Thu, 30 Mar 2017 17:23:34 +0000 (+0200) Subject: Porting to Python 3 (1st draft) X-Git-Url: http://git.salome-platform.org/gitweb/?a=commitdiff_plain;h=611d72c8748648bbea864e5bac51f44737bd7a06;p=modules%2Fadao.git Porting to Python 3 (1st draft) --- diff --git a/adm_local/check_scipy.m4 b/adm_local/check_scipy.m4 index 0c73362..5b99335 100644 --- a/adm_local/check_scipy.m4 +++ b/adm_local/check_scipy.m4 @@ -19,11 +19,11 @@ dnl dnl Author: André Ribes, andre.ribes@edf.fr, EDF R&D AC_DEFUN([CHECK_SCIPY],[ -AC_REQUIRE([CHECK_PYTHON])dnl +AC_REQUIRE([CHECK_PYTHON3])dnl scipy_ok=no -scipydir=`$PYTHON -c "import scipy;print scipy.get_include()" 2>/dev/null` +scipydir=`$PYTHON -c "import scipy;print(scipy.get_include())" 2>/dev/null` if test -d "$scipydir"; then scipy_ok=yes diff --git a/bin/AdaoCatalogGenerator.py b/bin/AdaoCatalogGenerator.py index cfa54e2..ead1af1 100644 --- a/bin/AdaoCatalogGenerator.py +++ b/bin/AdaoCatalogGenerator.py @@ -26,7 +26,7 @@ import logging import traceback import sys import string -import StringIO +import io import module_version @@ -110,27 +110,27 @@ def AdjointOperatorInNS(filename): AdjointOperatorInNS.info = u"The Python file has to contain explicitly an \\"AdjointOperator\\" function definition with only one pair of vectors as argument." """%(module_version.name,module_version.version) -# Important : validators=[...] pour que les conditions soient traitées simultanément, en "ET", et pas en "OU" (choisi dans le cas du tuple à la place de la liste) +# Important : validators=[...] pour que les conditions soient traitées simultanément, en "ET", et pas en "OU" (choisi dans le cas du tuple à la place de la liste) # validators=[OnlyStr(), FileExtVal('py'), FunctionVal(fv)] data_method = """ def F_${data_name}(statut, fv=NoCheckInNS) : return FACT( statut = statut, FROM = SIMP(statut = "o", typ = "TXM", into=(${data_into}), defaut=${data_default}), SCRIPT_DATA = BLOC ( condition = " FROM in ( 'Script', ) ", - SCRIPT_FILE = SIMP(statut = "o", typ = "FichierNoAbs", validators=[OnlyStr(), FileExtVal('py'), FunctionVal(fv)], fr="En attente d'un nom de fichier script, avec ou sans le chemin complet pour le trouver, contenant si nécessaire la définition d'une variable interne de même nom que le concept parent", ang="Waiting for a script file name, with or without the full path to find it, containing if necessary the definition of an internal variable of the same name as the parent concept"), + SCRIPT_FILE = SIMP(statut = "o", typ = "FichierNoAbs", validators=[OnlyStr(), FileExtVal('py'), FunctionVal(fv)], fr="En attente d'un nom de fichier script, avec ou sans le chemin complet pour le trouver, contenant si nécessaire la définition d'une variable interne de même nom que le concept parent", ang="Waiting for a script file name, with or without the full path to find it, containing if necessary the definition of an internal variable of the same name as the parent concept"), ), STRING_DATA = BLOC ( condition = " FROM in ( 'String', ) ", - STRING = SIMP(statut = "o", typ = "TXM",${ms_default} fr="En attente d'une chaine de caractères entre guillements. Pour construire un vecteur ou une matrice, ce doit être une suite de nombres, utilisant un espace ou une virgule pour séparer deux éléments et un point-virgule pour séparer deux lignes", ang="Waiting for a string in quotes. 
To build a vector or a matrix, it has to be a float serie, using a space or comma to separate two elements in a line, a semi-colon to separate rows"), + STRING = SIMP(statut = "o", typ = "TXM",${ms_default} fr="En attente d'une chaine de caractères entre guillements. Pour construire un vecteur ou une matrice, ce doit être une suite de nombres, utilisant un espace ou une virgule pour séparer deux éléments et un point-virgule pour séparer deux lignes", ang="Waiting for a string in quotes. To build a vector or a matrix, it has to be a float serie, using a space or comma to separate two elements in a line, a semi-colon to separate rows"), ), SCRIPTWITHFUNCTIONS_DATA = BLOC ( condition = " FROM in ( 'ScriptWithFunctions', ) ", - SCRIPTWITHFUNCTIONS_FILE = SIMP(statut = "o", typ = "FichierNoAbs", validators=[OnlyStr(), FileExtVal('py'), FunctionVal(DirectOperatorInNS), FunctionVal(TangentOperatorInNS), FunctionVal(AdjointOperatorInNS)], fr="En attente d'un nom de fichier script, avec ou sans le chemin complet pour le trouver, contenant en variables internes trois fonctions de calcul nommées DirectOperator, TangentOperator et AdjointOperator", ang="Waiting for a script file name, with or without the full path to find it, containing as internal variables three computation functions named DirectOperator, TangentOperator and AdjointOperator"), + SCRIPTWITHFUNCTIONS_FILE = SIMP(statut = "o", typ = "FichierNoAbs", validators=[OnlyStr(), FileExtVal('py'), FunctionVal(DirectOperatorInNS), FunctionVal(TangentOperatorInNS), FunctionVal(AdjointOperatorInNS)], fr="En attente d'un nom de fichier script, avec ou sans le chemin complet pour le trouver, contenant en variables internes trois fonctions de calcul nommées DirectOperator, TangentOperator et AdjointOperator", ang="Waiting for a script file name, with or without the full path to find it, containing as internal variables three computation functions named DirectOperator, TangentOperator and AdjointOperator"), ), SCRIPTWITHONEFUNCTION_DATA = BLOC ( condition = " FROM in ( 'ScriptWithOneFunction', ) ", - SCRIPTWITHONEFUNCTION_FILE = SIMP(statut = "o", typ = "FichierNoAbs", validators=[OnlyStr(), FileExtVal('py'), FunctionVal(DirectOperatorInNS)], fr="En attente d'un nom de fichier script, avec ou sans le chemin complet pour le trouver, contenant en variable interne une seule fonction de calcul nommée DirectOperator", ang="Waiting for a script file name, with or without the full path to find it, containing as internal variable only one function named DirectOperator"), - DifferentialIncrement = SIMP(statut="o", typ = "R", val_min=0, val_max=1, defaut=0.01, fr="Incrément de la perturbation dX pour calculer la dérivée, construite en multipliant X par l'incrément en évitant les valeurs nulles", ang="Increment of dX perturbation to calculate the derivative, build multiplying X by the increment avoiding null values"), - CenteredFiniteDifference = SIMP(statut="o", typ = "I", into=(0, 1), defaut=0, fr="Formulation centrée (1) ou décentrée (0) pour la méthode des différences finies", ang="Centered (1) or uncentered (0) formulation for the finite differences method"), - EnableMultiProcessing = SIMP(statut="f", typ = "I", into=(0, 1), defaut=0, fr="Calculs élémentaires effectués en séquentiel (0) ou en parallèle (1) dans la méthode des différences finies", ang="Elementary calculations done sequentially (0) or in parallel (1) in the finite differences method"), - NumberOfProcesses = SIMP(statut="f", typ = "I", val_min=0, defaut=0, fr="Nombre de processus parallèles, 0 
pour un contrôle automatique", ang="Number of parallel processes, 0 for automatic control"), + SCRIPTWITHONEFUNCTION_FILE = SIMP(statut = "o", typ = "FichierNoAbs", validators=[OnlyStr(), FileExtVal('py'), FunctionVal(DirectOperatorInNS)], fr="En attente d'un nom de fichier script, avec ou sans le chemin complet pour le trouver, contenant en variable interne une seule fonction de calcul nommée DirectOperator", ang="Waiting for a script file name, with or without the full path to find it, containing as internal variable only one function named DirectOperator"), + DifferentialIncrement = SIMP(statut="o", typ = "R", val_min=0, val_max=1, defaut=0.01, fr="Incrément de la perturbation dX pour calculer la dérivée, construite en multipliant X par l'incrément en évitant les valeurs nulles", ang="Increment of dX perturbation to calculate the derivative, build multiplying X by the increment avoiding null values"), + CenteredFiniteDifference = SIMP(statut="o", typ = "I", into=(0, 1), defaut=0, fr="Formulation centrée (1) ou décentrée (0) pour la méthode des différences finies", ang="Centered (1) or uncentered (0) formulation for the finite differences method"), + EnableMultiProcessing = SIMP(statut="f", typ = "I", into=(0, 1), defaut=0, fr="Calculs élémentaires effectués en séquentiel (0) ou en parallèle (1) dans la méthode des différences finies", ang="Elementary calculations done sequentially (0) or in parallel (1) in the finite differences method"), + NumberOfProcesses = SIMP(statut="f", typ = "I", val_min=0, defaut=0, fr="Nombre de processus parallèles, 0 pour un contrôle automatique", ang="Number of parallel processes, 0 for automatic control"), ), SCRIPTWITHSWITCH_DATA = BLOC ( condition = " FROM in ( 'ScriptWithSwitch', ) ", SCRIPTWITHSWITCH_FILE = SIMP(statut = "o", typ = "FichierNoAbs", validators=[OnlyStr(), FileExtVal('py')], fr="En attente d'un nom de fichier script, avec ou sans le chemin complet pour le trouver, contenant un switch pour les calculs direct, tangent et adjoint", ang="Waiting for a script file name, with or without the full path to find it, containing a switch for direct, tangent and adjoint computations"), @@ -318,7 +318,7 @@ one_algo_choices = string.Template(one_algo_choices) #----------- Begin generation script -----------# -print "-- Starting AdaoCalatogGenerator.py --" +print("-- Starting AdaoCalatogGenerator.py --") try: import daEficas @@ -349,7 +349,7 @@ catalog_path = args[0] catalog_name = args[1] # Generates into a string -mem_file = StringIO.StringIO() +mem_file = io.StringIO() # Start file from time import strftime @@ -374,7 +374,7 @@ for algo_name in algos_list: # Step 1: A partir des infos, on cree les fonctions qui vont permettre # d'entrer les donnees utilisateur -for data_input_name in infos.DataTypeDict.keys(): +for data_input_name in infos.DataTypeDict: logging.debug('A data input Type is found: ' + data_input_name) data_name = data_input_name data_into = "" @@ -387,7 +387,7 @@ for data_input_name in infos.DataTypeDict.keys(): # On choisit le default data_default = "\"" + infos.DataTypeDefaultDict[data_input_name] + "\"" - if infos.DataSValueDefaultDict.has_key(data_input_name): + if data_input_name in infos.DataSValueDefaultDict: ms_default = " defaut=\"" + infos.DataSValueDefaultDict[data_input_name] + "\"," mem_file.write(data_method.substitute(data_name = data_name, @@ -397,9 +397,9 @@ for data_input_name in infos.DataTypeDict.keys(): algos_names = algos_names+check_names)) # Step 2: On cree les fonctions qui permettent de rentrer les donnees des 
algorithmes -for assim_data_input_name in infos.AssimDataDict.keys(): +for assim_data_input_name in infos.AssimDataDict: logging.debug("An input function data input is found: " + assim_data_input_name) - # assim_name = assim_data_input_name + # assim_name = assim_data_input_name storage = "" choices = "" default_choice = "" @@ -421,7 +421,7 @@ for assim_data_input_name in infos.AssimDataDict.keys(): default_choice = default_choice)) # Step 3: On ajoute les fonctions representant les options possibles -for opt_name in infos.OptDict.keys(): +for opt_name in infos.OptDict: logging.debug("An optional node is found: " + opt_name) data_name = opt_name data_into = "" @@ -433,7 +433,7 @@ for opt_name in infos.OptDict.keys(): # On choisit le default data_default = "\"" + infos.OptDefaultDict[opt_name] + "\"" - if infos.DataSValueDefaultDict.has_key(opt_name): + if opt_name in infos.DataSValueDefaultDict: ms_default = " defaut=\"" + infos.DataSValueDefaultDict[opt_name] + "\"," mem_file.write(data_method.substitute(data_name = data_name, @@ -461,19 +461,18 @@ for algo in all_names: assim_study_object = daCore.AssimilationStudy.AssimilationStudy() assim_study_object.setAlgorithm(choice=algo) par_dict = assim_study_object.getAlgorithmParameters(False) - par_keys = par_dict.keys() - par_keys.sort() + par_keys = sorted(par_dict.keys()) algo_parameters = "" for pn in par_keys: if pn in ("StoreInternalVariables", "PlotAndSave", "ResultFile", "ResultTitle", "ResultLabel"): continue # Cles a supprimer pt = par_dict[pn]["typecast"] pd = par_dict[pn]["default"] pm = par_dict[pn]["message"] - if par_dict[pn].has_key("minval") and par_dict[pn]["minval"] is not None: + if "minval" in par_dict[pn] and par_dict[pn]["minval"] is not None: vi = ", val_min=%s"%par_dict[pn]["minval"] else: vi = "" - if par_dict[pn].has_key("minval") and par_dict[pn]["maxval"] is not None: + if "minval" in par_dict[pn] and par_dict[pn]["maxval"] is not None: vs = ", val_max=%s"%par_dict[pn]["maxval"] else: vs = "" @@ -483,9 +482,9 @@ for algo in all_names: algo_parameters += """ %s = SIMP(statut="f", typ="R"%s%s, min=1, max=1, defaut=%s, fr="%s"),\n"""%(pn,vi,vs,float(pd),pm) elif pt is bool: algo_parameters += """ %s = SIMP(statut="f", typ="I", min=1, max=1, defaut=%s, fr="%s"),\n"""%(pn,int(pd),pm) - elif pt is str and par_dict[pn].has_key("listval"): + elif pt is str and "listval" in par_dict[pn]: algo_parameters += """ %s = SIMP(statut="f", typ="TXM", min=1, max=1, defaut="%s", into=%s, fr="%s"),\n"""%(pn,str(pd),par_dict[pn]["listval"],pm) - elif pt is tuple and par_dict[pn].has_key("listval"): + elif pt is tuple and "listval" in par_dict[pn]: algo_parameters += """ %s = SIMP(statut="f", typ="TXM", max="**", into=%s, fr="%s"),\n"""%(pn,par_dict[pn]["listval"],pm) else: algo_parameters += """ %s = SIMP(statut="f", typ="TXM", fr="%s"),\n"""%(pn,pm) diff --git a/configure.ac b/configure.ac index 483d278..a0bdb8a 100644 --- a/configure.ac +++ b/configure.ac @@ -34,6 +34,7 @@ AC_PROG_LIBTOOL AC_PROG_CC AC_PROG_CXX +AM_PATH_PYTHON([3.4]) CHECK_KERNEL CHECK_OMNIORB CHECK_EFICAS diff --git a/doc/en/conf.py b/doc/en/conf.py index dd97d53..90c6fab 100644 --- a/doc/en/conf.py +++ b/doc/en/conf.py @@ -61,8 +61,8 @@ source_suffix = '.rst' master_doc = 'index' # General information about the project. 
-project = u'%s'%module_version.name -copyright = u'2008-%s, Jean-Philippe ARGAUD'%module_version.year +project = '%s'%module_version.name +copyright = '2008-%s, Jean-Philippe ARGAUD'%module_version.year # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -198,8 +198,8 @@ latex_font_size = '10pt' # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ - ('index', 'ADAO.tex', u'ADAO documentation', - u'Jean-Philippe ARGAUD', 'manual'), + ('index', 'ADAO.tex', 'ADAO documentation', + 'Jean-Philippe ARGAUD', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of @@ -225,8 +225,8 @@ source_encoding = 'iso-8859-15' # Bibliographic Dublin Core info. epub_title = project -epub_author = u'Jean-Philippe ARGAUD' -epub_publisher = u'Jean-Philippe ARGAUD' +epub_author = 'Jean-Philippe ARGAUD' +epub_publisher = 'Jean-Philippe ARGAUD' epub_copyright = copyright # The language of the text. It defaults to the language option @@ -275,7 +275,7 @@ epub_copyright = copyright # would mean that specific document would be compressed # regardless of the global pdf_compressed setting. pdf_documents = [ - ('contents', u'ADAO', u'ADAO', u'Jean-Philippe ARGAUD', dict(pdf_compressed = True)), + ('contents', 'ADAO', 'ADAO', 'Jean-Philippe ARGAUD', dict(pdf_compressed = True)), ] # A comma-separated list of custom stylesheets. Example: pdf_stylesheets = ['sphinx','kerning','a4'] diff --git a/doc/fr/conf.py b/doc/fr/conf.py index fbd166e..76e199b 100644 --- a/doc/fr/conf.py +++ b/doc/fr/conf.py @@ -61,8 +61,8 @@ source_suffix = '.rst' master_doc = 'index' # General information about the project. -project = u'%s'%module_version.name -copyright = u'2008-%s, Jean-Philippe ARGAUD'%module_version.year +project = '%s'%module_version.name +copyright = '2008-%s, Jean-Philippe ARGAUD'%module_version.year # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -198,8 +198,8 @@ latex_font_size = '10pt' # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ - ('index', 'ADAO.tex', u'Documentation ADAO', - u'Jean-Philippe ARGAUD', 'manual'), + ('index', 'ADAO.tex', 'Documentation ADAO', + 'Jean-Philippe ARGAUD', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of @@ -225,8 +225,8 @@ source_encoding = 'iso-8859-15' # Bibliographic Dublin Core info. epub_title = project -epub_author = u'Jean-Philippe ARGAUD' -epub_publisher = u'Jean-Philippe ARGAUD' +epub_author = 'Jean-Philippe ARGAUD' +epub_publisher = 'Jean-Philippe ARGAUD' epub_copyright = copyright # The language of the text. It defaults to the language option @@ -275,7 +275,7 @@ epub_copyright = copyright # would mean that specific document would be compressed # regardless of the global pdf_compressed setting. pdf_documents = [ - ('contents', u'ADAO', u'ADAO', u'Jean-Philippe ARGAUD', dict(pdf_compressed = True)), + ('contents', 'ADAO', 'ADAO', 'Jean-Philippe ARGAUD', dict(pdf_compressed = True)), ] # A comma-separated list of custom stylesheets. 
Example: pdf_stylesheets = ['sphinx','kerning','a4'] diff --git a/examples/daSalome/test005_ADAO_scripts_for_JDC.py b/examples/daSalome/test005_ADAO_scripts_for_JDC.py index 2e1152e..a5e7f80 100644 --- a/examples/daSalome/test005_ADAO_scripts_for_JDC.py +++ b/examples/daSalome/test005_ADAO_scripts_for_JDC.py @@ -52,7 +52,8 @@ H = numpy.matrix(numpy.core.identity(dimension)) def FunctionH( X ): return H * X # -def AdjointH( (X, Y) ): +def AdjointH(xxx_todo_changeme ): + (X, Y) = xxx_todo_changeme return H.T * Y # # The possible computations diff --git a/examples/daSalome/test006_Observers_Observation_Operator.py b/examples/daSalome/test006_Observers_Observation_Operator.py index 5768de4..15464d9 100644 --- a/examples/daSalome/test006_Observers_Observation_Operator.py +++ b/examples/daSalome/test006_Observers_Observation_Operator.py @@ -54,7 +54,8 @@ def FunctionH( X ): time.sleep(1) return H * X # -def AdjointH( (X, Y) ): +def AdjointH(xxx_todo_changeme ): + (X, Y) = xxx_todo_changeme return H.T * Y # # The possible computations diff --git a/examples/daSalome/test006_Observers_init.py b/examples/daSalome/test006_Observers_init.py index f9645f5..f39c29e 100644 --- a/examples/daSalome/test006_Observers_init.py +++ b/examples/daSalome/test006_Observers_init.py @@ -55,10 +55,10 @@ BackgroundError = B # ---------------------------------------------------------- ObservationError = R -print xb -print B -print yo -print R +print(xb) +print(B) +print(yo) +print(R) # # Definition of the init_data dictionnary diff --git a/examples/daSalome/test006_Observers_observer_with_file.py b/examples/daSalome/test006_Observers_observer_with_file.py index 1ca883d..9c38407 100644 --- a/examples/daSalome/test006_Observers_observer_with_file.py +++ b/examples/daSalome/test006_Observers_observer_with_file.py @@ -1,6 +1,6 @@ -print " ---> observerState" -print " var =",var[-1] -print " info =",info +print(" ---> observerState") +print(" var =",var[-1]) +print(" info =",info) # import Gnuplot import os @@ -16,7 +16,7 @@ gp('set title "'+str(info)+'"') gp.plot( Gnuplot.Data( var[-1] ) ) filename = os.path.join("/tmp", "imageState_%02i.ps"%numero) -print " imageState \"%s\""%filename +print(" imageState \"%s\""%filename) gp.hardcopy(filename=filename, color=1) numero += 1 diff --git a/examples/daSkeletons/External_data_definition_by_scripts/Physical_data_and_covariance_matrices.py b/examples/daSkeletons/External_data_definition_by_scripts/Physical_data_and_covariance_matrices.py index baea4c8..4d8b9ee 100644 --- a/examples/daSkeletons/External_data_definition_by_scripts/Physical_data_and_covariance_matrices.py +++ b/examples/daSkeletons/External_data_definition_by_scripts/Physical_data_and_covariance_matrices.py @@ -51,14 +51,14 @@ def Simple_Matrix( size, diagonal=None ): # ============================================================================== if __name__ == "__main__": - print - print "AUTODIAGNOSTIC" - print "==============" + print() + print("AUTODIAGNOSTIC") + print("==============") - print - print "True_state = ", True_state() - print - print "B or R =\n",Simple_Matrix(3) - print - print "B or R =\n",Simple_Matrix(4, diagonal=numpy.arange(4,dtype=float)) - print + print() + print("True_state = ", True_state()) + print() + print("B or R =\n",Simple_Matrix(3)) + print() + print("B or R =\n",Simple_Matrix(4, diagonal=numpy.arange(4,dtype=float))) + print() diff --git a/examples/daSkeletons/External_data_definition_by_scripts/Physical_simulation_functions.py 
b/examples/daSkeletons/External_data_definition_by_scripts/Physical_simulation_functions.py index c92e03a..f22098c 100644 --- a/examples/daSkeletons/External_data_definition_by_scripts/Physical_simulation_functions.py +++ b/examples/daSkeletons/External_data_definition_by_scripts/Physical_simulation_functions.py @@ -38,9 +38,9 @@ def DirectOperator( XX ): """ Direct non-linear simulation operator """ # # --------------------------------------> EXAMPLE TO BE REMOVED - if type(XX) is type(numpy.matrix([])): # EXAMPLE TO BE REMOVED + if isinstance(XX, type(numpy.matrix([]))): # EXAMPLE TO BE REMOVED HX = XX.A1.tolist() # EXAMPLE TO BE REMOVED - elif type(XX) is type(numpy.array([])): # EXAMPLE TO BE REMOVED + elif isinstance(XX, type(numpy.array([]))): # EXAMPLE TO BE REMOVED HX = numpy.matrix(XX).A1.tolist() # EXAMPLE TO BE REMOVED else: # EXAMPLE TO BE REMOVED HX = XX # EXAMPLE TO BE REMOVED @@ -57,13 +57,13 @@ AdjointOperator = FDA.AdjointOperator # ============================================================================== if __name__ == "__main__": - print - print "AUTODIAGNOSTIC" - print "==============" + print() + print("AUTODIAGNOSTIC") + print("==============") from Physical_data_and_covariance_matrices import True_state X0, noms = True_state() FX = DirectOperator( X0 ) - print "FX =", FX - print + print("FX =", FX) + print() diff --git a/examples/daSkeletons/External_data_definition_by_scripts/Script_UserPostAnalysis.py b/examples/daSkeletons/External_data_definition_by_scripts/Script_UserPostAnalysis.py index 9a9f4fe..5af6110 100644 --- a/examples/daSkeletons/External_data_definition_by_scripts/Script_UserPostAnalysis.py +++ b/examples/daSkeletons/External_data_definition_by_scripts/Script_UserPostAnalysis.py @@ -41,15 +41,15 @@ J = ADD.get("CostFunctionJ")[:] # # Verifying the results by printing # --------------------------------- -print -print "obs = [%s]"%(", ".join(["%.4f"%v for v in ADD.get("Observation").A1])) -print -print "xb = [%s]"%(", ".join(["%.4f"%v for v in ADD.get("Background").A1])) -print "xt = [%s]"%(", ".join(["%.4f"%v for v in numpy.array(xt)])) -print "xa = [%s]"%(", ".join(["%.4f"%v for v in numpy.array(xa)])) -print +print() +print("obs = [%s]"%(", ".join(["%.4f"%v for v in ADD.get("Observation").A1]))) +print() +print("xb = [%s]"%(", ".join(["%.4f"%v for v in ADD.get("Background").A1]))) +print("xt = [%s]"%(", ".join(["%.4f"%v for v in numpy.array(xt)]))) +print("xa = [%s]"%(", ".join(["%.4f"%v for v in numpy.array(xa)]))) +print() for i in range( len(x_series) ): - print "Step %2i : J = %.4e X = [%s]"%(i, J[i], ", ".join(["%.4f"%v for v in x_series[i]])) -print + print("Step %2i : J = %.4e X = [%s]"%(i, J[i], ", ".join(["%.4f"%v for v in x_series[i]]))) +print() # # ============================================================================== diff --git a/src/daComposant/daAlgorithms/3DVAR.py b/src/daComposant/daAlgorithms/3DVAR.py index adf9626..fdf7f62 100644 --- a/src/daComposant/daAlgorithms/3DVAR.py +++ b/src/daComposant/daAlgorithms/3DVAR.py @@ -330,7 +330,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): if "MahalanobisConsistency" in self._parameters["StoreSupplementaryCalculations"]: self.StoredVariables["MahalanobisConsistency"].store( float( 2.*MinJ/d.size ) ) if "SimulationQuantiles" in self._parameters["StoreSupplementaryCalculations"]: - Qtls = map(float, self._parameters["Quantiles"]) + Qtls = list(map(float, self._parameters["Quantiles"])) nech = self._parameters["NumberOfSamplesForQuantiles"] HXa = numpy.matrix(numpy.ravel( HXa 
)).T YfQ = None diff --git a/src/daComposant/daAlgorithms/4DVAR.py b/src/daComposant/daAlgorithms/4DVAR.py index 48a015d..7186ae7 100644 --- a/src/daComposant/daAlgorithms/4DVAR.py +++ b/src/daComposant/daAlgorithms/4DVAR.py @@ -229,7 +229,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # # Point de démarrage de l'optimisation : Xini = Xb # ------------------------------------ - if type(Xb) is type(numpy.matrix([])): + if isinstance(Xb, type(numpy.matrix([]))): Xini = Xb.A1.tolist() else: Xini = list(Xb) diff --git a/src/daComposant/daAlgorithms/AdjointTest.py b/src/daComposant/daAlgorithms/AdjointTest.py index 78088e3..8347f76 100644 --- a/src/daComposant/daAlgorithms/AdjointTest.py +++ b/src/daComposant/daAlgorithms/AdjointTest.py @@ -163,7 +163,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # # Sorties eventuelles # ------------------- - print("\nResults of adjoint check by \"%s\" formula:"%self._parameters["ResiduFormula"]) + print(("\nResults of adjoint check by \"%s\" formula:"%self._parameters["ResiduFormula"])) print(msgs) # self._post_run(HO) diff --git a/src/daComposant/daAlgorithms/Blue.py b/src/daComposant/daAlgorithms/Blue.py index a382264..fa17ab0 100644 --- a/src/daComposant/daAlgorithms/Blue.py +++ b/src/daComposant/daAlgorithms/Blue.py @@ -172,7 +172,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): if "MahalanobisConsistency" in self._parameters["StoreSupplementaryCalculations"]: self.StoredVariables["MahalanobisConsistency"].store( float( 2.*J/d.size ) ) if "SimulationQuantiles" in self._parameters["StoreSupplementaryCalculations"]: - Qtls = map(float, self._parameters["Quantiles"]) + Qtls = list(map(float, self._parameters["Quantiles"])) nech = self._parameters["NumberOfSamplesForQuantiles"] YfQ = None for i in range(nech): diff --git a/src/daComposant/daAlgorithms/DerivativeFreeOptimization.py b/src/daComposant/daAlgorithms/DerivativeFreeOptimization.py index e9d9a5f..536ae4d 100644 --- a/src/daComposant/daAlgorithms/DerivativeFreeOptimization.py +++ b/src/daComposant/daAlgorithms/DerivativeFreeOptimization.py @@ -217,8 +217,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): lb = lub[:,0] ; lb[numpy.isnan(lb)] = -float('inf') ub = lub[:,1] ; ub[numpy.isnan(ub)] = +float('inf') if self._parameters["optdisp"]: - print("%s: upper bounds %s"%(opt.get_algorithm_name(),ub)) - print("%s: lower bounds %s"%(opt.get_algorithm_name(),lb)) + print(("%s: upper bounds %s"%(opt.get_algorithm_name(),ub))) + print(("%s: lower bounds %s"%(opt.get_algorithm_name(),lb))) opt.set_upper_bounds(ub) opt.set_lower_bounds(lb) opt.set_ftol_rel(self._parameters["CostDecrementTolerance"]) @@ -226,9 +226,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"]) Minimum = opt.optimize( Xini ) if self._parameters["optdisp"]: - print("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum)) - print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value())) - print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result())) + print(("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum))) + print(("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value()))) + print(("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result()))) elif self._parameters["Minimizer"] == "SIMPLEX" and not PlatformInfo.has_nlopt: Minimum, J_optimal, niter, nfeval, rc = scipy.optimize.fmin( func = CostFunction, @@ -253,8 +253,8 @@ class 
ElementaryAlgorithm(BasicObjects.Algorithm): lb = lub[:,0] ; lb[numpy.isnan(lb)] = -float('inf') ub = lub[:,1] ; ub[numpy.isnan(ub)] = +float('inf') if self._parameters["optdisp"]: - print("%s: upper bounds %s"%(opt.get_algorithm_name(),ub)) - print("%s: lower bounds %s"%(opt.get_algorithm_name(),lb)) + print(("%s: upper bounds %s"%(opt.get_algorithm_name(),ub))) + print(("%s: lower bounds %s"%(opt.get_algorithm_name(),lb))) opt.set_upper_bounds(ub) opt.set_lower_bounds(lb) opt.set_ftol_rel(self._parameters["CostDecrementTolerance"]) @@ -262,9 +262,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"]) Minimum = opt.optimize( Xini ) if self._parameters["optdisp"]: - print("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum)) - print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value())) - print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result())) + print(("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum))) + print(("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value()))) + print(("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result()))) elif self._parameters["Minimizer"] == "BOBYQA" and PlatformInfo.has_nlopt: import nlopt opt = nlopt.opt(nlopt.LN_BOBYQA, Xini.size) @@ -277,8 +277,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): lb = lub[:,0] ; lb[numpy.isnan(lb)] = -float('inf') ub = lub[:,1] ; ub[numpy.isnan(ub)] = +float('inf') if self._parameters["optdisp"]: - print("%s: upper bounds %s"%(opt.get_algorithm_name(),ub)) - print("%s: lower bounds %s"%(opt.get_algorithm_name(),lb)) + print(("%s: upper bounds %s"%(opt.get_algorithm_name(),ub))) + print(("%s: lower bounds %s"%(opt.get_algorithm_name(),lb))) opt.set_upper_bounds(ub) opt.set_lower_bounds(lb) opt.set_ftol_rel(self._parameters["CostDecrementTolerance"]) @@ -286,9 +286,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"]) Minimum = opt.optimize( Xini ) if self._parameters["optdisp"]: - print("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum)) - print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value())) - print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result())) + print(("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum))) + print(("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value()))) + print(("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result()))) elif self._parameters["Minimizer"] == "NEWUOA" and PlatformInfo.has_nlopt: import nlopt opt = nlopt.opt(nlopt.LN_NEWUOA, Xini.size) @@ -301,8 +301,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): lb = lub[:,0] ; lb[numpy.isnan(lb)] = -float('inf') ub = lub[:,1] ; ub[numpy.isnan(ub)] = +float('inf') if self._parameters["optdisp"]: - print("%s: upper bounds %s"%(opt.get_algorithm_name(),ub)) - print("%s: lower bounds %s"%(opt.get_algorithm_name(),lb)) + print(("%s: upper bounds %s"%(opt.get_algorithm_name(),ub))) + print(("%s: lower bounds %s"%(opt.get_algorithm_name(),lb))) opt.set_upper_bounds(ub) opt.set_lower_bounds(lb) opt.set_ftol_rel(self._parameters["CostDecrementTolerance"]) @@ -310,9 +310,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"]) Minimum = opt.optimize( Xini ) if self._parameters["optdisp"]: - print("%s: optimal state: 
%s"%(opt.get_algorithm_name(),Minimum)) - print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value())) - print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result())) + print(("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum))) + print(("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value()))) + print(("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result()))) elif self._parameters["Minimizer"] == "SUBPLEX" and PlatformInfo.has_nlopt: import nlopt opt = nlopt.opt(nlopt.LN_SBPLX, Xini.size) @@ -325,8 +325,8 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): lb = lub[:,0] ; lb[numpy.isnan(lb)] = -float('inf') ub = lub[:,1] ; ub[numpy.isnan(ub)] = +float('inf') if self._parameters["optdisp"]: - print("%s: upper bounds %s"%(opt.get_algorithm_name(),ub)) - print("%s: lower bounds %s"%(opt.get_algorithm_name(),lb)) + print(("%s: upper bounds %s"%(opt.get_algorithm_name(),ub))) + print(("%s: lower bounds %s"%(opt.get_algorithm_name(),lb))) opt.set_upper_bounds(ub) opt.set_lower_bounds(lb) opt.set_ftol_rel(self._parameters["CostDecrementTolerance"]) @@ -334,9 +334,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): opt.set_maxeval(self._parameters["MaximumNumberOfFunctionEvaluations"]) Minimum = opt.optimize( Xini ) if self._parameters["optdisp"]: - print("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum)) - print("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value())) - print("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result())) + print(("%s: optimal state: %s"%(opt.get_algorithm_name(),Minimum))) + print(("%s: minimum of J: %s"%(opt.get_algorithm_name(),opt.last_optimum_value()))) + print(("%s: return code: %i"%(opt.get_algorithm_name(),opt.last_optimize_result()))) else: raise ValueError("Error in Minimizer name: %s"%self._parameters["Minimizer"]) # diff --git a/src/daComposant/daAlgorithms/ExtendedBlue.py b/src/daComposant/daAlgorithms/ExtendedBlue.py index 4752f14..5412b57 100644 --- a/src/daComposant/daAlgorithms/ExtendedBlue.py +++ b/src/daComposant/daAlgorithms/ExtendedBlue.py @@ -171,7 +171,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): if "MahalanobisConsistency" in self._parameters["StoreSupplementaryCalculations"]: self.StoredVariables["MahalanobisConsistency"].store( float( 2.*J/d.size ) ) if "SimulationQuantiles" in self._parameters["StoreSupplementaryCalculations"]: - Qtls = map(float, self._parameters["Quantiles"]) + Qtls = list(map(float, self._parameters["Quantiles"])) nech = self._parameters["NumberOfSamplesForQuantiles"] HtM = HO["Tangent"].asMatrix(ValueForMethodForm = Xa) HtM = HtM.reshape(Y.size,Xa.size) # ADAO & check shape diff --git a/src/daComposant/daAlgorithms/FunctionTest.py b/src/daComposant/daAlgorithms/FunctionTest.py index 8b4cd5b..a8b6ad5 100644 --- a/src/daComposant/daAlgorithms/FunctionTest.py +++ b/src/daComposant/daAlgorithms/FunctionTest.py @@ -77,7 +77,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): msg = " ====" + "="*len(self._parameters["ResultTitle"]) + "====\n" msg += " " + self._parameters["ResultTitle"] + "\n" msg += " ====" + "="*len(self._parameters["ResultTitle"]) + "====\n" - print("%s"%msg) + print(("%s"%msg)) # msg = ("===> Information before launching:\n") msg += (" -----------------------------\n") @@ -105,9 +105,9 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): for i in range(self._parameters["NumberOfRepetition"]): if "CurrentState" in 
self._parameters["StoreSupplementaryCalculations"]: self.StoredVariables["CurrentState"].store( numpy.ravel(Xn) ) - print(" %s\n"%("-"*75,)) + print((" %s\n"%("-"*75,))) if self._parameters["NumberOfRepetition"] > 1: - print("===> Repetition step number %i on a total of %i\n"%(i+1,self._parameters["NumberOfRepetition"])) + print(("===> Repetition step number %i on a total of %i\n"%(i+1,self._parameters["NumberOfRepetition"]))) print("===> Launching direct operator evaluation\n") # Yn = Hm( Xn ) @@ -134,7 +134,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): HO["Direct"].enableAvoidingRedundancy() # ---------- # - print(" %s\n"%("-"*75,)) + print((" %s\n"%("-"*75,))) if self._parameters["SetDebug"]: print("===> End evaluation, deactivating debug if necessary\n") logging.getLogger().setLevel(CUR_LEVEL) diff --git a/src/daComposant/daAlgorithms/GradientTest.py b/src/daComposant/daAlgorithms/GradientTest.py index e81fd57..ab7ae74 100644 --- a/src/daComposant/daAlgorithms/GradientTest.py +++ b/src/daComposant/daAlgorithms/GradientTest.py @@ -276,7 +276,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): msgs += "\n" # # ---------- - print("\nResults of gradient check by \"%s\" formula:"%self._parameters["ResiduFormula"]) + print(("\nResults of gradient check by \"%s\" formula:"%self._parameters["ResiduFormula"])) print(msgs) # if self._parameters["PlotAndSave"]: @@ -370,7 +370,7 @@ def dessiner( if filename != "": __g.hardcopy( filename, color=1) if pause: - raw_input('Please press return to continue...\n') + eval(input('Please press return to continue...\n')) # ============================================================================== if __name__ == "__main__": diff --git a/src/daComposant/daAlgorithms/LinearityTest.py b/src/daComposant/daAlgorithms/LinearityTest.py index d4ca039..d2ba30e 100644 --- a/src/daComposant/daAlgorithms/LinearityTest.py +++ b/src/daComposant/daAlgorithms/LinearityTest.py @@ -334,7 +334,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # # Sorties eventuelles # ------------------- - print("\nResults of linearity check by \"%s\" formula:"%self._parameters["ResiduFormula"]) + print(("\nResults of linearity check by \"%s\" formula:"%self._parameters["ResiduFormula"])) print(msgs) # self._post_run(HO) diff --git a/src/daComposant/daAlgorithms/ParticleSwarmOptimization.py b/src/daComposant/daAlgorithms/ParticleSwarmOptimization.py index aaa289d..16909d3 100644 --- a/src/daComposant/daAlgorithms/ParticleSwarmOptimization.py +++ b/src/daComposant/daAlgorithms/ParticleSwarmOptimization.py @@ -101,7 +101,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): def run(self, Xb=None, Y=None, U=None, HO=None, EM=None, CM=None, R=None, B=None, Q=None, Parameters=None): self._pre_run(Parameters) # - if "BoxBounds" in self._parameters and (type(self._parameters["BoxBounds"]) is type([]) or type(self._parameters["BoxBounds"]) is type(())) and (len(self._parameters["BoxBounds"]) > 0): + if "BoxBounds" in self._parameters and (isinstance(self._parameters["BoxBounds"], type([])) or isinstance(self._parameters["BoxBounds"], type(()))) and (len(self._parameters["BoxBounds"]) > 0): BoxBounds = self._parameters["BoxBounds"] logging.debug("%s Prise en compte des bornes d'incréments de paramètres effectuee"%(self._name,)) else: @@ -156,7 +156,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # # Point de démarrage de l'optimisation : Xini = Xb # ------------------------------------ - if type(Xb) is type(numpy.matrix([])): + if isinstance(Xb, type(numpy.matrix([]))): 
Xini = Xb.A1.tolist() elif Xb is not None: Xini = list(Xb) diff --git a/src/daComposant/daAlgorithms/QuantileRegression.py b/src/daComposant/daAlgorithms/QuantileRegression.py index 3d95d36..27e71fb 100644 --- a/src/daComposant/daAlgorithms/QuantileRegression.py +++ b/src/daComposant/daAlgorithms/QuantileRegression.py @@ -120,7 +120,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # # Point de démarrage de l'optimisation : Xini = Xb # ------------------------------------ - if type(Xb) is type(numpy.matrix([])): + if isinstance(Xb, type(numpy.matrix([]))): Xini = Xb.A1.tolist() else: Xini = list(Xb) diff --git a/src/daComposant/daAlgorithms/SamplingTest.py b/src/daComposant/daAlgorithms/SamplingTest.py index 77d08af..7d3dc16 100644 --- a/src/daComposant/daAlgorithms/SamplingTest.py +++ b/src/daComposant/daAlgorithms/SamplingTest.py @@ -166,12 +166,12 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): CUR_LEVEL = logging.getLogger().getEffectiveLevel() logging.getLogger().setLevel(logging.DEBUG) print("===> Beginning of evaluation, activating debug\n") - print(" %s\n"%("-"*75,)) + print((" %s\n"%("-"*75,))) # # ---------- for i,Xx in enumerate(sampleList): if self._parameters["SetDebug"]: - print("===> Launching evaluation for state %i"%i) + print(("===> Launching evaluation for state %i"%i)) __Xn = numpy.asmatrix(numpy.ravel( Xx )).T try: Yn = Hm( __Xn ) @@ -182,7 +182,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # ---------- # if self._parameters["SetDebug"]: - print("\n %s\n"%("-"*75,)) + print(("\n %s\n"%("-"*75,))) print("===> End evaluation, deactivating debug if necessary\n") logging.getLogger().setLevel(CUR_LEVEL) # diff --git a/src/daComposant/daAlgorithms/TangentTest.py b/src/daComposant/daAlgorithms/TangentTest.py index ca81ee5..6afde7d 100644 --- a/src/daComposant/daAlgorithms/TangentTest.py +++ b/src/daComposant/daAlgorithms/TangentTest.py @@ -191,7 +191,7 @@ class ElementaryAlgorithm(BasicObjects.Algorithm): # # Sorties eventuelles # ------------------- - print("\nResults of tangent check by \"%s\" formula:"%self._parameters["ResiduFormula"]) + print(("\nResults of tangent check by \"%s\" formula:"%self._parameters["ResiduFormula"])) print(msgs) # self._post_run(HO) diff --git a/src/daComposant/daCore/AssimilationStudy.py b/src/daComposant/daCore/AssimilationStudy.py index 82fca1b..cc98478 100644 --- a/src/daComposant/daCore/AssimilationStudy.py +++ b/src/daComposant/daCore/AssimilationStudy.py @@ -322,13 +322,13 @@ class AssimilationStudy: # if appliedToX is not None: self.__HO["AppliedToX"] = {} - if type(appliedToX) is not dict: + if not isinstance(appliedToX, dict): raise ValueError("Error: observation operator defined by \"appliedToX\" need a dictionary as argument.") for key in list(appliedToX.keys()): - if type( appliedToX[key] ) is type( numpy.matrix([]) ): + if isinstance(appliedToX[key], type( numpy.matrix([]) )): # Pour le cas où l'on a une vraie matrice self.__HO["AppliedToX"][key] = numpy.matrix( appliedToX[key].A1, numpy.float ).T - elif type( appliedToX[key] ) is type( numpy.array([]) ) and len(appliedToX[key].shape) > 1: + elif isinstance(appliedToX[key], type( numpy.array([]) )) and len(appliedToX[key].shape) > 1: # Pour le cas où l'on a un vecteur représenté en array avec 2 dimensions self.__HO["AppliedToX"][key] = numpy.matrix( appliedToX[key].reshape(len(appliedToX[key]),), numpy.float ).T else: @@ -782,13 +782,13 @@ class AssimilationStudy: if not( min(__EM_shape) == max(__EM_shape) ): raise ValueError("Shape characteristic of evolution 
operator (EM) is incorrect: \"%s\"."%(__EM_shape,)) # - if len(self.__HO) > 0 and not(type(self.__HO) is type({})) and not( __HO_shape[1] == max(__Xb_shape) ): + if len(self.__HO) > 0 and not(isinstance(self.__HO, type({}))) and not( __HO_shape[1] == max(__Xb_shape) ): raise ValueError("Shape characteristic of observation operator (H) \"%s\" and state (X) \"%s\" are incompatible."%(__HO_shape,__Xb_shape)) - if len(self.__HO) > 0 and not(type(self.__HO) is type({})) and not( __HO_shape[0] == max(__Y_shape) ): + if len(self.__HO) > 0 and not(isinstance(self.__HO, type({}))) and not( __HO_shape[0] == max(__Y_shape) ): raise ValueError("Shape characteristic of observation operator (H) \"%s\" and observation (Y) \"%s\" are incompatible."%(__HO_shape,__Y_shape)) - if len(self.__HO) > 0 and not(type(self.__HO) is type({})) and len(self.__B) > 0 and not( __HO_shape[1] == __B_shape[0] ): + if len(self.__HO) > 0 and not(isinstance(self.__HO, type({}))) and len(self.__B) > 0 and not( __HO_shape[1] == __B_shape[0] ): raise ValueError("Shape characteristic of observation operator (H) \"%s\" and a priori errors covariance matrix (B) \"%s\" are incompatible."%(__HO_shape,__B_shape)) - if len(self.__HO) > 0 and not(type(self.__HO) is type({})) and len(self.__R) > 0 and not( __HO_shape[0] == __R_shape[1] ): + if len(self.__HO) > 0 and not(isinstance(self.__HO, type({}))) and len(self.__R) > 0 and not( __HO_shape[0] == __R_shape[1] ): raise ValueError("Shape characteristic of observation operator (H) \"%s\" and observation errors covariance matrix (R) \"%s\" are incompatible."%(__HO_shape,__R_shape)) # if self.__B is not None and len(self.__B) > 0 and not( __B_shape[1] == max(__Xb_shape) ): @@ -804,10 +804,10 @@ class AssimilationStudy: if self.__R is not None and len(self.__R) > 0 and not( __R_shape[1] == max(__Y_shape) ): raise ValueError("Shape characteristic of observation errors covariance matrix (R) \"%s\" and observation (Y) \"%s\" are incompatible."%(__R_shape,__Y_shape)) # - if self.__EM is not None and len(self.__EM) > 0 and not(type(self.__EM) is type({})) and not( __EM_shape[1] == max(__Xb_shape) ): + if self.__EM is not None and len(self.__EM) > 0 and not(isinstance(self.__EM, type({}))) and not( __EM_shape[1] == max(__Xb_shape) ): raise ValueError("Shape characteristic of evolution model (EM) \"%s\" and state (X) \"%s\" are incompatible."%(__EM_shape,__Xb_shape)) # - if self.__CM is not None and len(self.__CM) > 0 and not(type(self.__CM) is type({})) and not( __CM_shape[1] == max(__U_shape) ): + if self.__CM is not None and len(self.__CM) > 0 and not(isinstance(self.__CM, type({}))) and not( __CM_shape[1] == max(__U_shape) ): raise ValueError("Shape characteristic of control model (CM) \"%s\" and control (U) \"%s\" are incompatible."%(__CM_shape,__U_shape)) # if ("AlgorithmParameters" in self.__StoredInputs) \ diff --git a/src/daComposant/daCore/BasicObjects.py b/src/daComposant/daCore/BasicObjects.py index 2d52bcb..1013cfa 100644 --- a/src/daComposant/daCore/BasicObjects.py +++ b/src/daComposant/daCore/BasicObjects.py @@ -369,7 +369,7 @@ class Algorithm(object): self.__setParameters(Parameters) # # Corrections et complements - if "Bounds" in self._parameters and (type(self._parameters["Bounds"]) is type([]) or type(self._parameters["Bounds"]) is type(())) and (len(self._parameters["Bounds"]) > 0): + if "Bounds" in self._parameters and (isinstance(self._parameters["Bounds"], type([])) or isinstance(self._parameters["Bounds"], type(()))) and (len(self._parameters["Bounds"]) > 0): 
logging.debug("%s Prise en compte des bornes effectuee"%(self._name,)) else: self._parameters["Bounds"] = None @@ -430,7 +430,7 @@ class Algorithm(object): def keys(self): "D.keys() -> list of D's keys" - return self.StoredVariables.keys() + return list(self.StoredVariables.keys()) def run(self, Xb=None, Y=None, H=None, M=None, R=None, B=None, Q=None, Parameters=None): """ @@ -463,8 +463,7 @@ class Algorithm(object): dictionnaire des paramètres requis. """ if noDetails: - ks = list(self.__required_parameters.keys()) - ks.sort() + ks = sorted(self.__required_parameters.keys()) return ks else: return self.__required_parameters @@ -506,8 +505,8 @@ class Algorithm(object): Permet de stocker les paramètres reçus dans le dictionnaire interne. """ self._parameters.update( fromDico ) - for k in self.__required_parameters.keys(): - if k in fromDico.keys(): + for k in list(self.__required_parameters.keys()): + if k in list(fromDico.keys()): self._parameters[k] = self.setParameterValue(k,fromDico[k]) else: self._parameters[k] = self.setParameterValue(k) diff --git a/src/daComposant/daCore/Persistence.py b/src/daComposant/daCore/Persistence.py index d867d07..352c0ad 100644 --- a/src/daComposant/daCore/Persistence.py +++ b/src/daComposant/daCore/Persistence.py @@ -151,8 +151,8 @@ class Persistence(object): # --------------------------------------------------------- def __filteredIndexes(self, **kwargs): "Function interne filtrant les index" - __indexOfFilteredItems = range(len(self.__tags)) - __filteringKwTags = kwargs.keys() + __indexOfFilteredItems = list(range(len(self.__tags))) + __filteringKwTags = list(kwargs.keys()) if len(__filteringKwTags) > 0: for tagKey in __filteringKwTags: __tmp = [] @@ -198,9 +198,8 @@ class Persistence(object): "D.tagkeys() -> list of D's tag keys" __allKeys = [] for dicotags in self.__tags: - __allKeys.extend( dicotags.keys() ) - __allKeys = list(set(__allKeys)) - __allKeys.sort() + __allKeys.extend( list(dicotags.keys()) ) + __allKeys = sorted(set(__allKeys)) return __allKeys # def valueserie(self, item=None, allSteps=True, **kwargs): @@ -224,10 +223,9 @@ class Persistence(object): if outputTag is not None and isinstance(outputTag,str) : outputValues = [] for index in __indexOfFilteredItems: - if outputTag in self.__tags[index].keys(): + if outputTag in list(self.__tags[index].keys()): outputValues.append( self.__tags[index][outputTag] ) - outputValues = list(set(outputValues)) - outputValues.sort() + outputValues = sorted(set(outputValues)) return outputValues # # Dans le cas où la sortie donne les tags satisfaisants aux conditions @@ -238,8 +236,7 @@ class Persistence(object): allTags = {} for index in __indexOfFilteredItems: allTags.update( self.__tags[index] ) - allKeys = list(allTags.keys()) - allKeys.sort() + allKeys = sorted(allTags.keys()) return allKeys # --------------------------------------------------------- @@ -415,7 +412,7 @@ class Persistence(object): elif item is not None and item < len(self.__values): indexes.append(item) else: - indexes = indexes + range(len(self.__values)) + indexes = indexes + list(range(len(self.__values))) # i = -1 for index in indexes: @@ -423,7 +420,7 @@ class Persistence(object): if isinstance(steps,list) or isinstance(steps,numpy.ndarray): Steps = list(steps) else: - Steps = range(len(self.__values[index])) + Steps = list(range(len(self.__values[index]))) # self.__g.plot( self.__gnuplot.Data( Steps, self.__values[index], title=ltitle ) ) # @@ -434,7 +431,7 @@ class Persistence(object): raise ValueError("Error: a file with 
this name \"%s\" already exists."%stepfilename) self.__g.hardcopy(filename=stepfilename, color=1) if self.__pause: - raw_input('Please press return to continue...\n') + eval(input('Please press return to continue...\n')) def __replots(self): """ @@ -443,11 +440,11 @@ class Persistence(object): if self.__dynamic and len(self.__values) < 2: return 0 # self.__g('set title "'+str(self.__title).encode('ascii','replace')) - Steps = range(len(self.__values)) + Steps = list(range(len(self.__values))) self.__g.plot( self.__gnuplot.Data( Steps, self.__values, title=self.__ltitle ) ) # if self.__pause: - raw_input('Please press return to continue...\n') + eval(input('Please press return to continue...\n')) # --------------------------------------------------------- def mean(self): @@ -583,7 +580,7 @@ class Persistence(object): if isinstance(steps,list) or isinstance(steps, numpy.ndarray): Steps = list(steps) else: - Steps = range(len(self.__values[0])) + Steps = list(range(len(self.__values[0]))) self.__g = self.__gnuplot.Gnuplot() # persist=1 self.__g('set terminal '+self.__gnuplot.GnuplotOpts.default_term) self.__g('set style data lines') @@ -602,7 +599,7 @@ class Persistence(object): if filename != "": self.__g.hardcopy(filename=filename, color=1) if pause: - raw_input('Please press return to continue...\n') + eval(input('Please press return to continue...\n')) # --------------------------------------------------------- def setDataObserver(self, HookFunction = None, HookParameters = None, Scheduler = None): @@ -618,22 +615,22 @@ class Persistence(object): maxiter = int( 1e9 ) if sys.version.split()[0] < '3': if isinstance(Scheduler,int): # Considéré comme une fréquence à partir de 0 - Schedulers = xrange( 0, maxiter, int(Scheduler) ) + Schedulers = list(range( 0, maxiter, int(Scheduler))) elif isinstance(Scheduler,xrange): # Considéré comme un itérateur Schedulers = Scheduler elif isinstance(Scheduler,(list,tuple)): # Considéré comme des index explicites - Schedulers = [long(i) for i in Scheduler] # map( long, Scheduler ) + Schedulers = [int(i) for i in Scheduler] # map( long, Scheduler ) else: # Dans tous les autres cas, activé par défaut - Schedulers = xrange( 0, maxiter ) + Schedulers = list(range( 0, maxiter)) else: if isinstance(Scheduler,int): # Considéré comme une fréquence à partir de 0 - Schedulers = range( 0, maxiter, int(Scheduler) ) + Schedulers = list(range( 0, maxiter, int(Scheduler))) elif sys.version.split()[0] > '3' and isinstance(Scheduler,range): # Considéré comme un itérateur Schedulers = Scheduler elif isinstance(Scheduler,(list,tuple)): # Considéré comme des index explicites Schedulers = [int(i) for i in Scheduler] # map( int, Scheduler ) else: # Dans tous les autres cas, activé par défaut - Schedulers = range( 0, maxiter ) + Schedulers = list(range( 0, maxiter)) # # Stockage interne de l'observer dans la variable # ----------------------------------------------- @@ -647,7 +644,7 @@ class Persistence(object): définition, ou un simple string qui est le nom de la fonction. """ if hasattr(HookFunction,"func_name"): - name = str( HookFunction.func_name ) + name = str( HookFunction.__name__ ) elif isinstance(HookFunction,str): name = str( HookFunction ) else: @@ -767,7 +764,7 @@ class CompositePersistence(object): Stockage d'une valeur "value" pour le "step" dans la variable "name". 
""" if name is None: raise ValueError("Storable object name is required for storage.") - if name not in self.__StoredObjects.keys(): + if name not in list(self.__StoredObjects.keys()): raise ValueError("No such name '%s' exists in storable objects."%name) self.__StoredObjects[name].store( value=value, **kwargs ) @@ -777,7 +774,7 @@ class CompositePersistence(object): type de Persistence et son type de base à chaque pas. """ if name is None: raise ValueError("Object name is required for adding an object.") - if name in self.__StoredObjects.keys(): + if name in list(self.__StoredObjects.keys()): raise ValueError("An object with the same name '%s' already exists in storable objects. Choose another one."%name) if basetype is None: self.__StoredObjects[name] = persistenceType( name=str(name) ) @@ -789,7 +786,7 @@ class CompositePersistence(object): Renvoie l'objet de type Persistence qui porte le nom demandé. """ if name is None: raise ValueError("Object name is required for retrieving an object.") - if name not in self.__StoredObjects.keys(): + if name not in list(self.__StoredObjects.keys()): raise ValueError("No such name '%s' exists in stored objects."%name) return self.__StoredObjects[name] @@ -801,7 +798,7 @@ class CompositePersistence(object): fonctionne. """ if name is None: raise ValueError("Object name is required for setting an object.") - if name in self.__StoredObjects.keys(): + if name in list(self.__StoredObjects.keys()): raise ValueError("An object with the same name '%s' already exists in storable objects. Choose another one."%name) self.__StoredObjects[name] = objet @@ -810,7 +807,7 @@ class CompositePersistence(object): Supprime un objet de la liste des objets stockables. """ if name is None: raise ValueError("Object name is required for retrieving an object.") - if name not in self.__StoredObjects.keys(): + if name not in list(self.__StoredObjects.keys()): raise ValueError("No such name '%s' exists in stored objects."%name) del self.__StoredObjects[name] @@ -830,16 +827,16 @@ class CompositePersistence(object): def values(self): "D.values() -> list of D's values" - return self.__StoredObjects.values() + return list(self.__StoredObjects.values()) def items(self): "D.items() -> list of D's (key, value) pairs, as 2-tuples" - return self.__StoredObjects.items() + return list(self.__StoredObjects.items()) # --------------------------------------------------------- def get_stored_objects(self, hideVoidObjects = False): "Renvoie la liste des objets présents" - objs = self.__StoredObjects.keys() + objs = list(self.__StoredObjects.keys()) if hideVoidObjects: usedObjs = [] for k in objs: @@ -848,8 +845,7 @@ class CompositePersistence(object): finally: pass objs = usedObjs - objs = list(objs) - objs.sort() + objs = sorted(objs) return objs # --------------------------------------------------------- @@ -870,7 +866,7 @@ class CompositePersistence(object): filename = os.path.abspath( filename ) # if sys.version.split()[0] < '3': - import cPickle as lPickle + import pickle as lPickle else: import pickle as lPickle if mode == "pickle": @@ -900,7 +896,7 @@ class CompositePersistence(object): filename = os.path.abspath( filename ) # if sys.version.split()[0] < '3': - import cPickle as lPickle + import pickle as lPickle else: import pickle as lPickle if mode == "pickle": @@ -913,7 +909,7 @@ class CompositePersistence(object): else: pkl_file = open(filename, 'rb') output = lPickle.load(pkl_file) - for k in output.keys(): + for k in list(output.keys()): self[k] = output[k] else: raise 
ValueError("Load mode '%s' unknown. Choose another one."%mode) diff --git a/src/daComposant/daCore/PlatformInfo.py b/src/daComposant/daCore/PlatformInfo.py index f14654e..f9f9fd0 100644 --- a/src/daComposant/daCore/PlatformInfo.py +++ b/src/daComposant/daCore/PlatformInfo.py @@ -197,7 +197,7 @@ class PathManagement(object): self.__paths["daMatrices"] = os.path.join(parent,"daMatrices") self.__paths["daNumerics"] = os.path.join(parent,"daNumerics") # - for v in self.__paths.values(): + for v in list(self.__paths.values()): sys.path.insert(0, v ) # # Conserve en unique exemplaire chaque chemin diff --git a/src/daComposant/daCore/Templates.py b/src/daComposant/daCore/Templates.py index a6d1ec4..dbab12b 100644 --- a/src/daComposant/daCore/Templates.py +++ b/src/daComposant/daCore/Templates.py @@ -56,8 +56,7 @@ class TemplateStorage(object): def keys(self): "D.keys() -> list of D's keys" - __keys = list(self.__values.keys()) - __keys.sort() + __keys = sorted(self.__values.keys()) return __keys # def has_key(self, name): @@ -84,10 +83,10 @@ class TemplateStorage(object): def keys_in_presentation_order(self): "D.keys_in_presentation_order() -> list of D's keys in presentation order" __orders = [] - for k in self.keys(): + for k in list(self.keys()): __orders.append( self.__values[k]['order'] ) __reorder = numpy.array(__orders).argsort() - return list(numpy.array(self.keys())[__reorder]) + return list(numpy.array(list(self.keys()))[__reorder]) # ============================================================================== ObserverTemplates = TemplateStorage() diff --git a/src/daComposant/daDiagnostics/PlotVector.py b/src/daComposant/daDiagnostics/PlotVector.py index 97751e4..e3459de 100644 --- a/src/daComposant/daDiagnostics/PlotVector.py +++ b/src/daComposant/daDiagnostics/PlotVector.py @@ -65,7 +65,7 @@ class ElementaryDiagnostic(BasicObjects.Diagnostic): if filename != "": self.__g.hardcopy(filename=filename, color=1) if pause: - raw_input('Please press return to continue...\n') + eval(input('Please press return to continue...\n')) # return 1 @@ -102,8 +102,8 @@ class ElementaryDiagnostic(BasicObjects.Diagnostic): if Vector.size < 1: raise ValueError("The given vector must not be empty") if steps is None: - Steps = range(len( vector )) - elif not ( type(steps) is type([]) or type(steps) is not type(numpy.array([])) ): + Steps = list(range(len( vector ))) + elif not ( isinstance(steps, type([])) or not isinstance(steps, type(numpy.array([]))) ): raise ValueError("The steps must be given as a list/tuple.") else: Steps = list(steps) diff --git a/src/daComposant/daDiagnostics/PlotVectors.py b/src/daComposant/daDiagnostics/PlotVectors.py index 24777c0..2100a34 100644 --- a/src/daComposant/daDiagnostics/PlotVectors.py +++ b/src/daComposant/daDiagnostics/PlotVectors.py @@ -67,7 +67,7 @@ class ElementaryDiagnostic(BasicObjects.Diagnostic): if filename != "": self.__g.hardcopy(filename=filename, color=1) if pause: - raw_input('Please press return to continue...\n') + eval(input('Please press return to continue...\n')) # return 1 @@ -102,7 +102,7 @@ class ElementaryDiagnostic(BasicObjects.Diagnostic): """ if vector is None: raise ValueError("One vector must be given to plot it.") - if type(vector) is not type([]) and type(vector) is not type(()): + if not isinstance(vector, type([])) and not isinstance(vector, type(())): raise ValueError("The vector(s) must be given as a list/tuple.") if ltitle is None or len(ltitle) != len(vector): ltitle = ["" for i in range(len(vector))] @@ -112,8 +112,8 @@ class 
ElementaryDiagnostic(BasicObjects.Diagnostic): if VectorList[-1].size < 1: raise ValueError("Each given vector must not be empty.") if steps is None: - Steps = range(len(vector[0])) - elif not ( type(steps) is type([]) or type(steps) is not type(numpy.array([])) ): + Steps = list(range(len(vector[0]))) + elif not ( isinstance(steps, type([])) or not isinstance(steps, type(numpy.array([]))) ): raise ValueError("The steps must be given as a list/tuple.") else: Steps = list(steps) diff --git a/src/daComposant/daDiagnostics/RMS.py b/src/daComposant/daDiagnostics/RMS.py index 509c15b..5e5a282 100644 --- a/src/daComposant/daDiagnostics/RMS.py +++ b/src/daComposant/daDiagnostics/RMS.py @@ -82,7 +82,7 @@ if __name__ == "__main__": vect2 = [0,0,0,0,0,0,0,0,0,0] D.calculate(vect1,vect2) print(" Les valeurs de RMS attendues sont les suivantes : [1.0, 1.0, 1.0, 3.0, 0.53162016515553656, 0.73784217096601323]") - print(" Les RMS obtenues................................: %s"%(D[:],)) - print(" La moyenne......................................: %s"%(D.mean(),)) + print((" Les RMS obtenues................................: %s"%(D[:],))) + print((" La moyenne......................................: %s"%(D.mean(),))) print("") diff --git a/src/daComposant/daDiagnostics/ReduceVariance.py b/src/daComposant/daDiagnostics/ReduceVariance.py index f8d22b3..39a4e1e 100644 --- a/src/daComposant/daDiagnostics/ReduceVariance.py +++ b/src/daComposant/daDiagnostics/ReduceVariance.py @@ -78,12 +78,12 @@ if __name__ == "__main__": # ---------------------- x1 = numpy.matrix(([3. , 4., 5. ])) x2 = numpy.matrix(([1.5, 2., 2.5])) - print(" L'écart entre les observations et l'ébauche est OMB : %s"%(x1,)) - print(" La moyenne de OMB (i.e. le biais) est de............: %s"%(x1.mean(),)) - print(" La variance de OMB est de...........................: %s"%(x1.var(),)) - print(" L'écart entre les observations et l'analyse est OMA : %s"%(x2,)) - print(" La moyenne de OMA (i.e. le biais) est de............: %s"%(x2.mean(),)) - print(" La variance de OMA est de...........................: %s"%(x2.var(),)) + print((" L'écart entre les observations et l'ébauche est OMB : %s"%(x1,))) + print((" La moyenne de OMB (i.e. le biais) est de............: %s"%(x1.mean(),))) + print((" La variance de OMB est de...........................: %s"%(x1.var(),))) + print((" L'écart entre les observations et l'analyse est OMA : %s"%(x2,))) + print((" La moyenne de OMA (i.e. le biais) est de............: %s"%(x2.mean(),))) + print((" La variance de OMA est de...........................: %s"%(x2.var(),))) # D.calculate( vectorOMB = x1, vectorOMA = x2) if not D[0] : @@ -94,14 +94,14 @@ if __name__ == "__main__": # # Vecteur de type array # --------------------- - x1 = numpy.array(range(11)) - x2 = numpy.matrix(range(-10,12,2)) - print(" L'écart entre les observations et l'ébauche est OMB : %s"%(x1,)) - print(" La moyenne de OMB (i.e. le biais) est de............: %s"%(x1.mean(),)) - print(" La variance de OMB est de...........................: %s"%(x1.var(),)) - print(" L'écart entre les observations et l'analyse est OMA : %s"%(x2,)) - print(" La moyenne de OMA (i.e. le biais) est de............: %s"%(x2.mean(),)) - print(" La variance de OMA est de...........................: %s"%(x2.var(),)) + x1 = numpy.array(list(range(11))) + x2 = numpy.matrix(list(range(-10,12,2))) + print((" L'écart entre les observations et l'ébauche est OMB : %s"%(x1,))) + print((" La moyenne de OMB (i.e. 
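Note on the converted steps checks above: the rewritten condition keeps the original mix of "or" and negations, so as written it only fires when steps is a numpy array. Assuming the intent is to accept None, a list/tuple or a 1-D array for the abscissa and to refuse anything else, an equivalent check could look like the following sketch (the function name and behaviour are an interpretation, not the patch's code):

    import numpy

    def normalize_steps(vector, steps=None):
        # Assumed intent: accept None, a list/tuple or a numpy array as the
        # plot abscissa, and refuse anything else with a clear error.
        if steps is None:
            return list(range(len(vector)))
        if isinstance(steps, (list, tuple, numpy.ndarray)):
            return list(steps)
        raise ValueError("The steps must be given as a list/tuple.")

    print(normalize_steps([10., 20., 30.]))
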
le biais) est de............: %s"%(x1.mean(),))) + print((" La variance de OMB est de...........................: %s"%(x1.var(),))) + print((" L'écart entre les observations et l'analyse est OMA : %s"%(x2,))) + print((" La moyenne de OMA (i.e. le biais) est de............: %s"%(x2.mean(),))) + print((" La variance de OMA est de...........................: %s"%(x2.var(),))) # D.calculate( vectorOMB = x1, vectorOMA = x2) if not D[1] : diff --git a/src/daComposant/daNumerics/ApproximatedDerivatives.py b/src/daComposant/daNumerics/ApproximatedDerivatives.py index cd6581b..4c96774 100644 --- a/src/daComposant/daNumerics/ApproximatedDerivatives.py +++ b/src/daComposant/daNumerics/ApproximatedDerivatives.py @@ -98,7 +98,7 @@ class FDApproximation(object): try: mod = os.path.join(Function.__globals__['filepath'],Function.__globals__['filename']) except: - mod = os.path.abspath(Function.im_func.__globals__['__file__']) + mod = os.path.abspath(Function.__func__.__globals__['__file__']) if not os.path.isfile(mod): raise ImportError("No user defined function or method found with the name %s"%(mod,)) self.__userFunction__modl = os.path.basename(mod).replace('.pyc','').replace('.pyo','').replace('.py','') diff --git a/src/daEficas/configuration_ADAO.py b/src/daEficas/configuration_ADAO.py index 98aec1d..d859851 100644 --- a/src/daEficas/configuration_ADAO.py +++ b/src/daEficas/configuration_ADAO.py @@ -20,10 +20,10 @@ # # See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com # -# Author: André Ribes, andre.ribes@edf.fr, EDF R&D +# Author: André Ribes, andre.ribes@edf.fr, EDF R&D """ - Ce module sert pour charger les paramètres de configuration d'EFICAS + Ce module sert pour charger les paramètres de configuration d'EFICAS """ # Modules Python # print "passage dans la surcharge de configuration pour Adao" diff --git a/src/daEficas/generator_adao.py b/src/daEficas/generator_adao.py index 0bd1bff..7309d4a 100644 --- a/src/daEficas/generator_adao.py +++ b/src/daEficas/generator_adao.py @@ -58,7 +58,7 @@ class AdaoGenerator(PythonGenerator): def gener(self,obj,format='brut',config=None,appli=None): self.logger.debug("method gener called") self.text_comm = PythonGenerator.gener(self, obj, format, config) - for key, value in self.dictMCVal.iteritems(): + for key, value in list(self.dictMCVal.items()): self.logger.debug("dictMCVAl %s %s" % (key,value)) try : @@ -69,7 +69,7 @@ class AdaoGenerator(PythonGenerator): self.logger.debug("EFICAS case is not valid, python command file for YACS schema generation cannot be created") self.logger.debug(self.text_da) self.dictMCVal = {} - # traceback.print_exc() + # traceback.print_exc() return self.text_comm def writeDefault(self, fn): @@ -94,7 +94,7 @@ class AdaoGenerator(PythonGenerator): def generate_da(self): - if "__CHECKING_STUDY__StudyName" in self.dictMCVal.keys(): + if "__CHECKING_STUDY__StudyName" in list(self.dictMCVal.keys()): self.type_of_study = "CHECKING_STUDY" else: self.type_of_study = "ASSIMILATION_STUDY" @@ -107,49 +107,49 @@ class AdaoGenerator(PythonGenerator): # Extraction de StudyName self.text_da += "study_config['Name'] = '" + self.dictMCVal["__"+self.type_of_study+"__StudyName"] + "'\n" # Extraction de Debug - if "__"+self.type_of_study+"__Debug" in self.dictMCVal.keys(): + if "__"+self.type_of_study+"__Debug" in list(self.dictMCVal.keys()): self.text_da += "study_config['Debug'] = '" + str(self.dictMCVal["__"+self.type_of_study+"__Debug"]) + "'\n" else: self.text_da += "study_config['Debug'] = '0'\n" # Extraction de 
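Note on the im_func change above: im_func no longer exists in Python 3; __func__ plays that role for bound methods, while plain functions expose __globals__ directly. A small helper covering both cases, written here only as an illustration with an invented name:

    import os

    def source_file_of(callable_object):
        # Bound methods keep the underlying function in __func__ (the
        # Python 3 replacement for im_func); plain functions already carry
        # __globals__ themselves.
        raw = getattr(callable_object, "__func__", callable_object)
        return os.path.abspath(raw.__globals__.get("__file__", ""))

    print(source_file_of(source_file_of))
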
Algorithm et de ses parametres - if "__"+self.type_of_study+"__AlgorithmParameters__Algorithm" in self.dictMCVal.keys(): + if "__"+self.type_of_study+"__AlgorithmParameters__Algorithm" in list(self.dictMCVal.keys()): self.text_da += "study_config['Algorithm'] = '" + self.dictMCVal["__"+self.type_of_study+"__AlgorithmParameters__Algorithm"] + "'\n" self.add_AlgorithmParameters() - elif "__"+self.type_of_study+"__Algorithm" in self.dictMCVal.keys(): + elif "__"+self.type_of_study+"__Algorithm" in list(self.dictMCVal.keys()): self.text_da += "study_config['Algorithm'] = '" + self.dictMCVal["__"+self.type_of_study+"__Algorithm"] + "'\n" - if "__"+self.type_of_study+"__Background__INPUT_TYPE" in self.dictMCVal.keys(): + if "__"+self.type_of_study+"__Background__INPUT_TYPE" in list(self.dictMCVal.keys()): self.add_data("Background") - if "__"+self.type_of_study+"__BackgroundError__INPUT_TYPE" in self.dictMCVal.keys(): + if "__"+self.type_of_study+"__BackgroundError__INPUT_TYPE" in list(self.dictMCVal.keys()): self.add_data("BackgroundError") - if "__"+self.type_of_study+"__Observation__INPUT_TYPE" in self.dictMCVal.keys(): + if "__"+self.type_of_study+"__Observation__INPUT_TYPE" in list(self.dictMCVal.keys()): self.add_data("Observation") - if "__"+self.type_of_study+"__ObservationError__INPUT_TYPE" in self.dictMCVal.keys(): + if "__"+self.type_of_study+"__ObservationError__INPUT_TYPE" in list(self.dictMCVal.keys()): self.add_data("ObservationError") - if "__"+self.type_of_study+"__CheckingPoint__INPUT_TYPE" in self.dictMCVal.keys(): + if "__"+self.type_of_study+"__CheckingPoint__INPUT_TYPE" in list(self.dictMCVal.keys()): self.add_data("CheckingPoint") - if "__"+self.type_of_study+"__ObservationOperator__INPUT_TYPE" in self.dictMCVal.keys(): + if "__"+self.type_of_study+"__ObservationOperator__INPUT_TYPE" in list(self.dictMCVal.keys()): self.add_data("ObservationOperator") - if "__"+self.type_of_study+"__EvolutionModel__INPUT_TYPE" in self.dictMCVal.keys(): + if "__"+self.type_of_study+"__EvolutionModel__INPUT_TYPE" in list(self.dictMCVal.keys()): self.add_data("EvolutionModel") - if "__"+self.type_of_study+"__EvolutionError__INPUT_TYPE" in self.dictMCVal.keys(): + if "__"+self.type_of_study+"__EvolutionError__INPUT_TYPE" in list(self.dictMCVal.keys()): self.add_data("EvolutionError") - if "__"+self.type_of_study+"__ControlInput__INPUT_TYPE" in self.dictMCVal.keys(): + if "__"+self.type_of_study+"__ControlInput__INPUT_TYPE" in list(self.dictMCVal.keys()): self.add_data("ControlInput") self.add_variables() # Parametres optionnels # Extraction du StudyRepertory - if "__"+self.type_of_study+"__StudyRepertory" in self.dictMCVal.keys(): + if "__"+self.type_of_study+"__StudyRepertory" in list(self.dictMCVal.keys()): self.text_da += "study_config['Repertory'] = '" + self.dictMCVal["__"+self.type_of_study+"__StudyRepertory"] + "'\n" # Extraction de UserPostAnalysis - if "__"+self.type_of_study+"__UserPostAnalysis__FROM" in self.dictMCVal.keys(): + if "__"+self.type_of_study+"__UserPostAnalysis__FROM" in list(self.dictMCVal.keys()): self.add_UserPostAnalysis() - if "__"+self.type_of_study+"__UserDataInit__INIT_FILE" in self.dictMCVal.keys(): + if "__"+self.type_of_study+"__UserDataInit__INIT_FILE" in list(self.dictMCVal.keys()): self.add_init() - if "__"+self.type_of_study+"__Observers__SELECTION" in self.dictMCVal.keys(): + if "__"+self.type_of_study+"__Observers__SELECTION" in list(self.dictMCVal.keys()): self.add_observers() def add_data(self, data_name): @@ -180,7 +180,7 @@ class 
AdaoGenerator(PythonGenerator): self.text_da += data_name + "_config['Type'] = '" + data_type + "'\n" self.text_da += data_name + "_config['From'] = '" + from_type + "'\n" self.text_da += data_name + "_config['Data'] = '" + data + "'\n" - if search_text+"Stored" in self.dictMCVal.keys(): + if search_text+"Stored" in list(self.dictMCVal.keys()): self.text_da += data_name + "_config['Stored'] = '" + str(self.dictMCVal[search_text+"Stored"]) + "'\n" self.text_da += "study_config['" + data_name + "'] = " + data_name + "_config\n" @@ -219,9 +219,9 @@ class AdaoGenerator(PythonGenerator): self.text_da += data_name + "_ScriptWithOneFunction['Script']['Adjoint'] = '" + data + "'\n" self.text_da += data_name + "_ScriptWithOneFunction['DifferentialIncrement'] = " + str(float(self.dictMCVal[search_type + "SCRIPTWITHONEFUNCTION_DATA__DifferentialIncrement"])) + "\n" self.text_da += data_name + "_ScriptWithOneFunction['CenteredFiniteDifference'] = " + str(self.dictMCVal[search_type + "SCRIPTWITHONEFUNCTION_DATA__CenteredFiniteDifference"]) + "\n" - if search_type + "SCRIPTWITHONEFUNCTION_DATA__EnableMultiProcessing" in self.dictMCVal.keys(): + if search_type + "SCRIPTWITHONEFUNCTION_DATA__EnableMultiProcessing" in list(self.dictMCVal.keys()): self.text_da += data_name + "_ScriptWithOneFunction['EnableMultiProcessing'] = " + str(self.dictMCVal[search_type + "SCRIPTWITHONEFUNCTION_DATA__EnableMultiProcessing"]) + "\n" - if search_type + "SCRIPTWITHONEFUNCTION_DATA__NumberOfProcesses" in self.dictMCVal.keys(): + if search_type + "SCRIPTWITHONEFUNCTION_DATA__NumberOfProcesses" in list(self.dictMCVal.keys()): self.text_da += data_name + "_ScriptWithOneFunction['NumberOfProcesses'] = " + str(self.dictMCVal[search_type + "SCRIPTWITHONEFUNCTION_DATA__NumberOfProcesses"]) + "\n" self.text_da += data_name + "_config = {}\n" self.text_da += data_name + "_config['Type'] = 'Function'\n" @@ -252,7 +252,7 @@ class AdaoGenerator(PythonGenerator): self.text_da += "Init_config['From'] = 'Script'\n" self.text_da += "Init_config['Data'] = '" + init_file_data + "'\n" self.text_da += "Init_config['Target'] = [" - if type(init_target_list) is type("str"): + if isinstance(init_target_list, type("str")): self.text_da += "'" + init_target_list + "'," else: for target in init_target_list: @@ -288,7 +288,7 @@ class AdaoGenerator(PythonGenerator): def add_AlgorithmParameters(self): - if not self.dictMCVal.has_key("__"+self.type_of_study+"__AlgorithmParameters__Parameters"): return + if "__"+self.type_of_study+"__AlgorithmParameters__Parameters" not in self.dictMCVal: return data_name = "AlgorithmParameters" data_type = "Dict" @@ -314,7 +314,7 @@ class AdaoGenerator(PythonGenerator): self.text_da += "study_config['" + data_name + "'] = " + data_name + "_config\n" elif from_type == "Defaults": base = "__"+self.type_of_study+"__AlgorithmParameters__Parameters" - keys = [k for k in self.dictMCVal.keys() if base in k] + keys = [k for k in list(self.dictMCVal.keys()) if base in k] keys.remove(base) keys = [k.replace(base,'') for k in keys] data = '{' @@ -339,7 +339,7 @@ class AdaoGenerator(PythonGenerator): def add_variables(self): # Input variables - if "__"+self.type_of_study+"__InputVariables__NAMES" in self.dictMCVal.keys(): + if "__"+self.type_of_study+"__InputVariables__NAMES" in list(self.dictMCVal.keys()): names = [] sizes = [] if isinstance(self.dictMCVal["__"+self.type_of_study+"__InputVariables__NAMES"], type("")): @@ -363,7 +363,7 @@ class AdaoGenerator(PythonGenerator): self.text_da += "study_config['InputVariables'] = 
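Note on two recurring rewrites in the generator above: dict.has_key(k) no longer exists and becomes "k in d", and the string-type test spelled isinstance(x, type("str")) can simply be isinstance(x, str) now that str covers text. A short, self-contained illustration; the dictionary content is made up:

    dictMCVal = {"__ASSIMILATION_STUDY__AlgorithmParameters__Parameters": "Defaults"}

    # dict.has_key() was removed in Python 3; membership uses the in operator.
    if "__ASSIMILATION_STUDY__AlgorithmParameters__Parameters" in dictMCVal:
        pass

    # isinstance(x, str) covers what Python 2 split between str and unicode.
    init_target_list = "Background"
    targets = [init_target_list] if isinstance(init_target_list, str) else list(init_target_list)
    print(targets)
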
inputvariables_config\n" # Output variables - if "__"+self.type_of_study+"__OutputVariables__NAMES" in self.dictMCVal.keys(): + if "__"+self.type_of_study+"__OutputVariables__NAMES" in list(self.dictMCVal.keys()): names = [] sizes = [] if isinstance(self.dictMCVal["__"+self.type_of_study+"__OutputVariables__NAMES"], type("")): @@ -398,7 +398,7 @@ class AdaoGenerator(PythonGenerator): # Write observers in the python command file number = 2 self.text_da += "observers = {}\n" - for observer in observers.keys(): + for observer in list(observers.keys()): number += 1 self.text_da += "observers[\"" + observer + "\"] = {}\n" self.text_da += "observers[\"" + observer + "\"][\"number\"] = " + str(number) + "\n" @@ -410,9 +410,9 @@ class AdaoGenerator(PythonGenerator): self.text_da += "observers[\"" + observer + "\"][\"Template\"] = \"\"\"" + observers[observer]["template"] + "\"\"\"\n" else: self.text_da += "observers[\"" + observer + "\"][\"Script\"] = \"" + observers[observer]["file"] + "\"\n" - if "scheduler" in observers[observer].keys(): + if "scheduler" in list(observers[observer].keys()): self.text_da += "observers[\"" + observer + "\"][\"scheduler\"] = \"\"\"" + observers[observer]["scheduler"] + "\"\"\"\n" - if "info" in observers[observer].keys(): + if "info" in list(observers[observer].keys()): self.text_da += "observers[\"" + observer + "\"][\"info\"] = \"\"\"" + observers[observer]["info"] + "\"\"\"\n" self.text_da += "study_config['Observers'] = observers\n" @@ -440,10 +440,10 @@ class AdaoGenerator(PythonGenerator): # Scheduler scheduler_key_name = observer_eficas_name + "Scheduler" - if scheduler_key_name in self.dictMCVal.keys(): + if scheduler_key_name in list(self.dictMCVal.keys()): observers[observer]["scheduler"] = self.dictMCVal[scheduler_key_name] # Info info_key_name = observer_eficas_name + "Info" - if info_key_name in self.dictMCVal.keys(): + if info_key_name in list(self.dictMCVal.keys()): observers[observer]["info"] = self.dictMCVal[info_key_name] diff --git a/src/daEficas/traduitADAOV7_4_0ToV8_3_0.py b/src/daEficas/traduitADAOV7_4_0ToV8_3_0.py index b834a83..9d4afe5 100644 --- a/src/daEficas/traduitADAOV7_4_0ToV8_3_0.py +++ b/src/daEficas/traduitADAOV7_4_0ToV8_3_0.py @@ -107,9 +107,9 @@ def main(): options, args = parser.parse_args() if len(options.infile) == 0: - print + print() parser.print_help() - print + print() sys.exit(1) traduc(options.infile,options.outfile) diff --git a/src/daEficas/traduitADAOV7_5_0ToV8_3_0.py b/src/daEficas/traduitADAOV7_5_0ToV8_3_0.py index b834a83..9d4afe5 100644 --- a/src/daEficas/traduitADAOV7_5_0ToV8_3_0.py +++ b/src/daEficas/traduitADAOV7_5_0ToV8_3_0.py @@ -107,9 +107,9 @@ def main(): options, args = parser.parse_args() if len(options.infile) == 0: - print + print() parser.print_help() - print + print() sys.exit(1) traduc(options.infile,options.outfile) diff --git a/src/daEficas/traduitADAOV7_5_1ToV8_3_0.py b/src/daEficas/traduitADAOV7_5_1ToV8_3_0.py index b834a83..9d4afe5 100644 --- a/src/daEficas/traduitADAOV7_5_1ToV8_3_0.py +++ b/src/daEficas/traduitADAOV7_5_1ToV8_3_0.py @@ -107,9 +107,9 @@ def main(): options, args = parser.parse_args() if len(options.infile) == 0: - print + print() parser.print_help() - print + print() sys.exit(1) traduc(options.infile,options.outfile) diff --git a/src/daEficas/traduitADAOV7_6_0ToV8_3_0.py b/src/daEficas/traduitADAOV7_6_0ToV8_3_0.py index 8a5d3d1..5096efb 100644 --- a/src/daEficas/traduitADAOV7_6_0ToV8_3_0.py +++ b/src/daEficas/traduitADAOV7_6_0ToV8_3_0.py @@ -87,9 +87,9 @@ def main(): options, 
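Note on the traduitADAO* scripts above: the bare Python 2 "print" statements around the help text become explicit print() calls, and a call with no argument still emits the blank line. A minimal sketch of the same pattern; the helper name is invented and "parser" stands for the scripts' own option parser:

    import sys

    def print_help_and_exit(parser):
        # A bare "print" becomes "print()" in Python 3 and still prints the
        # blank lines the scripts put around the generated help text.
        print()
        parser.print_help()
        print()
        sys.exit(1)
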
args = parser.parse_args() if len(options.infile) == 0: - print + print() parser.print_help() - print + print() sys.exit(1) traduc(options.infile,options.outfile) diff --git a/src/daEficas/traduitADAOV7_7_0ToV8_3_0.py b/src/daEficas/traduitADAOV7_7_0ToV8_3_0.py index 8a5d3d1..5096efb 100644 --- a/src/daEficas/traduitADAOV7_7_0ToV8_3_0.py +++ b/src/daEficas/traduitADAOV7_7_0ToV8_3_0.py @@ -87,9 +87,9 @@ def main(): options, args = parser.parse_args() if len(options.infile) == 0: - print + print() parser.print_help() - print + print() sys.exit(1) traduc(options.infile,options.outfile) diff --git a/src/daEficas/traduitADAOV7_8_0ToV8_3_0.py b/src/daEficas/traduitADAOV7_8_0ToV8_3_0.py index 8a5d3d1..5096efb 100644 --- a/src/daEficas/traduitADAOV7_8_0ToV8_3_0.py +++ b/src/daEficas/traduitADAOV7_8_0ToV8_3_0.py @@ -87,9 +87,9 @@ def main(): options, args = parser.parse_args() if len(options.infile) == 0: - print + print() parser.print_help() - print + print() sys.exit(1) traduc(options.infile,options.outfile) diff --git a/src/daEficas/traduitADAOV8_1_0ToV8_3_0.py b/src/daEficas/traduitADAOV8_1_0ToV8_3_0.py index 8a5d3d1..5096efb 100644 --- a/src/daEficas/traduitADAOV8_1_0ToV8_3_0.py +++ b/src/daEficas/traduitADAOV8_1_0ToV8_3_0.py @@ -87,9 +87,9 @@ def main(): options, args = parser.parse_args() if len(options.infile) == 0: - print + print() parser.print_help() - print + print() sys.exit(1) traduc(options.infile,options.outfile) diff --git a/src/daEficas/traduitADAOV8_2_0ToV8_3_0.py b/src/daEficas/traduitADAOV8_2_0ToV8_3_0.py index 8a5d3d1..5096efb 100644 --- a/src/daEficas/traduitADAOV8_2_0ToV8_3_0.py +++ b/src/daEficas/traduitADAOV8_2_0ToV8_3_0.py @@ -87,9 +87,9 @@ def main(): options, args = parser.parse_args() if len(options.infile) == 0: - print + print() parser.print_help() - print + print() sys.exit(1) traduc(options.infile,options.outfile) diff --git a/src/daEficas/traduitADAOsansToV8_3_0.py b/src/daEficas/traduitADAOsansToV8_3_0.py index b834a83..9d4afe5 100644 --- a/src/daEficas/traduitADAOsansToV8_3_0.py +++ b/src/daEficas/traduitADAOsansToV8_3_0.py @@ -107,9 +107,9 @@ def main(): options, args = parser.parse_args() if len(options.infile) == 0: - print + print() parser.print_help() - print + print() sys.exit(1) traduc(options.infile,options.outfile) diff --git a/src/daSalome/daGUI/daEficasWrapper/adaoEficasWrapper.py b/src/daSalome/daGUI/daEficasWrapper/adaoEficasWrapper.py index 894443e..9b14506 100644 --- a/src/daSalome/daGUI/daEficasWrapper/adaoEficasWrapper.py +++ b/src/daSalome/daGUI/daEficasWrapper/adaoEficasWrapper.py @@ -82,7 +82,7 @@ class AdaoEficasWrapper(eficasSalome.MyEficas): debug("tabChanged " + str(index)) # This signal is also emit when a new case is created/added # On regarde que le dictionnaire contient l'index - if index in self.viewmanager.dict_editors.keys(): + if index in list(self.viewmanager.dict_editors.keys()): self.notifyObserver(EficasEvent.EVENT_TYPES.TABCHANGED, callbackId=self.viewmanager.dict_editors[index]) ####### @@ -156,7 +156,7 @@ class AdaoEficasWrapper(eficasSalome.MyEficas): def selectCase(self, editor): rtn = False - for indexEditor in self.viewmanager.dict_editors.keys(): + for indexEditor in list(self.viewmanager.dict_editors.keys()): if editor is self.viewmanager.dict_editors[indexEditor]: self.viewmanager.myQtab.setCurrentIndex(indexEditor) rtn = True diff --git a/src/daSalome/daGUI/daGuiImpl/ADAOGUI_impl.py b/src/daSalome/daGUI/daGuiImpl/ADAOGUI_impl.py index 6a6118f..3570041 100644 --- a/src/daSalome/daGUI/daGuiImpl/ADAOGUI_impl.py +++ 
b/src/daSalome/daGUI/daGuiImpl/ADAOGUI_impl.py @@ -56,7 +56,7 @@ __current_context__ = None def _setContext( studyID ): global __study2context__, __current_context__ QApplication.processEvents() - if not __study2context__.has_key(studyID): + if studyID not in __study2context__: __study2context__[studyID] = GUIcontext() pass __current_context__ = __study2context__[studyID] @@ -85,7 +85,7 @@ def windows(): # called when module is initialized # return list of 2d/3d views to be used ny the module def views(): - print "views" + print("views") return [] def createPreferences(): diff --git a/src/daSalome/daGUI/daGuiImpl/adaoCase.py b/src/daSalome/daGUI/daGuiImpl/adaoCase.py index 10a9868..12eae61 100644 --- a/src/daSalome/daGUI/daGuiImpl/adaoCase.py +++ b/src/daSalome/daGUI/daGuiImpl/adaoCase.py @@ -27,8 +27,8 @@ import SalomePyQt import eficasSalome from Ihm import CONNECTOR -import adaoGuiHelper -import adaoStudyEditor +from . import adaoGuiHelper +from . import adaoStudyEditor class AdaoCase: @@ -83,7 +83,7 @@ class AdaoCase: msg += "case with the ADAO/EFICAS editor." return msg - if not os.environ.has_key("ADAO_ROOT_DIR"): + if "ADAO_ROOT_DIR" not in os.environ: return "Please add ADAO_ROOT_DIR to your environnement." adao_path = os.environ["ADAO_ROOT_DIR"] @@ -117,8 +117,8 @@ class AdaoCase: def validationReportforJDC(self): rtn = "Validation report is empty." if self.eficas_editor.jdc: - rtn = u"Validation report for the selected ADAO case:\n\n" - rtn += unicode( self.eficas_editor.jdc.report()) + rtn = "Validation report for the selected ADAO case:\n\n" + rtn += str( self.eficas_editor.jdc.report()) return rtn def showTreeAdaoCase(self): diff --git a/src/daSalome/daGUI/daGuiImpl/adaoGuiHelper.py b/src/daSalome/daGUI/daGuiImpl/adaoGuiHelper.py index e3b3f50..6a4abf4 100644 --- a/src/daSalome/daGUI/daGuiImpl/adaoGuiHelper.py +++ b/src/daSalome/daGUI/daGuiImpl/adaoGuiHelper.py @@ -27,7 +27,7 @@ import salome import SalomePyQt __sgPyQt = SalomePyQt.SalomePyQt() -import adaoModuleHelper +from . import adaoModuleHelper from daUtils.qtversion import useQT5 if useQT5: from PyQt5 import QtGui, QtCore diff --git a/src/daSalome/daGUI/daGuiImpl/adaoGuiManager.py b/src/daSalome/daGUI/daGuiImpl/adaoGuiManager.py index 8c0250c..8d47feb 100644 --- a/src/daSalome/daGUI/daGuiImpl/adaoGuiManager.py +++ b/src/daSalome/daGUI/daGuiImpl/adaoGuiManager.py @@ -45,8 +45,8 @@ from daGuiImpl.adaoCase import AdaoCase from daEficasWrapper.adaoEficasWrapper import AdaoEficasWrapper from daUtils.adaoEficasEvent import * -import adaoGuiHelper -import adaoStudyEditor +from . import adaoGuiHelper +from . 
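Note on the GUI hunks above: os.environ.has_key(...) is replaced by the in operator, and unicode(...) by str(...), since Python 3 has a single text type. A compact sketch of both idioms; the message strings are placeholders:

    import os

    # has_key() is gone; the in operator tests for a variable in one step.
    if "ADAO_ROOT_DIR" not in os.environ:
        message = "Please add ADAO_ROOT_DIR to your environment."
    else:
        message = "ADAO_ROOT_DIR = " + os.environ["ADAO_ROOT_DIR"]

    # unicode() no longer exists: str already handles text, so building the
    # validation report is plain string concatenation.
    report = "Validation report for the selected ADAO case:\n\n" + str(message)
    print(report)
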
import adaoStudyEditor from daUtils import adaoLogger __cases__ = {} @@ -156,7 +156,7 @@ class AdaoCaseManager(EficasObserver): adaoLogger.debug("currentSelectionChanged") salomeStudyItem = adaoGuiHelper.getSelectedItem() if salomeStudyItem is not None: - for case_editor, adao_case in self.cases.iteritems(): + for case_editor, adao_case in list(self.cases.items()): if adao_case.salome_study_item.GetID() == salomeStudyItem.GetID(): self.eficas_manager.selectCase(adao_case.eficas_editor) break @@ -168,7 +168,7 @@ class AdaoCaseManager(EficasObserver): et la selection dans l'etude SALOME """ editor = eficasEvent.callbackId - for case_editor, adao_case in self.cases.iteritems(): + for case_editor, adao_case in list(self.cases.items()): if case_editor is editor: adaoGuiHelper.selectItem(adao_case.salome_study_item.GetID()) break @@ -184,7 +184,7 @@ class AdaoCaseManager(EficasObserver): editor = self.eficas_manager.getCurrentEditor() # 2: sync with SALOME GUI is a tab is opened if editor: - for case_editor, adao_case in self.cases.iteritems(): + for case_editor, adao_case in list(self.cases.items()): if case_editor is editor: adaoGuiHelper.selectItem(adao_case.salome_study_item.GetID()) break @@ -247,7 +247,7 @@ class AdaoCaseManager(EficasObserver): # dans le GUI d'Eficas self.harmonizeSelectionFromEficas() salomeStudyItem = adaoGuiHelper.getSelectedItem() - for case_name, adao_case in self.cases.iteritems(): + for case_name, adao_case in list(self.cases.items()): if adao_case.salome_study_item.GetID() == salomeStudyItem.GetID(): if not adao_case.isOk(): adaoLogger.debug("Cas invalide, donc il est sauvegarde, mais il ne peut pas etre exporte vers YACS ensuite") @@ -260,7 +260,7 @@ class AdaoCaseManager(EficasObserver): # dans le GUI d'Eficas self.harmonizeSelectionFromEficas() salomeStudyItem = adaoGuiHelper.getSelectedItem() - for case_name, adao_case in self.cases.iteritems(): + for case_name, adao_case in list(self.cases.items()): if adao_case.salome_study_item.GetID() == salomeStudyItem.GetID(): if not adao_case.isOk(): adaoLogger.debug("Cas invalide, donc il est sauvegarde, mais il ne peut pas etre exporte vers YACS ensuite") @@ -291,7 +291,7 @@ class AdaoCaseManager(EficasObserver): # dans le GUI d'Eficas self.harmonizeSelectionFromEficas() salomeStudyItem = adaoGuiHelper.getSelectedItem() - for case_name, adao_case in self.cases.iteritems(): + for case_name, adao_case in list(self.cases.items()): if adao_case.salome_study_item.GetID() == salomeStudyItem.GetID(): self.eficas_manager.adaoFileClose(adao_case) break @@ -321,7 +321,7 @@ class AdaoCaseManager(EficasObserver): adaoLogger.debug("Validation du cas par un rapport sur le JDC") self.harmonizeSelectionFromEficas() salomeStudyItem = adaoGuiHelper.getSelectedItem() - for case_name, adao_case in self.cases.iteritems(): + for case_name, adao_case in list(self.cases.items()): if adao_case.salome_study_item.GetID() == salomeStudyItem.GetID(): msg = adao_case.validationReportforJDC() adaoGuiHelper.gui_information(SalomePyQt.SalomePyQt().getDesktop(), msg) @@ -338,7 +338,7 @@ class AdaoCaseManager(EficasObserver): adaoLogger.debug("Validation du cas par un rapport sur le JDC") self.harmonizeSelectionFromEficas() salomeStudyItem = adaoGuiHelper.getSelectedItem() - for case_name, adao_case in self.cases.iteritems(): + for case_name, adao_case in list(self.cases.items()): if adao_case.salome_study_item.GetID() == salomeStudyItem.GetID(): msg = adao_case.showTreeAdaoCase() break @@ -356,7 +356,7 @@ class AdaoCaseManager(EficasObserver): # dans 
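Note on the selection-handling loops above: they move from dict.iteritems() to items(). In Python 3 items() returns a view, so the extra list(...) is only needed when the dictionary may change during the loop, for instance when a case is closed. A small sketch with invented case names:

    cases = {"editor1": "case A", "editor2": "case B"}

    # items() already iterates lazily in Python 3; no list() needed here.
    for editor, adao_case in cases.items():
        print(editor, adao_case)

    # A list() copy protects the loop if entries are removed while iterating.
    for editor, adao_case in list(cases.items()):
        if adao_case == "case B":
            del cases[editor]
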
le GUI d'Eficas self.harmonizeSelectionFromEficas() salomeStudyItem = adaoGuiHelper.getSelectedItem() - for case_name, adao_case in self.cases.iteritems(): + for case_name, adao_case in list(self.cases.items()): if adao_case.salome_study_item.GetID() == salomeStudyItem.GetID(): if adao_case.isOk(): msg = adao_case.exportCaseToYACS() @@ -402,7 +402,7 @@ class AdaoCaseManager(EficasObserver): """ Main switch function for ui actions processing """ - if ACTIONS_MAP.has_key(actionId): + if actionId in ACTIONS_MAP: try: functionName = ACTIONS_MAP[actionId] getattr(self,functionName)() diff --git a/src/daSalome/daGUI/daGuiImpl/adaoStudyEditor.py b/src/daSalome/daGUI/daGuiImpl/adaoStudyEditor.py index f9a8e1d..70bb931 100644 --- a/src/daSalome/daGUI/daGuiImpl/adaoStudyEditor.py +++ b/src/daSalome/daGUI/daGuiImpl/adaoStudyEditor.py @@ -26,7 +26,7 @@ from daUtils.enumerate import Enumerate from salome.kernel import studyedit -import adaoModuleHelper +from . import adaoModuleHelper # # ============================================================================== diff --git a/src/daSalome/daGUI/daUtils/adaoEficasEvent.py b/src/daSalome/daGUI/daUtils/adaoEficasEvent.py index 06ee5c7..154009d 100644 --- a/src/daSalome/daGUI/daUtils/adaoEficasEvent.py +++ b/src/daSalome/daGUI/daUtils/adaoEficasEvent.py @@ -32,7 +32,7 @@ class DevelException(Exception): # Interface of an eficas observer (for implementing the subject/observer pattern) # ============================================================================== # -from enumerate import Enumerate +from .enumerate import Enumerate class EficasObserver: """ diff --git a/src/daSalome/daGUI/daUtils/enumerate.py b/src/daSalome/daGUI/daUtils/enumerate.py index 6584778..2aa33c3 100644 --- a/src/daSalome/daGUI/daUtils/enumerate.py +++ b/src/daSalome/daGUI/daUtils/enumerate.py @@ -45,19 +45,17 @@ class Enumerate(object): Return true if this enumerate contains the specified key @key a key to test """ - return (key in self._dict_keynumbers.keys()) + return (key in list(self._dict_keynumbers.keys())) def isValid(self, value): - return (value in self._dict_keynumbers.values()) + return (value in list(self._dict_keynumbers.values())) def listkeys(self): - list = self._dict_keynumbers.keys() - list.sort() + list = sorted(list(self._dict_keynumbers.keys())) return list def listvalues(self): - list = self._dict_keynumbers.values() - list.sort() + list = sorted(list(self._dict_keynumbers.values())) return list # @@ -71,7 +69,7 @@ def TEST_simple(): 'SEP', 'OTHER' ]) - print TYPES_LIST.listvalues() + print(TYPES_LIST.listvalues()) return True def TEST_createFromList(): @@ -82,8 +80,8 @@ def TEST_createFromList(): 'MED', 'SMESH']) - print codes.KERNEL - print codes.GEOM + print(codes.KERNEL) + print(codes.GEOM) if (codes.KERNEL == 0 and codes.GEOM == 2): return True else: @@ -94,8 +92,8 @@ def TEST_createFromString(): codes = Enumerate(aList.split()) - print codes.KERNEL - print codes.GEOM + print(codes.KERNEL) + print(codes.GEOM) if (codes.KERNEL == 0 and codes.GEOM == 2): return True else: @@ -109,7 +107,7 @@ def TEST_contains(): 'MED', 'SMESH']) - print "VISU in enumerate?", codes.contains("VISU") + print("VISU in enumerate?", codes.contains("VISU")) if ( not codes.contains("VISU") ): return True else: @@ -136,8 +134,8 @@ def TEST_offset(): 'MED', 'SMESH'], offset=20) - print codes.KERNEL - print codes.GEOM + print(codes.KERNEL) + print(codes.GEOM) if (codes.KERNEL == 20 and codes.GEOM == 22): return True else: @@ -151,7 +149,7 @@ def TEST_listvalues(): 'MED', 'SMESH'], 
offset=20) - print codes.listvalues() + print(codes.listvalues()) if codes.listvalues() != [20,21,22,23,24]: return False return True diff --git a/src/daSalome/daYacsIntegration/__init__.py b/src/daSalome/daYacsIntegration/__init__.py index 8116d88..b1289aa 100644 --- a/src/daSalome/daYacsIntegration/__init__.py +++ b/src/daSalome/daYacsIntegration/__init__.py @@ -21,4 +21,4 @@ # # Author: André Ribes, andre.ribes@edf.fr, EDF R&D -from daOptimizerLoop import * +from .daOptimizerLoop import * diff --git a/src/daSalome/daYacsIntegration/daOptimizerLoop.py b/src/daSalome/daYacsIntegration/daOptimizerLoop.py index 6a4d528..004b043 100644 --- a/src/daSalome/daYacsIntegration/daOptimizerLoop.py +++ b/src/daSalome/daYacsIntegration/daOptimizerLoop.py @@ -24,7 +24,7 @@ import SALOMERuntime import pilot -import pickle, cPickle +import pickle, pickle import numpy import threading @@ -159,7 +159,7 @@ class OptimizerHooks: self.optim_algo.pool.pushInSample(local_counter, sample) # 3: Wait - while 1: + while True: #print "waiting" self.optim_algo.signalMasterAndWait() #print "signal" @@ -183,8 +183,9 @@ class OptimizerHooks: #print "sync false is not yet implemented" self.optim_algo.setError("sync == false not yet implemented") - def Tangent(self, (X, dX), sync = 1): + def Tangent(self, xxx_todo_changeme, sync = 1): # print "Call Tangent OptimizerHooks" + (X, dX) = xxx_todo_changeme if sync == 1: # 1: Get a unique sample number self.optim_algo.counter_lock.acquire() @@ -196,7 +197,7 @@ class OptimizerHooks: self.optim_algo.pool.pushInSample(local_counter, sample) # 3: Wait - while 1: + while True: self.optim_algo.signalMasterAndWait() if self.optim_algo.isTerminationRequested(): self.optim_algo.pool.destroyAll() @@ -218,8 +219,9 @@ class OptimizerHooks: #print "sync false is not yet implemented" self.optim_algo.setError("sync == false not yet implemented") - def Adjoint(self, (X, Y), sync = 1): + def Adjoint(self, xxx_todo_changeme1, sync = 1): # print "Call Adjoint OptimizerHooks" + (X, Y) = xxx_todo_changeme1 if sync == 1: # 1: Get a unique sample number self.optim_algo.counter_lock.acquire() @@ -231,7 +233,7 @@ class OptimizerHooks: self.optim_algo.pool.pushInSample(local_counter, sample) # 3: Wait - while 1: + while True: #print "waiting" self.optim_algo.signalMasterAndWait() #print "signal" @@ -286,7 +288,7 @@ class AssimilationAlgorithm_asynch(SALOMERuntime.OptimizerAlgASync): #print "[Debug] Input is ", input str_da_study = input.getStringValue() try: - self.da_study = cPickle.loads(str_da_study) + self.da_study = pickle.loads(str_da_study) except ValueError as e: raise ValueError("\n\n Handling internal error in study exchange (message: \"%s\").\n The case is probably too big (bigger than the physical plus the virtual memory available).\n Try if possible to store the covariance matrices in sparse format.\n"%(str(e),)) #print "[Debug] da_study is ", self.da_study @@ -334,7 +336,7 @@ class AssimilationAlgorithm_asynch(SALOMERuntime.OptimizerAlgASync): # print "Not setting Hooks for EvolutionModel" # Set Observers - for observer_name in self.da_study.observers_dict.keys(): + for observer_name in list(self.da_study.observers_dict.keys()): # print "observers %s found" % observer_name self.has_observer = True if self.da_study.observers_dict[observer_name]["scheduler"] != "": @@ -343,11 +345,11 @@ class AssimilationAlgorithm_asynch(SALOMERuntime.OptimizerAlgASync): self.ADD.setDataObserver(observer_name, HookFunction=self.obs, HookParameters = observer_name) # Start Assimilation Study - print 
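Note on the Tangent and Adjoint hooks above: Python 3 removed tuple parameters in function signatures (PEP 3113), which is why 2to3 rewrites Tangent(self, (X, dX)) with an xxx_todo_changeme placeholder unpacked on the next line. Giving the pair a real name keeps the hook readable. A sketch of that cleanup, outside the actual OptimizerHooks class:

    class HooksSketch:
        # Tuple unpacking now happens in the body rather than the signature.
        def Tangent(self, pair, sync=1):
            X, dX = pair
            return ("Tangent", X, dX, sync)

        def Adjoint(self, pair, sync=1):
            X, Y = pair
            return ("Adjoint", X, Y, sync)

    print(HooksSketch().Tangent(([1.0, 2.0], [0.1, 0.2])))
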
"Launching the analysis\n" + print("Launching the analysis\n") try: self.ADD.analyze() except Exception as e: - if type(e) == type(SyntaxError()): msg = "at %s: %s"%(e.offset, e.text) + if isinstance(e, type(SyntaxError())): msg = "at %s: %s"%(e.offset, e.text) else: msg = "" raise ValueError("during execution, the following error occurs:\n\n%s %s\n\nSee also the potential messages, which can show the origin of the above error, in the YACS GUI or in the launching terminal."%(str(e),msg)) @@ -391,7 +393,7 @@ class AssimilationAlgorithm_asynch(SALOMERuntime.OptimizerAlgASync): # Remove Data Observer, so you can ... var.removeDataObserver(self.obs) # Pickle then ... - var_str = cPickle.dumps(var) + var_str = pickle.dumps(var) # Add Again Data Observer if self.da_study.observers_dict[info]["scheduler"] != "": self.ADD.setDataObserver(info, HookFunction=self.obs, Scheduler = self.da_study.observers_dict[info]["scheduler"], HookParameters = info) @@ -416,7 +418,7 @@ class AssimilationAlgorithm_asynch(SALOMERuntime.OptimizerAlgASync): # Wait import sys, traceback try: - while 1: + while True: self.signalMasterAndWait() if self.isTerminationRequested(): self.pool.destroyAll() @@ -430,14 +432,14 @@ class AssimilationAlgorithm_asynch(SALOMERuntime.OptimizerAlgASync): self.counter_lock.release() break except: - print "Exception in user code:" - print '-'*60 + print("Exception in user code:") + print('-'*60) traceback.print_exc(file=sys.stdout) - print '-'*60 + print('-'*60) def getAlgoResult(self): # Remove data observers, required to pickle assimilation study object - for observer_name in self.da_study.observers_dict.keys(): + for observer_name in list(self.da_study.observers_dict.keys()): self.ADD.removeDataObserver(observer_name, self.obs) self.ADD.prepare_to_pickle() result = pickle.dumps(self.da_study) # Careful : pickle is mandatory over cPickle ! diff --git a/src/daSalome/daYacsSchemaCreator/help_methods.py b/src/daSalome/daYacsSchemaCreator/help_methods.py index 2a275f3..6ad8ea6 100644 --- a/src/daSalome/daYacsSchemaCreator/help_methods.py +++ b/src/daSalome/daYacsSchemaCreator/help_methods.py @@ -61,7 +61,7 @@ def check_study(study_config): # Repertory check_repertory = False repertory = "" - if "Repertory" in study_config.keys(): + if "Repertory" in list(study_config.keys()): repertory = study_config["Repertory"] check_repertory = True if not os.path.isabs(repertory): @@ -70,18 +70,18 @@ def check_study(study_config): # Check if all the data is provided for key in AlgoDataRequirements[study_config["Algorithm"]]: - if key not in study_config.keys(): + if key not in list(study_config.keys()): raise ValueError("\n\nCannot find " + key + " in your study configuration !" + "\n This key is mandatory into a study with " + study_config["Algorithm"] + " algorithm." 
+ "\n " + study_config["Algorithm"] + " requirements are " + str(AlgoDataRequirements[study_config["Algorithm"]]) + "\n") # Data - for key in study_config.keys(): + for key in list(study_config.keys()): if key in AssimData: check_data(key, study_config[key], check_repertory, repertory) # UserDataInit - if "UserDataInit" in study_config.keys(): + if "UserDataInit" in list(study_config.keys()): check_data("UserDataInit", study_config["UserDataInit"], check_repertory, repertory) # Variables @@ -89,7 +89,7 @@ def check_study(study_config): check_variables("OutputVariables", study_config) # Analyse - if "UserPostAnalysis" in study_config.keys(): + if "UserPostAnalysis" in list(study_config.keys()): analysis_config = study_config["UserPostAnalysis"] if "From" not in analysis_config: raise ValueError("\n\n UserPostAnalysis found but From is not defined \n in the analysis configuration!\n") @@ -109,13 +109,13 @@ def check_study(study_config): " The given user file is:\n %s\n" % check_file_name) # Check observers - if "Observers" in study_config.keys(): + if "Observers" in list(study_config.keys()): for obs_var in study_config["Observers"]: # Check du type if not isinstance(study_config["Observers"][obs_var], type({})): raise ValueError("\n\n An observer description has to be a Python dictionary\n"+ " Observer is %s\n" % obs_var) - if "nodetype" not in study_config["Observers"][obs_var].keys(): + if "nodetype" not in list(study_config["Observers"][obs_var].keys()): raise ValueError("\n\n An observer description must provide a nodetype\n"+ " Observer is %s\n" % obs_var) nodetype = study_config["Observers"][obs_var]["nodetype"] @@ -126,31 +126,31 @@ def check_study(study_config): raise ValueError("\n\n An observer nodetype must be equal to 'String' or 'Script'\n"+ " Observer is %s\n" % obs_var) if nodetype == "String": - if "String" not in study_config["Observers"][obs_var].keys(): + if "String" not in list(study_config["Observers"][obs_var].keys()): raise ValueError("\n\n An observer with nodetype String must provide a String\n"+ " Observer is %s\n" % obs_var) if not isinstance(study_config["Observers"][obs_var]["String"], type("")): raise ValueError("\n\n An observer String description must be a string\n"+ " Observer is %s\n" % obs_var) if nodetype == "Script": - if "Script" not in study_config["Observers"][obs_var].keys(): + if "Script" not in list(study_config["Observers"][obs_var].keys()): raise ValueError("\n\n An observer with nodetype Script provide a Script\n"+ " Observer is %s\n" % obs_var) if not isinstance(study_config["Observers"][obs_var]["Script"], type("")): raise ValueError("\n\n An observer Script description must be a string\n"+ " Observer is %s\n" % obs_var) - if "scheduler" in study_config["Observers"][obs_var].keys(): + if "scheduler" in list(study_config["Observers"][obs_var].keys()): if not isinstance(study_config["Observers"][obs_var]["scheduler"], type("")): raise ValueError("\n\n An observer scheduler description must be a string\n"+ " Observer is %s\n" % obs_var) def check_variables(name, study_config): - if name not in study_config.keys(): + if name not in list(study_config.keys()): raise ValueError("\n\n %s not found in your study configuration!\n" % name) variable_config = study_config[name] - if "Order" not in variable_config.keys(): + if "Order" not in list(variable_config.keys()): raise ValueError("\n\n Order not found in the %s configuration!\n" % name) list_of_variables = variable_config["Order"] @@ -160,7 +160,7 @@ def check_variables(name, study_config): raise 
ValueError("\n\nOrder should contain one or more names in the %s configuration!\n" % name) for var in list_of_variables: - if var not in variable_config.keys(): + if var not in list(variable_config.keys()): raise ValueError("\n\n Variable %s not found in the %s configuration!\n" % name) value = variable_config[var] try: diff --git a/src/daSalome/daYacsSchemaCreator/methods.py b/src/daSalome/daYacsSchemaCreator/methods.py index 191e081..4c15e1a 100644 --- a/src/daSalome/daYacsSchemaCreator/methods.py +++ b/src/daSalome/daYacsSchemaCreator/methods.py @@ -70,7 +70,7 @@ def create_yacs_proc(study_config): t_bool = proc.getTypeCode("bool") t_param_input = proc.getTypeCode("SALOME_TYPES/ParametricInput") t_param_output = proc.getTypeCode("SALOME_TYPES/ParametricOutput") - if "Repertory" in study_config.keys(): + if "Repertory" in list(study_config.keys()): base_repertory = study_config["Repertory"] repertory = True else: @@ -86,7 +86,7 @@ def create_yacs_proc(study_config): CAS_node = factory_CAS_node.cloneNode("CreateAssimilationStudy") CAS_node.getInputPort("Name").edInitPy(study_config["Name"]) CAS_node.getInputPort("Algorithm").edInitPy(study_config["Algorithm"]) - if study_config.has_key("Debug") and study_config["Debug"] == "1": + if "Debug" in study_config and study_config["Debug"] == "1": CAS_node.getInputPort("Debug").edInitPy(True) else: CAS_node.getInputPort("Debug").edInitPy(False) @@ -112,7 +112,7 @@ def create_yacs_proc(study_config): # Adding an observer init node if an user defines some factory_init_observers_node = catalogAd.getNodeFromNodeMap("SetObserversNode") init_observers_node = factory_init_observers_node.cloneNode("SetObservers") - if "Observers" in study_config.keys(): + if "Observers" in list(study_config.keys()): node_script = init_observers_node.getScript() node_script += "has_observers = True\n" node_script += "observers = " + str(study_config["Observers"]) + "\n" @@ -132,7 +132,7 @@ def create_yacs_proc(study_config): # Step 0.5: Find if there is a user init node init_config = {} init_config["Target"] = [] - if "UserDataInit" in study_config.keys(): + if "UserDataInit" in list(study_config.keys()): init_config = study_config["UserDataInit"] factory_init_node = catalogAd.getNodeFromNodeMap("UserDataInitFromScript") init_node = factory_init_node.cloneNode("UserDataInit") @@ -145,11 +145,9 @@ def create_yacs_proc(study_config): # Step 1: get input data from user configuration - st_keys = study_config.keys() - st_keys.sort() + st_keys = sorted(list(study_config.keys())) for key in st_keys: - ad_keys = AssimData - ad_keys.sort() + ad_keys = sorted(AssimData) if key in ad_keys: data_config = study_config[key] @@ -541,9 +539,9 @@ def create_yacs_proc(study_config): node_script += """ Function = DirectOperator,\n""" node_script += """ increment = %s,\n"""%str(ScriptWithOneFunction['DifferentialIncrement']) node_script += """ centeredDF = %s,\n"""%str(ScriptWithOneFunction['CenteredFiniteDifference']) - if 'EnableMultiProcessing' in ScriptWithOneFunction.keys(): + if 'EnableMultiProcessing' in list(ScriptWithOneFunction.keys()): node_script += """ mpEnabled = %s,\n"""%str(ScriptWithOneFunction['EnableMultiProcessing']) - if 'NumberOfProcesses' in ScriptWithOneFunction.keys(): + if 'NumberOfProcesses' in list(ScriptWithOneFunction.keys()): node_script += """ mpWorkers = %s,\n"""%str(ScriptWithOneFunction['NumberOfProcesses']) node_script += """ )\n""" node_script += """#\n""" @@ -586,7 +584,7 @@ def create_yacs_proc(study_config): opt_script_nodeOO = 
factory_opt_script_node.cloneNode("FakeFunctionNode") # Check if we have a python script for OptimizerLoopNode - if "EvolutionModel" in study_config.keys(): + if "EvolutionModel" in list(study_config.keys()): data_config = study_config["EvolutionModel"] opt_script_nodeEM = None if data_config["Type"] == "Function" and (data_config["From"] == "ScriptWithSwitch" or data_config["From"] == "FunctionDict"): @@ -738,9 +736,9 @@ def create_yacs_proc(study_config): node_script += """ Function = DirectOperator,\n""" node_script += """ increment = %s,\n"""%str(ScriptWithOneFunction['DifferentialIncrement']) node_script += """ centeredDF = %s,\n"""%str(ScriptWithOneFunction['CenteredFiniteDifference']) - if 'EnableMultiProcessing' in ScriptWithOneFunction.keys(): + if 'EnableMultiProcessing' in list(ScriptWithOneFunction.keys()): node_script += """ mpEnabled = %s,\n"""%str(ScriptWithOneFunction['EnableMultiProcessing']) - if 'NumberOfProcesses' in ScriptWithOneFunction.keys(): + if 'NumberOfProcesses' in list(ScriptWithOneFunction.keys()): node_script += """ mpWorkers = %s,\n"""%str(ScriptWithOneFunction['NumberOfProcesses']) node_script += """ )\n""" node_script += """#\n""" @@ -785,7 +783,7 @@ def create_yacs_proc(study_config): opt_script_nodeEM = factory_opt_script_node.cloneNode("FakeFunctionNode") # Add computation bloc - if "Observers" in study_config.keys(): + if "Observers" in list(study_config.keys()): execution_bloc = runtime.createBloc("Execution Bloc") optimizer_node.edSetNode(execution_bloc) @@ -811,7 +809,7 @@ def create_yacs_proc(study_config): ADAO_Case.edAddDFLink(opt_script_nodeOO.getOutputPort("result"), optimizer_node.edGetPortForOutPool()) # Second case: evolution bloc - if "EvolutionModel" in study_config.keys(): + if "EvolutionModel" in list(study_config.keys()): computation_blocEM = runtime.createBloc("computation_blocEM") computation_blocEM.edAddChild(opt_script_nodeEM) switch_node.edSetNode(2, computation_blocEM) @@ -853,7 +851,7 @@ def create_yacs_proc(study_config): ADAO_Case.edAddCFLink(observation_node, end_observation_node) ADAO_Case.edAddDFLink(end_observation_node.getOutputPort("output"), optimizer_node.edGetPortForOutPool()) - elif "EvolutionModel" in study_config.keys(): + elif "EvolutionModel" in list(study_config.keys()): execution_bloc = runtime.createBloc("Execution Bloc") optimizer_node.edSetNode(execution_bloc) @@ -905,7 +903,7 @@ def create_yacs_proc(study_config): ADAO_Case.edAddDFLink(init_node.getOutputPort("init_data"), opt_script_nodeOO.getInputPort("init_data")) # Step 4: create post-processing from user configuration - if "UserPostAnalysis" in study_config.keys(): + if "UserPostAnalysis" in list(study_config.keys()): analysis_config = study_config["UserPostAnalysis"] if analysis_config["From"] == "String": factory_analysis_node = catalogAd.getNodeFromNodeMap("SimpleUserAnalysis") diff --git a/src/daSalome/daYacsSchemaCreator/run.py b/src/daSalome/daYacsSchemaCreator/run.py index 2b4a453..6eda6e9 100644 --- a/src/daSalome/daYacsSchemaCreator/run.py +++ b/src/daSalome/daYacsSchemaCreator/run.py @@ -33,7 +33,7 @@ def create_schema(config_file, yacs_schema_filename): # Import config_file try: - execfile(config_file) + exec(compile(open(config_file).read(), config_file, 'exec')) except: raise ValueError("\n\n Exception in loading %s"%config_file) diff --git a/test/test1001/Versions.py b/test/test1001/Versions.py index f09f010..498dc95 100644 --- a/test/test1001/Versions.py +++ b/test/test1001/Versions.py @@ -35,39 +35,39 @@ def compare_versions(v1,v2): for 
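Note on the run.py hunk above: execfile() was removed in Python 3 and the patch uses exec(compile(open(config_file).read(), config_file, 'exec')). A variant with a context manager and an explicit namespace avoids leaving the file open and leaking names into the caller's globals. A sketch of that alternative, not the patch's code:

    def load_config(config_file):
        # execfile() no longer exists in Python 3; compiling the source with
        # its filename keeps tracebacks pointing at the right file.
        namespace = {}
        with open(config_file) as handle:
            exec(compile(handle.read(), config_file, 'exec'), namespace)
        return namespace
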
s in ['+', 'rc1', 'rc2', 'rc3']: v1 = v1.replace(s,'',1) v2 = v2.replace(s,'',1) - v11,v12,v13 = map(float,v1.split('.')) - v21,v22,v23 = map(float,v2.split('.')) + v11,v12,v13 = list(map(float,v1.split('.'))) + v21,v22,v23 = list(map(float,v2.split('.'))) lv1 = 1e6*v11 + 1e3*v12 + v13 lv2 = 1e6*v21 + 1e3*v22 + v23 return lv1 >= lv2 def minimalVersion(): "Description" - print " Les versions minimales attendues sont :" - print " - Python systeme....: %s"%minimal_python_version - print " - Numpy.............: %s"%minimal_numpy_version - print " - Scipy.............: %s"%minimal_scipy_version - print " - Matplotlib........: %s"%minimal_matplotlib_version - print + print(" Les versions minimales attendues sont :") + print(" - Python systeme....: %s"%minimal_python_version) + print(" - Numpy.............: %s"%minimal_numpy_version) + print(" - Scipy.............: %s"%minimal_scipy_version) + print(" - Matplotlib........: %s"%minimal_matplotlib_version) + print() import sys def testSysteme(): "Test des versions de modules" - print " Les versions disponibles sont :" + print(" Les versions disponibles sont :") v=sys.version.split() - print " - Python systeme....: %s"%v[0] + print(" - Python systeme....: %s"%v[0]) assert compare_versions(sys.version.split()[0], minimal_python_version) # try: import numpy - print " - Numpy.............: %s"%numpy.version.version + print(" - Numpy.............: %s"%numpy.version.version) assert compare_versions(numpy.version.version, minimal_numpy_version) except ImportError: return 1 # try: import scipy - print " - Scipy.............: %s"%scipy.version.version + print(" - Scipy.............: %s"%scipy.version.version) assert compare_versions(scipy.version.version, minimal_scipy_version) except ImportError: return 1 @@ -75,10 +75,10 @@ def testSysteme(): try: import matplotlib mplversion = matplotlib.__version__ - print " - Matplotlib........: %s"%mplversion + print(" - Matplotlib........: %s"%mplversion) assert compare_versions(mplversion, minimal_matplotlib_version) # - print + print() backends_OK = [] backends_KO = [] backend_now = matplotlib.get_backend() @@ -93,24 +93,24 @@ def testSysteme(): except ValueError: backends_KO.append(backend) # - print " Backends disponibles pour Matplotlib %s :"%mplversion - print " Defaut initial......: '%s'"%backend_now - print " Fonctionnant........:" + print(" Backends disponibles pour Matplotlib %s :"%mplversion) + print(" Defaut initial......: '%s'"%backend_now) + print(" Fonctionnant........:") for b in backends_OK: - print " '%s'"%b - print " Non fonctionnant....:" + print(" '%s'"%b) + print(" Non fonctionnant....:") for b in backends_KO: - print " '%s'"%b - print " (Le backend 'bidon' n'est ici que pour verifier le test, il n'existe pas)" + print(" '%s'"%b) + print(" (Le backend 'bidon' n'est ici que pour verifier le test, il n'existe pas)") except ImportError: pass - print + print() # return 0 # ============================================================================== if __name__ == "__main__": - print '\n AUTODIAGNOSTIC \n' + print('\n AUTODIAGNOSTIC \n') minimalVersion() sys.exit(testSysteme()) diff --git a/test/test1002/Performances.py b/test/test1002/Performances.py index 948cc07..a2fe0cf 100644 --- a/test/test1002/Performances.py +++ b/test/test1002/Performances.py @@ -26,62 +26,62 @@ import numpy, time numpy.set_printoptions(precision=5) def testSysteme(): - print " Les caracteristiques des applications et outils systeme :" - import sys ; v=sys.version.split() ; print " - Python systeme....: %s"%v[0] - import 
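Note on compare_versions in Versions.py above: the map() results are wrapped in list() before unpacking, although in Python 3 unpacking works directly on the iterator, and comparing version tuples avoids the 1e6/1e3 weighting altogether. A possible simplification, written as a stand-alone sketch rather than the patch's code:

    def compare_versions(v1, v2):
        # map() returns an iterator in Python 3; tuple() consumes it and the
        # tuples compare element by element, most significant part first.
        def as_tuple(version):
            for marker in ('+', 'rc1', 'rc2', 'rc3'):
                version = version.replace(marker, '', 1)
            return tuple(map(float, version.split('.')))
        return as_tuple(v1) >= as_tuple(v2)

    print(compare_versions("3.6.5", "2.7.13"))   # True
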
numpy ; print " - Numpy.............: %s"%numpy.version.version + print(" Les caracteristiques des applications et outils systeme :") + import sys ; v=sys.version.split() ; print(" - Python systeme....: %s"%v[0]) + import numpy ; print(" - Numpy.............: %s"%numpy.version.version) try: - import scipy ; print " - Scipy.............: %s"%scipy.version.version + import scipy ; print(" - Scipy.............: %s"%scipy.version.version) except: - print " - Scipy.............: %s"%("absent",) + print(" - Scipy.............: %s"%("absent",)) try: - import numpy.distutils.system_info as sysinfo ; la = sysinfo.get_info('lapack') ; print " - Lapack............: %s/lib%s.so"%(la['library_dirs'][0],la['libraries'][0]) + import numpy.distutils.system_info as sysinfo ; la = sysinfo.get_info('lapack') ; print(" - Lapack............: %s/lib%s.so"%(la['library_dirs'][0],la['libraries'][0])) except: - print " - Lapack............: %s"%("absent",) - print + print(" - Lapack............: %s"%("absent",)) + print() return True def testNumpy01(dimension = 3, precision = 1.e-17, repetitions = 10): "Test Numpy" __d = int(dimension) - print " Taille du test..................................: %.0e"%__d + print(" Taille du test..................................: %.0e"%__d) t_init = time.time() A = numpy.array([numpy.arange(dimension)+1.,]*__d) x = numpy.arange(__d)+1. - print " La duree elapsed moyenne de l'initialisation est: %4.1f s"%(time.time()-t_init) + print(" La duree elapsed moyenne de l'initialisation est: %4.1f s"%(time.time()-t_init)) # t_init = time.time() for i in range(repetitions): b = numpy.dot(A,x) - print " La duree elapsed pour %3i produits est de.......: %4.1f s"%(repetitions, time.time()-t_init) + print(" La duree elapsed pour %3i produits est de.......: %4.1f s"%(repetitions, time.time()-t_init)) r = [__d*(__d+1.)*(2.*__d+1.)/6.,]*__d if max(abs(b-r)) > precision: raise ValueError("Resultat du test errone (1)") else: - print " Test correct, erreur maximale inferieure a %s"%precision - print + print(" Test correct, erreur maximale inferieure a %s"%precision) + print() del A, x, b def testNumpy02(dimension = 3, precision = 1.e-17, repetitions = 100): "Test Numpy" __d = int(dimension) - print " Taille du test..................................: %.0e"%__d + print(" Taille du test..................................: %.0e"%__d) t_init = time.time() A = numpy.random.normal(0.,1.,size=(__d,__d)) x = numpy.random.normal(0.,1.,size=(__d,)) - print " La duree elapsed moyenne de l'initialisation est: %4.1f s"%(time.time()-t_init) + print(" La duree elapsed moyenne de l'initialisation est: %4.1f s"%(time.time()-t_init)) # t_init = time.time() for i in range(repetitions): b = numpy.dot(A,x) - print " La duree elapsed pour %3i produits est de.......: %4.1f s"%(repetitions, time.time()-t_init) - print + print(" La duree elapsed pour %3i produits est de.......: %4.1f s"%(repetitions, time.time()-t_init)) + print() del A, x, b # ============================================================================== if __name__ == "__main__": - print '\n AUTODIAGNOSTIC \n' + print('\n AUTODIAGNOSTIC \n') testSysteme() numpy.random.seed(1000) testNumpy01(dimension = 1.e4) testNumpy02(dimension = 3.e3) - print + print() diff --git a/test/test6701/Doc_TUI_Exemple_01.py b/test/test6701/Doc_TUI_Exemple_01.py index f2a7f91..1b7b1ec 100644 --- a/test/test6701/Doc_TUI_Exemple_01.py +++ b/test/test6701/Doc_TUI_Exemple_01.py @@ -42,11 +42,11 @@ def test1(): # 
============================================================================== if __name__ == "__main__": - print '\n AUTODIAGNOSTIC \n' - print """Exemple de la doc : + print('\n AUTODIAGNOSTIC \n') + print("""Exemple de la doc : Un exemple simple de creation d'un cas de calcul TUI ADAO +++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - """ + """) xa = test1() assertAlmostEqualArrays(xa, [0.25, 0.80, 0.95], places = 5) diff --git a/test/test6701/utExtend.py b/test/test6701/utExtend.py index ec9aba5..e6efed5 100644 --- a/test/test6701/utExtend.py +++ b/test/test6701/utExtend.py @@ -32,7 +32,7 @@ import numpy def assertAlmostEqualArrays(first, second, places=7, msg=None, delta=None): "Compare two vectors, like unittest.assertAlmostEqual" if msg is not None: - print msg + print(msg) if delta is not None: if ( (numpy.asarray(first) - numpy.asarray(second)) > float(delta) ).any(): raise AssertionError("%s != %s within %s places"%(first,second,delta)) diff --git a/test/test6702/Doc_TUI_Exemple_02.py b/test/test6702/Doc_TUI_Exemple_02.py index 19c2a2a..14e6371 100644 --- a/test/test6702/Doc_TUI_Exemple_02.py +++ b/test/test6702/Doc_TUI_Exemple_02.py @@ -67,14 +67,14 @@ def test2(): # ============================================================================== if __name__ == "__main__": - print '\n AUTODIAGNOSTIC \n' - print """Exemple de la doc : + print('\n AUTODIAGNOSTIC \n') + print("""Exemple de la doc : Creation detaillee d'un cas de calcul TUI ADAO ++++++++++++++++++++++++++++++++++++++++++++++ Les deux resultats sont testes pour etre identiques. - """ + """) xa1 = test1() xa2 = test2() ecart = assertAlmostEqualArrays(xa1, xa2, places = 15) - print " Difference maximale entre les deux : %.2e"%ecart + print(" Difference maximale entre les deux : %.2e"%ecart) diff --git a/test/test6702/utExtend.py b/test/test6702/utExtend.py index ec9aba5..e6efed5 100644 --- a/test/test6702/utExtend.py +++ b/test/test6702/utExtend.py @@ -32,7 +32,7 @@ import numpy def assertAlmostEqualArrays(first, second, places=7, msg=None, delta=None): "Compare two vectors, like unittest.assertAlmostEqual" if msg is not None: - print msg + print(msg) if delta is not None: if ( (numpy.asarray(first) - numpy.asarray(second)) > float(delta) ).any(): raise AssertionError("%s != %s within %s places"%(first,second,delta)) diff --git a/test/test6703/Doc_TUI_Exemple_03.py b/test/test6703/Doc_TUI_Exemple_03.py index 44d4af6..ad1437b 100644 --- a/test/test6703/Doc_TUI_Exemple_03.py +++ b/test/test6703/Doc_TUI_Exemple_03.py @@ -94,22 +94,22 @@ def test1(): Xoptimum = case.get("Analysis")[-1] FX_at_optimum = case.get("SimulatedObservationAtOptimum")[-1] J_values = case.get("CostFunctionJ")[:] - print - print "Number of internal iterations...: %i"%len(J_values) - print "Initial state...................:",numpy.ravel(Xbackground) - print "Optimal state...................:",numpy.ravel(Xoptimum) - print "Simulation at optimal state.....:",numpy.ravel(FX_at_optimum) - print + print() + print("Number of internal iterations...: %i"%len(J_values)) + print("Initial state...................:",numpy.ravel(Xbackground)) + print("Optimal state...................:",numpy.ravel(Xoptimum)) + print("Simulation at optimal state.....:",numpy.ravel(FX_at_optimum)) + print() # return case.get("Analysis")[-1] # ============================================================================== if __name__ == "__main__": - print '\n AUTODIAGNOSTIC \n' - print """Exemple de la doc : + print('\n AUTODIAGNOSTIC \n') + print("""Exemple de la doc : 
Exploitation independante des resultats d'un cas de calcul ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - """ + """) xa = test1() assertAlmostEqualArrays(xa, [ 2., 3., 4.]) diff --git a/test/test6703/utExtend.py b/test/test6703/utExtend.py index ec9aba5..e6efed5 100644 --- a/test/test6703/utExtend.py +++ b/test/test6703/utExtend.py @@ -32,7 +32,7 @@ import numpy def assertAlmostEqualArrays(first, second, places=7, msg=None, delta=None): "Compare two vectors, like unittest.assertAlmostEqual" if msg is not None: - print msg + print(msg) if delta is not None: if ( (numpy.asarray(first) - numpy.asarray(second)) > float(delta) ).any(): raise AssertionError("%s != %s within %s places"%(first,second,delta)) diff --git a/test/test6901/Verification_des_Assimilation_Algorithms.py b/test/test6901/Verification_des_Assimilation_Algorithms.py index e933b95..222fb98 100644 --- a/test/test6901/Verification_des_Assimilation_Algorithms.py +++ b/test/test6901/Verification_des_Assimilation_Algorithms.py @@ -27,9 +27,9 @@ def test1(): """Verification de la disponibilite de l'ensemble des algorithmes\n(Utilisation d'un operateur matriciel)""" Xa = {} for algo in ("3DVAR", "Blue", "ExtendedBlue", "LinearLeastSquares", "NonLinearLeastSquares", "DerivativeFreeOptimization"): - print + print() msg = "Algorithme en test : %s"%algo - print msg+"\n"+"-"*len(msg) + print(msg+"\n"+"-"*len(msg)) # adaopy = adaoBuilder.New() adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "Bounds":[[-1,10.],[-1,10.],[-1,10.]]}) @@ -44,9 +44,9 @@ def test1(): del adaopy # for algo in ("ExtendedKalmanFilter", "KalmanFilter", "UnscentedKalmanFilter", "4DVAR"): - print + print() msg = "Algorithme en test : %s"%algo - print msg+"\n"+"-"*len(msg) + print(msg+"\n"+"-"*len(msg)) # adaopy = adaoBuilder.New() adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, }) @@ -63,9 +63,9 @@ def test1(): del adaopy # for algo in ("ParticleSwarmOptimization", "QuantileRegression", ): - print + print() msg = "Algorithme en test : %s"%algo - print msg+"\n"+"-"*len(msg) + print(msg+"\n"+"-"*len(msg)) # adaopy = adaoBuilder.New() adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"BoxBounds":3*[[-1,3]], "SetSeed":1000, }) @@ -80,9 +80,9 @@ def test1(): del adaopy # for algo in ("EnsembleBlue", ): - print + print() msg = "Algorithme en test : %s"%algo - print msg+"\n"+"-"*len(msg) + print(msg+"\n"+"-"*len(msg)) # adaopy = adaoBuilder.New() adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"SetSeed":1000, }) @@ -96,14 +96,14 @@ def test1(): Xa[algo] = adaopy.get("Analysis")[-1] del adaopy # - print + print() msg = "Tests des ecarts attendus :" - print msg+"\n"+"="*len(msg) + print(msg+"\n"+"="*len(msg)) verify_similarity_of_algo_results(("3DVAR", "Blue", "ExtendedBlue", "4DVAR", "DerivativeFreeOptimization"), Xa) verify_similarity_of_algo_results(("LinearLeastSquares", "NonLinearLeastSquares"), Xa) verify_similarity_of_algo_results(("ExtendedKalmanFilter", "KalmanFilter", "UnscentedKalmanFilter"), Xa) - print " Les resultats obtenus sont corrects." 
- print + print(" Les resultats obtenus sont corrects.") + print() # return 0 @@ -113,9 +113,9 @@ def test2(): M = numpy.matrix("1 0 0;0 2 0;0 0 3") def H(x): return M * numpy.asmatrix(numpy.ravel( x )).T for algo in ("3DVAR", "Blue", "ExtendedBlue", "NonLinearLeastSquares", "DerivativeFreeOptimization"): - print + print() msg = "Algorithme en test : %s"%algo - print msg+"\n"+"-"*len(msg) + print(msg+"\n"+"-"*len(msg)) # adaopy = adaoBuilder.New() adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, "Bounds":[[-1,10.],[-1,10.],[-1,10.]]}) @@ -132,9 +132,9 @@ def test2(): M = numpy.matrix("1 0 0;0 2 0;0 0 3") def H(x): return M * numpy.asmatrix(numpy.ravel( x )).T for algo in ("ExtendedKalmanFilter", "KalmanFilter", "UnscentedKalmanFilter", "4DVAR"): - print + print() msg = "Algorithme en test : %s"%algo - print msg+"\n"+"-"*len(msg) + print(msg+"\n"+"-"*len(msg)) # adaopy = adaoBuilder.New() adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10, }) @@ -153,9 +153,9 @@ def test2(): M = numpy.matrix("1 0 0;0 1 0;0 0 1") def H(x): return M * numpy.asmatrix(numpy.ravel( x )).T for algo in ("ParticleSwarmOptimization", "QuantileRegression", ): - print + print() msg = "Algorithme en test : %s"%algo - print msg+"\n"+"-"*len(msg) + print(msg+"\n"+"-"*len(msg)) # adaopy = adaoBuilder.New() adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"BoxBounds":3*[[-1,3]], "SetSeed":1000, }) @@ -169,31 +169,31 @@ def test2(): Xa[algo] = adaopy.get("Analysis")[-1] del adaopy # - print + print() msg = "Tests des ecarts attendus :" - print msg+"\n"+"="*len(msg) + print(msg+"\n"+"="*len(msg)) verify_similarity_of_algo_results(("3DVAR", "Blue", "ExtendedBlue", "4DVAR", "DerivativeFreeOptimization"), Xa) verify_similarity_of_algo_results(("ExtendedKalmanFilter", "KalmanFilter", "UnscentedKalmanFilter"), Xa) - print " Les resultats obtenus sont corrects." 
- print + print(" Les resultats obtenus sont corrects.") + print() # return 0 def almost_equal_vectors(v1, v2, precision = 1.e-15, msg = ""): """Comparaison de deux vecteurs""" - print " Difference maximale %s: %.2e"%(msg, max(abs(v2 - v1))) + print(" Difference maximale %s: %.2e"%(msg, max(abs(v2 - v1)))) return max(abs(v2 - v1)) < precision def verify_similarity_of_algo_results(serie = [], Xa = {}): - print " Comparaisons :" + print(" Comparaisons :") for algo1 in serie: for algo2 in serie: if algo1 is algo2: break assert almost_equal_vectors( Xa[algo1], Xa[algo2], 5.e-5, "entre %s et %s "%(algo1, algo2) ) - print " Algorithmes dont les resultats sont similaires : %s\n"%(serie,) + print(" Algorithmes dont les resultats sont similaires : %s\n"%(serie,)) #=============================================================================== if __name__ == "__main__": - print '\n AUTODIAGNOSTIC \n' + print('\n AUTODIAGNOSTIC \n') test1() test2() diff --git a/test/test6902/Verification_des_Checking_Algorithms.py b/test/test6902/Verification_des_Checking_Algorithms.py index 63a331a..e40d23f 100644 --- a/test/test6902/Verification_des_Checking_Algorithms.py +++ b/test/test6902/Verification_des_Checking_Algorithms.py @@ -25,9 +25,9 @@ import adaoBuilder # ============================================================================== def test1(): for algo in ("AdjointTest", "FunctionTest", "GradientTest", "LinearityTest", "TangentTest"): - print + print() msg = "Algorithme en test : %s"%algo - print msg+"\n"+"-"*len(msg) + print(msg+"\n"+"-"*len(msg)) # adaopy = adaoBuilder.New() adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={"EpsilonMinimumExponent":-10,"NumberOfRepetition":2, "SetSeed":1000}) @@ -40,9 +40,9 @@ def test1(): del adaopy # for algo in ("ObserverTest", ): - print + print() msg = "Algorithme en test : %s"%algo - print msg+"\n"+"-"*len(msg) + print(msg+"\n"+"-"*len(msg)) # adaopy = adaoBuilder.New() adaopy.setAlgorithmParameters(Algorithm=algo) @@ -56,9 +56,9 @@ def test1(): del adaopy # for algo in ("SamplingTest", ): - print + print() msg = "Algorithme en test : %s"%algo - print msg+"\n"+"-"*len(msg) + print(msg+"\n"+"-"*len(msg)) # adaopy = adaoBuilder.New() adaopy.setAlgorithmParameters(Algorithm=algo, Parameters={ @@ -76,5 +76,5 @@ def test1(): # ============================================================================== if __name__ == "__main__": - print '\n AUTODIAGNOSTIC \n' + print('\n AUTODIAGNOSTIC \n') test1()
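
Note (not part of the patch): every hunk above applies the same mechanical conversion, replacing Python 2 print statements with Python 3 print() calls while leaving the %-formatted message strings untouched. As a minimal sketch of that pattern, the hypothetical module below (the names demo_autodiag.py and show_versions are illustrative only, not ADAO code) reproduces the style of the test drivers and, thanks to the __future__ import, runs unchanged under Python 2.6+ as well as Python 3:

```python
# demo_autodiag.py -- illustrative sketch only, not part of the ADAO patch.
# Demonstrates the print-statement -> print() conversion applied in the
# hunks above; the __future__ import keeps it importable under Python 2
# during a transition (Python 3 alone does not need it).
from __future__ import print_function

import sys

def show_versions():
    "Report the Python and (optionally) Numpy versions, in the style of the tests"
    print(" - Python systeme....: %s" % sys.version.split()[0])
    try:
        import numpy
        print(" - Numpy.............: %s" % numpy.version.version)
    except ImportError:
        print(" - Numpy.............: %s" % ("absent",))
    # A bare Python 2 'print' statement becomes an explicit 'print()' call:
    print()

if __name__ == "__main__":
    print('\n AUTODIAGNOSTIC \n')
    show_versions()
```

Keeping the %-formatting and the message text identical, as the patch does, confines the port to the statement-to-function change and makes the hunks easy to review line by line.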