# -*- coding: utf-8 -*-
#
-# Copyright (C) 2008-2017 EDF R&D
+# Copyright (C) 2008-2022 EDF R&D
#
# This file is part of SALOME ADAO module
#
if sys.version_info.major > 2:
def unicode(text, encoding='utf-8'): return text
+print("-- Starting AdaoCalatogGenerator.py --")
+
+try:
+ import adao
+ import daEficas
+ import daYacsSchemaCreator
+ import daCore.Aidsm
+ import daYacsSchemaCreator.infos_daComposant as infos
+except:
+ logging.fatal("Import of ADAO python modules failed !" +
+ "\n add ADAO python installation directory in your PYTHONPATH")
+ traceback.print_exc()
+ sys.exit(1)
+
+#===============================================================================
+
#----------- Templates Part ---------------#
+
begin_catalog_file = """# -*- coding: utf-8 -*-
#
-# Copyright (C) 2008-2017 EDF R&D
+# Copyright (C) 2008-2022 EDF R&D
#
# This file is part of SALOME ADAO module
#
JdC = JDC_CATA (
code = '%s',
execmodul = None,
- regles = ( AU_MOINS_UN ('ASSIMILATION_STUDY','CHECKING_STUDY'), AU_PLUS_UN ('ASSIMILATION_STUDY','CHECKING_STUDY')),
+ regles = ( AU_MOINS_UN ('ASSIMILATION_STUDY','OPTIMIZATION_STUDY','REDUCTION_STUDY','CHECKING_STUDY'), AU_PLUS_UN ('ASSIMILATION_STUDY','OPTIMIZATION_STUDY','REDUCTION_STUDY','CHECKING_STUDY')),
)
VERSION_CATALOGUE='%s'
return 1
NoCheckInNS.info = u""
def DirectOperatorInNS(filename):
- if os.path.exists(filename):
+ if os.path.isfile(filename):
fc = open(filename, 'r').readlines()
cr = re.compile("^def[\s]*DirectOperator[\s]*\(")
for ln in fc:
return 0
DirectOperatorInNS.info = u"The Python file has to contain explicitly a \\"DirectOperator\\" function definition with only one vector as argument."
def TangentOperatorInNS(filename):
- if os.path.exists(filename):
+ if os.path.isfile(filename):
fc = open(filename, 'r').readlines()
cr = re.compile("^def[\s]*TangentOperator[\s]*\(")
for ln in fc:
return 0
TangentOperatorInNS.info = u"The Python file has to contain explicitly a \\"TangentOperator\\" function definition with only one pair of vectors as argument."
def AdjointOperatorInNS(filename):
- if os.path.exists(filename):
+ if os.path.isfile(filename):
fc = open(filename, 'r').readlines()
cr = re.compile("^def[\s]*AdjointOperator[\s]*\(")
for ln in fc:
if cr.match(ln): return 1
return 0
AdjointOperatorInNS.info = u"The Python file has to contain explicitly an \\"AdjointOperator\\" function definition with only one pair of vectors as argument."
-"""%(module_version.name,module_version.version)
+def ColDataFileExtVal(filename):
+ __readable = (".csv", ".tsv", ".txt", ".npy", ".npz")
+ if os.path.isfile(filename) and os.path.splitext(filename)[1] in __readable:
+ return 1
+ return 0
+ColDataFileExtVal.info = u"The data file has to contain explicitly one or more number columns with separator, or one variable, that can fit in a unique continuous vector."
+"""%(module_version.name,module_version.cata)
+
+# --------------------------------------
+
+from daCore.Templates import UserPostAnalysisTemplates
+upa_list = UserPostAnalysisTemplates.keys_in_presentation_order()
+upa_list = '"%s"'%str('", "'.join(upa_list))
+upa_cont = ""
+for k in UserPostAnalysisTemplates.keys_in_presentation_order():
+ upa_cont += """ %s = BLOC (condition = " Template == '%s' ",\n"""%(k,k)
+ upa_cont += """ ValueTemplate = SIMP(statut = "o", typ = "TXM", min=1, max=1, defaut = "%s", fr="%s", ang="%s" ),\n"""%(
+ UserPostAnalysisTemplates[k].replace("\n","\\n").replace('"','\\"'),
+ UserPostAnalysisTemplates.getdoc(k, "fr_FR"),
+ UserPostAnalysisTemplates.getdoc(k, "en_EN"),
+ )
+ upa_cont += """ ),\n"""
-# Important : validators=[...] pour que les conditions soient traitees simultanement, en "ET", et pas en "OU" (choisi dans le cas du tuple a la place de la liste)
+# Important : validators=[...] pour que les conditions soient traitées simultanément, en "ET", et pas en "OU" (choisi dans le cas du tuple a la place de la liste)
# validators=[OnlyStr(), FileExtVal('py'), FunctionVal(fv)]
data_method = """
def F_{data_name}(statut, fv=NoCheckInNS) : return FACT(
statut = statut,
FROM = SIMP(statut = "o", typ = "TXM", into=({data_into}), defaut={data_default}),
SCRIPT_DATA = BLOC ( condition = " FROM in ( 'Script', ) ",
- SCRIPT_FILE = SIMP(statut = "o", typ = "FichierNoAbs", validators=[OnlyStr(), FileExtVal('py'), FunctionVal(fv)], fr="En attente d'un nom de fichier script, avec ou sans le chemin complet pour le trouver, contenant si nécessaire la définition d'une variable interne de même nom que le concept parent", ang="Waiting for a script file name, with or without the full path to find it, containing if necessary the definition of an internal variable of the same name as the parent concept"),
+ SCRIPT_FILE = SIMP(statut = "o", typ = ("FichierNoAbs",'Python Files (*.py)',), validators=[OnlyStr(), FileExtVal('py'), FunctionVal(fv)], fr="En attente d'un nom de fichier script, avec ou sans le chemin complet pour le trouver, contenant si nécessaire la définition d'une variable interne de même nom que le concept parent", ang="Waiting for a script file name, with or without the full path to find it, containing if necessary the definition of an internal variable of the same name as the parent concept"),
+ ),
+ DATA_DATA = BLOC ( condition = " FROM in ( 'DataFile', ) ",
+ DATA_FILE = SIMP(statut = "o", typ = ("FichierNoAbs",'CSV Text Files (*.csv);;TSV Text Files (*.tsv);;TXT Text Files (*.txt);;NPY Binary Numpy Files (*.npy);;NPZ Binary Numpy Files (*.npz);;All Files (*)", ',), validators=[OnlyStr(), FunctionVal(ColDataFileExtVal)], fr="En attente d'un nom de fichier de données, avec ou sans le chemin complet pour le trouver, contenant ou plusieurs colonnes pour définir un unique vecteur continu", ang="Waiting for a data file name, with or without the full path to find it, containing one or more columns to define a unique continuous vector"),
+ ColMajor = SIMP(statut="f", typ = "I", into=(0, 1), defaut=0, fr="Variables en colonnes acquises ligne par ligne (0) ou colonne par colonne (1)", ang="Variables in columns acquired line by line (0) or column by column (1)"),
),
STRING_DATA = BLOC ( condition = " FROM in ( 'String', ) ",
- STRING = SIMP(statut = "o", typ = "TXM",{ms_default} fr="En attente d'une chaine de caractères entre guillements. Pour construire un vecteur ou une matrice, ce doit être une suite de nombres, utilisant un espace ou une virgule pour séparer deux éléments et un point-virgule pour séparer deux lignes", ang="Waiting for a string in quotes. To build a vector or a matrix, it has to be a float serie, using a space or comma to separate two elements in a line, a semi-colon to separate rows"),
+ STRING = SIMP(statut = "o", typ = "TXM",{ms_default} fr="En attente d'une chaîne de caractères entre guillemets. Pour construire un vecteur ou une matrice, ce doit être une suite de nombres, utilisant un espace ou une virgule pour séparer deux éléments et un point-virgule pour séparer deux lignes", ang="Waiting for a string in quotes. To build a vector or a matrix, it has to be a float serie, using a space or comma to separate two elements in a line, a semi-colon to separate rows"),
),
SCRIPTWITHFUNCTIONS_DATA = BLOC ( condition = " FROM in ( 'ScriptWithFunctions', ) ",
SCRIPTWITHFUNCTIONS_FILE = SIMP(statut = "o", typ = "FichierNoAbs", validators=[OnlyStr(), FileExtVal('py'), FunctionVal(DirectOperatorInNS), FunctionVal(TangentOperatorInNS), FunctionVal(AdjointOperatorInNS)], fr="En attente d'un nom de fichier script, avec ou sans le chemin complet pour le trouver, contenant en variables internes trois fonctions de calcul nommées DirectOperator, TangentOperator et AdjointOperator", ang="Waiting for a script file name, with or without the full path to find it, containing as internal variables three computation functions named DirectOperator, TangentOperator and AdjointOperator"),
SCRIPTWITHSWITCH_DATA = BLOC ( condition = " FROM in ( 'ScriptWithSwitch', ) ",
SCRIPTWITHSWITCH_FILE = SIMP(statut = "o", typ = "FichierNoAbs", validators=[OnlyStr(), FileExtVal('py')], fr="En attente d'un nom de fichier script, avec ou sans le chemin complet pour le trouver, contenant un switch pour les calculs direct, tangent et adjoint", ang="Waiting for a script file name, with or without the full path to find it, containing a switch for direct, tangent and adjoint computations"),
),
- TEMPLATE_DATA = BLOC (condition = " FROM in ( 'Template', ) ",
- Template = SIMP(statut = "o", typ = "TXM", min=1, max=1, defaut = "AnalysisPrinter", into=("AnalysisPrinter", "AnalysisSaver", "AnalysisPrinterAndSaver")),
- AnalysisPrinter = BLOC (condition = " Template == 'AnalysisPrinter' ",
- ValueTemplate = SIMP(statut = "o", typ = "TXM", min=1, max=1, defaut = "import numpy\\nxa=numpy.ravel(ADD.get('Analysis')[-1])\\nprint 'Analysis:',xa" ),
- ),
- AnalysisSaver = BLOC (condition = " Template == 'AnalysisSaver' ",
- ValueTemplate = SIMP(statut = "o", typ = "TXM", min=1, max=1, defaut = "import numpy\\nxa=numpy.ravel(ADD.get('Analysis')[-1])\\nf='/tmp/analysis.txt'\\nprint 'Analysis saved in \\"%s\\"'%f\\nnumpy.savetxt(f,xa)" ),
- ),
- AnalysisPrinterAndSaver = BLOC (condition = " Template == 'AnalysisPrinterAndSaver' ",
- ValueTemplate = SIMP(statut = "o", typ = "TXM", min=1, max=1, defaut = "import numpy\\nxa=numpy.ravel(ADD.get('Analysis')[-1])\\nprint 'Analysis:',xa\\nf='/tmp/analysis.txt'\\nprint 'Analysis saved in \\"%s\\"'%f\\nnumpy.savetxt(f,xa)" ),
- ),
- ),
+"""+\
+""" TEMPLATE_DATA = BLOC (condition = " FROM in ( 'Template', ) ",
+ Template = SIMP(statut = "o", typ = "TXM", min=1, max=1, defaut = "AnalysisPrinter", into=(%s)),
+"""%(upa_list,)+\
+upa_cont+\
+""" ),
)
"""
+# --------------------------------------
+
init_method = """
def F_InitChoice() : return ("Background",
"BackgroundError",
assim_data_method = """
def {assim_name}InNS(filename):
- if os.path.exists(filename):
+ if os.path.isfile(filename):
fc = open(filename, 'r').readlines()
cr = re.compile("^{assim_name}[\s]*=")
for ln in fc:
),
),"""
+# --------------------------------------
+
from daCore.Templates import ObserverTemplates
observers_list = ObserverTemplates.keys_in_presentation_order()
observers_list = '"%s"'%str('", "'.join(observers_list))
ObserverTemplates.getdoc(k, "en_EN"),
)
observers_cont += """ ),\n"""
+
observers_method = """
def F_ObserverTemplate() : return BLOC(condition = " NodeType == 'Template' ",
Template = SIMP(statut = "o", typ = "TXM", min=1, max=1, defaut = "ValuePrinter", into=(%s)),
)
"""%(observers_list,observers_cont)
+# --------------------------------------
+
algo_choices = """
def AlgorithmParametersInNS(filename):
- if os.path.exists(filename):
+ if os.path.isfile(filename):
fc = open(filename, 'r').readlines()
cr = re.compile("^AlgorithmParameters[\s]*=")
for ln in fc:
statut="f",
{algo_parameters} ),"""
+# --------------------------------------
+
assim_study = """
def F_variables(statut) : return FACT(
statut=statut,
StudyName = SIMP(statut="o", typ = "TXM", defaut="ADAO Calculation Case"),
StudyRepertory = SIMP(statut="f", typ = "Repertoire", validators=FunctionVal(ChDir), min=1, max=1),
Debug = SIMP(statut="f", typ = "I", into=(0, 1), defaut=0),
+ ExecuteInContainer = SIMP(statut="f", typ = "TXM", min=1, max=1, defaut = "No", into=("No", "Mono", "Multi")),
AlgorithmParameters = F_AlgorithmParameters("o",({algos_names}), AlgorithmParametersInNS),
Background = F_Background("o", BackgroundInNS),
BackgroundError = F_BackgroundError("o", BackgroundErrorInNS),
Observation = F_Observation("o", ObservationInNS),
ObservationError = F_ObservationError("o", ObservationErrorInNS),
- ObservationOperator = F_ObservationOperator("o"),
- EvolutionModel = F_EvolutionModel("f"),
+ ObservationOperator = F_ObservationOperator("o", ObservationOperatorInNS),
+ EvolutionModel = F_EvolutionModel("f", EvolutionModelInNS),
+ EvolutionError = F_EvolutionError("f", EvolutionErrorInNS),
+ ControlInput = F_ControlInput("f"),
+ UserDataInit = F_Init("f"),
+ UserPostAnalysis = F_UserPostAnalysis("o"),
+ InputVariables = F_variables("f"),
+ OutputVariables = F_variables("f"),
+ Observers = F_Observers("f")
+ )
+
+OPTIMIZATION_STUDY = PROC(nom="OPTIMIZATION_STUDY",
+ op=None,
+ repetable = "n",
+ StudyName = SIMP(statut="o", typ = "TXM", defaut="ADAO Calculation Case"),
+ StudyRepertory = SIMP(statut="f", typ = "Repertoire", validators=FunctionVal(ChDir), min=1, max=1),
+ Debug = SIMP(statut="f", typ = "I", into=(0, 1), defaut=0),
+ ExecuteInContainer = SIMP(statut="f", typ = "TXM", min=1, max=1, defaut = "No", into=("No", "Mono", "Multi")),
+ AlgorithmParameters = F_AlgorithmParameters("o",({optim_names}), AlgorithmParametersInNS),
+ Background = F_Background("o", BackgroundInNS),
+ BackgroundError = F_BackgroundError("f", BackgroundErrorInNS),
+ Observation = F_Observation("o", ObservationInNS),
+ ObservationError = F_ObservationError("f", ObservationErrorInNS),
+ ObservationOperator = F_ObservationOperator("o", ObservationOperatorInNS),
+ EvolutionModel = F_EvolutionModel("f", EvolutionModelInNS),
+ EvolutionError = F_EvolutionError("f", EvolutionErrorInNS),
+ ControlInput = F_ControlInput("f"),
+ UserDataInit = F_Init("f"),
+ UserPostAnalysis = F_UserPostAnalysis("o"),
+ InputVariables = F_variables("f"),
+ OutputVariables = F_variables("f"),
+ Observers = F_Observers("f")
+ )
+
+REDUCTION_STUDY = PROC(nom="REDUCTION_STUDY",
+ op=None,
+ repetable = "n",
+ StudyName = SIMP(statut="o", typ = "TXM", defaut="ADAO Calculation Case"),
+ StudyRepertory = SIMP(statut="f", typ = "Repertoire", validators=FunctionVal(ChDir), min=1, max=1),
+ Debug = SIMP(statut="f", typ = "I", into=(0, 1), defaut=0),
+ ExecuteInContainer = SIMP(statut="f", typ = "TXM", min=1, max=1, defaut = "No", into=("No", "Mono", "Multi")),
+ AlgorithmParameters = F_AlgorithmParameters("o",({reduc_names}), AlgorithmParametersInNS),
+ Background = F_Background("o", BackgroundInNS),
+ BackgroundError = F_BackgroundError("o", BackgroundErrorInNS),
+ Observation = F_Observation("o", ObservationInNS),
+ ObservationError = F_ObservationError("o", ObservationErrorInNS),
+ ObservationOperator = F_ObservationOperator("o", ObservationOperatorInNS),
+ EvolutionModel = F_EvolutionModel("f", EvolutionModelInNS),
EvolutionError = F_EvolutionError("f", EvolutionErrorInNS),
ControlInput = F_ControlInput("f"),
UserDataInit = F_Init("f"),
StudyName = SIMP(statut="o", typ = "TXM", defaut="ADAO Checking Case"),
StudyRepertory = SIMP(statut="f", typ = "Repertoire", validators=FunctionVal(ChDir), min=1, max=1),
Debug = SIMP(statut="f", typ = "I", into=(0, 1), defaut=0),
+ ExecuteInContainer = SIMP(statut="f", typ = "TXM", min=1, max=1, defaut = "No", into=("No", "Mono", "Multi")),
AlgorithmParameters = F_AlgorithmParameters("o", ({check_names}), AlgorithmParametersInNS),
CheckingPoint = F_CheckingPoint("o", CheckingPointInNS),
+ Background = F_Background("f", BackgroundInNS),
BackgroundError = F_BackgroundError("f", BackgroundErrorInNS),
Observation = F_Observation("f", ObservationInNS),
ObservationError = F_ObservationError("f", ObservationErrorInNS),
- ObservationOperator = F_ObservationOperator("o"),
+ ObservationOperator = F_ObservationOperator("o", ObservationOperatorInNS),
UserDataInit = F_Init("f"),
Observers = F_Observers("f")
)
#----------- End of Templates Part ---------------#
-
+#===============================================================================
#----------- Begin generation script -----------#
-print("-- Starting AdaoCalatogGenerator.py --")
-
-try:
- import daEficas
- import daYacsSchemaCreator
- import daCore.AssimilationStudy
- import daYacsSchemaCreator.infos_daComposant as infos
-except:
- logging.fatal("Import of ADAO python modules failed !" +
- "\n add ADAO python installation directory in your PYTHONPATH")
- traceback.print_exc()
- sys.exit(1)
# Parse arguments
from argparse import ArgumentParser
# Generates into a string
mem_file = io.StringIO()
-# Start file
+# Initial step: On ouvre le fichier
from time import strftime
mem_file.write(unicode(begin_catalog_file, 'utf-8').format(**{'date':strftime("%Y-%m-%d %H:%M:%S")}))
-# Step initial: on obtient la liste des algos
+# Step 0: on obtient la liste des algos
algos_names = ""
+optim_names = ""
+reduc_names = ""
check_names = ""
-decl_algos = ""
-assim_study_object = daCore.AssimilationStudy.AssimilationStudy()
-algos_list = assim_study_object.get_available_algorithms()
+task_names = ""
+adao_all_names = ""
+assim_study_object = daCore.Aidsm.Aidsm()
+algos_list = assim_study_object._Aidsm__get_available_algorithms()
del assim_study_object
for algo_name in algos_list:
if algo_name in infos.AssimAlgos:
logging.debug("An assimilation algorithm is found: " + algo_name)
algos_names += "\"" + algo_name + "\", "
- elif algo_name in infos.CheckAlgos:
+ if algo_name in infos.OptimizationAlgos:
+ logging.debug("An optimization algorithm is found: " + algo_name)
+ optim_names += "\"" + algo_name + "\", "
+ if algo_name in infos.ReductionAlgos:
+ logging.debug("A reduction algorithm is found: " + algo_name)
+ reduc_names += "\"" + algo_name + "\", "
+ if algo_name in infos.CheckAlgos:
logging.debug("A checking algorithm is found: " + algo_name)
check_names += "\"" + algo_name + "\", "
- else:
- logging.debug("This algorithm is not considered: " + algo_name)
-
-# Step 1: A partir des infos, on cree les fonctions qui vont permettre
-# d'entrer les donnees utilisateur
+ if algo_name in infos.TaskAlgos:
+ logging.debug("A task algorithm is found: " + algo_name)
+ task_names += "\"" + algo_name + "\", "
+ if algo_name in infos.AssimAlgos+infos.OptimizationAlgos+infos.ReductionAlgos+infos.CheckAlgos+infos.TaskAlgos:
+ # Pour filtrer sur les algorithmes vraiment interfacés, car il peut y en avoir moins que "algos_list"
+ adao_all_names += "\"" + algo_name + "\", "
+
+# Step 1: À partir des infos, on crée les fonctions qui vont permettre
+# d'entrer les données utilisateur
for data_input_name in infos.DataTypeDict:
logging.debug('A data input Type is found: ' + data_input_name)
data_name = data_input_name
data_default = ""
ms_default = ""
- # On recupere les differentes facon d'entrer les donnees
+    # On récupère les différentes façons d'entrer les données
for basic_type in infos.DataTypeDict[data_input_name]:
data_into += "\"" + basic_type + "\", "
- # On choisit le default
+ # On choisit le défaut
data_default = "\"" + infos.DataTypeDefaultDict[data_input_name] + "\""
if data_input_name in infos.DataSValueDefaultDict:
ms_default = " defaut=\"" + infos.DataSValueDefaultDict[data_input_name] + "\","
'data_into' : data_into,
'data_default' : data_default,
'ms_default' : ms_default,
- 'algos_names' : algos_names+check_names,
}))
-# Step 2: On cree les fonctions qui permettent de rentrer les donnees des algorithmes
+# Step 2: On crée les fonctions qui permettent de rentrer les données des algorithmes
for assim_data_input_name in infos.AssimDataDict:
logging.debug("An input function data input is found: " + assim_data_input_name)
# assim_name = assim_data_input_name
'default_choice' : default_choice,
}))
-# Step 3: On ajoute les fonctions representant les options possibles
+# Step 3: On ajoute les fonctions représentant les options possibles
for opt_name in infos.OptDict:
logging.debug("An optional node is found: " + opt_name)
data_name = opt_name
'data_into' : data_into,
'data_default' : data_default,
'ms_default' : ms_default,
- 'algos_names' : algos_names+check_names,
}))
-# Step 4: On ajoute la methode optionnelle init
-# TODO uniformiser avec le step 3
+# Step 3bis: On ajoute la méthode optionnelle init
+# TODO si possible uniformiser avec le step 3
mem_file.write(unicode(init_method, 'utf-8'))
-# Step 5: Add observers
+# Step 4: On ajoute les observers
decl_choices = ""
for obs_var in infos.ObserversList:
decl_choices += observers_choice.format(**{'var_name':obs_var})
'decl_choices' : decl_choices,
}))
-# Step 5: Add algorithmic choices
-
-all_names = eval((algos_names+check_names))
+# Step 5: On ajoute les choix algorithmiques
+all_names = eval((adao_all_names))
all_algo_defaults = ""
for algo in all_names:
- assim_study_object = daCore.AssimilationStudy.AssimilationStudy()
+ assim_study_object = daCore.Aidsm.Aidsm()
assim_study_object.setAlgorithmParameters(Algorithm=algo)
par_dict = assim_study_object.get("AlgorithmRequiredParameters",False)
par_keys = sorted(par_dict.keys())
elif pt is float:
algo_parameters += """ %s = SIMP(statut="f", typ="R"%s%s, min=1, max=1, defaut=%s, fr="%s"),\n"""%(pn,vi,vs,float(pd),pm)
elif pt is bool:
- algo_parameters += """ %s = SIMP(statut="f", typ="I", min=1, max=1, defaut=%s, fr="%s"),\n"""%(pn,int(pd),pm)
+ algo_parameters += """ %s = SIMP(statut="f", typ="I", into=(0, 1), min=1, max=1, defaut=%s, fr="%s"),\n"""%(pn,int(pd),pm)
elif pt is str and "listval" in par_dict[pn]:
algo_parameters += """ %s = SIMP(statut="f", typ="TXM", min=1, max=1, defaut="%s", into=%s, fr="%s"),\n"""%(pn,pd,par_dict[pn]["listval"],pm)
elif pt is tuple and "listval" in par_dict[pn]:
})
mem_file.write(unicode(algo_choices, 'utf-8').format(**{'all_algo_defaults':unicode(all_algo_defaults, 'utf-8')}))
-# Final step: Add algorithm and assim_study
+# Step 6: On ajoute l'algorithme et l'assim_study
mem_file.write(unicode(assim_study, 'utf-8').format(**{
'algos_names':algos_names,
+ 'optim_names':optim_names,
+ 'reduc_names':reduc_names,
'check_names':check_names,
- 'decl_algos':decl_algos,
+ 'task_names':task_names,
}))
-# Write file
+# Final step: On écrit le fichier
if sys.version_info.major > 2:
with open(os.path.join(args.catalog_path, args.catalog_name), "w", encoding='utf8') as final_file:
final_file.write(mem_file.getvalue())
with open(os.path.join(args.catalog_path, args.catalog_name), "wr") as final_file:
final_file.write(mem_file.getvalue().encode('utf-8'))
mem_file.close()
+
+#----------- End generation script -----------#
+
+#===============================================================================