studyexception.py
studyresult.py
salome_proxy.py
+ multijobbuilder.py
+ multijobstudy.py
+ noyacsbuilder.py
+ noyacsstudy.py
)
INSTALL(FILES ${SCRIPTS} DESTINATION ${SALOME_INSTALL_PYTHON}/pydefx)
ADD_SUBDIRECTORY(schemas)
+ADD_SUBDIRECTORY(multijob)
+ADD_SUBDIRECTORY(noyacs)
from .defaultschemabuilder import DefaultSchemaBuilder
from .salome_proxy import forceSalomeServers, forceNoSalomeServers
+from .multijobstudy import MultiJobStudy
+from .noyacsstudy import NoYacsStudy
--- /dev/null
+SET(SCHEMA_FILES
+ executor.py
+ mainjob.py
+ pointeval.py
+ )
+
+INSTALL(FILES ${SCHEMA_FILES} DESTINATION ${SALOME_INSTALL_PYTHON}/pydefx/multijob)
--- /dev/null
+import pydefx
+import os
+import time
+import traceback
+
+class Context:
+ def __init__(self):
+ self.launcher = pydefx.salome_proxy.getLauncher()
+
+class JobExecutor:
+ def __init__(self, config):
+ self.config = config
+
+ def initialize(self):
+ """ This is executed before the first evaluation.
+ Put here global processing needed by all the evaluations like the copy of
+ commun files.
+ """
+ # Copy the common files to the root work directory
+ params = pydefx.Parameters() # global parameters
+ params.loadDict(self.config["params"])
+ # use an empty command file: this job only exports its input files
+ command = os.path.join(os.getcwd(), "empty.sh")
+ open(command, "w").close()
+ params.salome_parameters.job_file = command
+ params.salome_parameters.job_type = "command"
+ study_module = os.path.join(os.getcwd(), self.config["studymodule"]+".py")
+ infiles = list(params.salome_parameters.in_files)
+ params.salome_parameters.in_files = infiles + [study_module]
+ launcher = pydefx.salome_proxy.getLauncher()
+ job_id = launcher.createJob(params.salome_parameters)
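+ # Note (based on the SALOME Launcher API): exportInputFiles() pushes the
+ # in_files to the remote work directory without launching the job; this is
+ # how the common files are copied once for all evaluations.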
+ launcher.exportInputFiles(job_id)
+
+ def evaluate(self, idx, point):
+ """ This is executed for every point to be evaluated.
+ """
+ context = Context()
+ error = None
+ out_values = None
+ try:
+ self.prepare(idx, point, context)
+ if self.noRunFound(idx, point, context):
+ self.runjob(idx, point, context)
+ error, out_values = self.getResult(context)
+ except Exception as e:
+ error = str(e)
+ traceback.print_exc()
+ return error, out_values
+
+ def prepare(self, idx, point, context):
+ """
+ Define local and remote work directory.
+ Define job script.
+ """
+ context.params = pydefx.Parameters()
+ context.params.loadDict(self.config["params"])
+ salome_parameters = context.params.salome_parameters
+ root_local_dir = salome_parameters.result_directory
+ root_remote_dir = salome_parameters.work_directory
+ input_files = [] # common files are already copied to the root directory
+ point_name = "job_"+str(idx)
+ context.local_dir = os.path.join(root_local_dir, point_name)
+ point_remote_dir = os.path.join(root_remote_dir, point_name)
+ if not os.path.exists(context.local_dir):
+ os.mkdir(context.local_dir)
+ # export the point to a file
+ data_file_name = "idefixdata.csv"
+ data_file_path = os.path.join(context.local_dir, data_file_name)
+ with open(data_file_path, "w") as f:
+ # an explicit dict conversion is needed for compatibility between Python versions
+ f.write(repr(dict(point)))
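+ # e.g. the file contains "{'x': 0.5, 'y': 1.2}" for a two-parameter
+ # point (hypothetical parameter names and values)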
+ input_files.append(data_file_path)
+
+ #command_path = os.path.join(root_local_dir, "command.py")
+ #salome_parameters.job_type = "command_salome"
+ #salome_parameters.job_file = command_path
+
+ salome_parameters.in_files = input_files
+ salome_parameters.out_files = ["idefixresult.txt", "idefixerror.txt"]
+ salome_parameters.work_directory = point_remote_dir
+ salome_parameters.result_directory = context.local_dir
+
+ def noRunFound(self, idx, point, context):
+ # placeholder for restart support: return False when a previous run of
+ # this point can be reused, in order to skip the evaluation
+ return True
+
+ def runjob(self, idx, point, context):
+ """
+ Create, launch and wait for the end of the job.
+ """
+ import random
+ # randomize the polling period so parallel branches do not all query the
+ # launcher at the same time
+ sleep_delay = random.randint(5, 15)
+ launcher = context.launcher
+ context.job_id = launcher.createJob(context.params.salome_parameters)
+ launcher.launchJob(context.job_id)
+ jobState = launcher.getJobState(context.job_id)
+ while jobState=="QUEUED" or jobState=="IN_PROCESS" or jobState=="RUNNING" :
+ time.sleep(sleep_delay)
+ jobState = launcher.getJobState(context.job_id)
+
+ def getResult(self, context):
+ """
+ Check the job state, fetch the result file.
+ """
+ launcher = context.launcher
+ jobState = launcher.getJobState(context.job_id)
+ error = ""
+ result = None
+ if jobState != "FINISHED":
+ error = "Job has not finished correctly."
+ else:
+ launcher.getJobResults(context.job_id, "")
+ error_file = os.path.join(context.local_dir, "idefixerror.txt")
+ result_file = os.path.join(context.local_dir, "idefixresult.txt")
+ with open(error_file, "r") as f:
+ error = f.read()
+ with open(result_file, "r") as f:
+ result_str = f.read()
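+ # the result file holds the repr() of the value returned by _exec
+ # (written by pointeval.py), so eval() rebuilds the Python object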
+ result = eval(result_str)
+
+ return error, result
+
+def createExecutor(config):
+ return JobExecutor(config)
--- /dev/null
+#! /usr/bin/env python3
+import json
+import importlib
+from multiprocessing.dummy import Pool
+import traceback
+
+class StartJob:
+ def __init__(self, executor):
+ self.executor = executor
+
+ def __call__(self, idx, in_values):
+ error = None
+ out_values = None
+ try:
+ error, out_values = self.executor.evaluate(idx, in_values)
+ except Exception as e:
+ error = str(e)
+ traceback.print_exc()
+ return idx, in_values, out_values, error
+
+class TerminateJob:
+ def __init__(self, manager):
+ self.manager = manager
+
+ def __call__(self, result):
+ # without a try block, an exception raised here could deadlock the pool
+ try:
+ idx, in_values, out_values, error = result
+ if not error:
+ error = None # normalize empty error strings to None
+ self.manager.addResult(idx, in_values, out_values, error)
+ except Exception as e:
+ traceback.print_exc()
+
+if __name__ == '__main__':
+ with open("idefixconfig.json", "r") as f:
+ config = json.load(f)
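+ # the configuration is written by MultiJobStudy.createNewJob and holds
+ # "nbbranches", "studymodule", "sampleIterator", "params" and "plugin"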
+ plugin_module = importlib.import_module(config["plugin"])
+ executor = plugin_module.createExecutor(config)
+ # global initialization - common work shared by every evaluation
+ executor.initialize()
+
+ itModuleName = config["sampleIterator"]
+ itModule = importlib.import_module(itModuleName)
+ sampleManager = itModule.SampleIterator()
+ sampleManager.writeHeaders()
+
+ nbbranches=config["nbbranches"]
+ pool = Pool(nbbranches)
+ runPoint = StartJob(executor)
+ endOk = TerminateJob(sampleManager)
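+ # each item yielded by the sample iterator is an (idx, in_values) tuple,
+ # which apply_async unpacks as the arguments of StartJob.__call__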
+ for point in sampleManager:
+ pool.apply_async(runPoint, point, callback=endOk)
+ pool.close()
+ pool.join()
+ sampleManager.terminate()
--- /dev/null
+#! /usr/bin/env python3
+import traceback
+import os
+
+data_file_name = "idefixdata.csv"
+study_module = "idefixstudy.py"
+error_result = "idefixerror.txt"
+value_result = "idefixresult.txt"
+
+with open(data_file_name, "r") as f:
+ values = f.read()
+inputvals = eval(values)
+
+error=""
+result=None
+old_dir = os.getcwd()
+
+try:
+ os.chdir("..") # go to commun root directory
+ with open(study_module, "r") as study_file:
+ study_string = study_file.read()
+ exec(study_string)
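+ # exec() above is expected to define _exec in the current namespace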
+ result = _exec(**inputvals)
+except Exception as e:
+ error = str(e)
+ traceback.print_exc()
+
+os.chdir(old_dir) # back to the current case job directory
+
+with open(error_result, "w") as f:
+ f.write(error)
+
+with open(value_result, "w") as f:
+ f.write(repr(result))
--- /dev/null
+# -*- coding: utf-8 -*-
+# Copyright (C) 2019 EDF R&D
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+import inspect
+import pathlib
+import os
+
+class MultiJobBuilder:
+ def __init__(self, executor=None, pointEval=None, mainJob=None):
+ filename = inspect.getframeinfo(inspect.currentframe()).filename
+ install_root_directory = pathlib.Path(filename).resolve().parent
+ install_files_directory = os.path.join(install_root_directory, "multijob")
+
+ if executor is None:
+ executor = os.path.join(install_files_directory, "executor.py")
+ self.executor = executor
+
+ if pointEval is None:
+ pointEval = os.path.join(install_files_directory, "pointeval.py")
+ self.pointEval = pointEval
+
+ if mainJob is None:
+ mainJob = os.path.join(install_files_directory, "mainjob.py")
+ self.mainJob = mainJob
+
+ def getMainJob(self):
+ return self.mainJob
+
+ def getExecutor(self):
+ return self.executor
+
+ def getPointEval(self):
+ return self.pointEval
+
+ def getPluginName(self):
+ basename = os.path.basename(self.executor)
+ if not basename.endswith(".py"):
+ raise Exception("File name {} does not end with '.py'.".format(
+ self.executor))
+ return basename[:-3]
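+
+# Usage sketch (the path below is hypothetical): a custom executor module can
+# be plugged in by passing its path to the builder:
+#   builder = MultiJobBuilder(executor="/path/to/my_executor.py")
+#   study = pydefx.MultiJobStudy(schemaBuilder=builder)
+# The executor module must provide createExecutor(config), as executor.py does.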
--- /dev/null
+# -*- coding: utf-8 -*-
+# Copyright (C) 2019 EDF R&D
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+import copy
+import os
+import json
+from . import pystudy
+from . import multijobbuilder
+from . import salome_proxy
+
+
+class MultiJobStudy(pystudy.PyStudy):
+ def __init__(self, sampleManager=None, schemaBuilder=None):
+ if schemaBuilder is None:
+ schemaBuilder = multijobbuilder.MultiJobBuilder()
+ super().__init__(sampleManager, schemaBuilder)
+
+ def createNewJob(self, script, sample, params):
+ self._check(script,sample)
+ self.sample = sample
+ self.params = copy.deepcopy(params)
+ main_job_work_dir = self.params.salome_parameters.result_directory
+ params_dic = params.dumpDict()
+ params_dic["salome_parameters"]["job_type"] = "command_salome"
+ params_dic["salome_parameters"]["job_file"] = self.schemaBuilder.getPointEval()
+ params_dic["salome_parameters"]["local_directory"] = main_job_work_dir
+ # set the parameters of the local job
+ self.params.salome_parameters.resource_required.name = "localhost"
+ self.params.salome_parameters.job_type = "command_salome" #"python_salome"
+
+ self.params.salome_parameters.work_directory = main_job_work_dir
+ self.params.createTmpResultDirectory()
+ result_directory = self.params.salome_parameters.result_directory
+ # export sample to result_directory
+ inputFiles = self.sampleManager.prepareRun(self.sample, result_directory)
+ inputFiles.extend([self.schemaBuilder.getExecutor(),
+ self.schemaBuilder.getPointEval()])
+ self.params.salome_parameters.job_file = self.schemaBuilder.getMainJob()
+ #schema_path, extra_files, config_info = self.schemaBuilder.buildSchema(result_directory)
+
+ # export config
+ configpath = os.path.join(result_directory, "idefixconfig.json")
+ dicconfig = {}
+ dicconfig["nbbranches"] = self.params.nb_branches
+ dicconfig["studymodule"] = "idefixstudy"
+ dicconfig["sampleIterator"] = self.sampleManager.getModuleName()
+ dicconfig["params"] = params_dic
+ dicconfig["plugin"] = self.schemaBuilder.getPluginName()
+ with open(configpath, "w") as f:
+ json.dump(dicconfig, f, indent=2)
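+ # the file written above typically looks like (illustrative values):
+ # {"nbbranches": 8, "studymodule": "idefixstudy",
+ #  "sampleIterator": "<sample manager module>", "params": {...},
+ #  "plugin": "executor"}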
+ studypath = os.path.join(result_directory, "idefixstudy.py")
+ with open(studypath, "w") as f:
+ f.write(script.script)
+
+ inputFiles.extend([configpath, studypath])
+
+ # this list manipulation is needed because in_files is not a Python list
+ # when no SALOME session is used: in that case SWIG maps the underlying
+ # std::list to a Python tuple in the parameters structure.
+ in_files_as_list = list(self.params.salome_parameters.in_files)
+ self.params.salome_parameters.in_files = in_files_as_list + inputFiles
+ launcher = salome_proxy.getLauncher()
+ self.job_id = launcher.createJob(self.params.salome_parameters)
+ return self.job_id
+
+ def jobType(self):
+ return "python_salome"
--- /dev/null
+SET(SCHEMA_FILES
+ executor.py
+ mainjob.py
+ pointeval.py
+ )
+
+INSTALL(FILES ${SCHEMA_FILES} DESTINATION ${SALOME_INSTALL_PYTHON}/pydefx/noyacs)
--- /dev/null
+import os
+import traceback
+import subprocess
+
+class Context:
+ def __init__(self):
+ # no launcher is needed here: evaluations run directly through srun
+ pass
+
+class JobExecutor:
+ def __init__(self, config):
+ self.config = config
+
+ def initialize(self):
+ """ This is executed once, before the first evaluation.
+ Make the evaluation script executable.
+ """
+ pointeval = os.path.join(os.getcwd(), "pointeval.py")
+ os.chmod(pointeval, 0o755)
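+ # pointeval.py is executed directly by srun (see runjob), so it needs
+ # to be executable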
+
+ def evaluate(self, idx, point):
+ """ This is executed for every point to be evaluated.
+ """
+ context = Context()
+ error = None
+ out_values = None
+ try:
+ self.prepare(idx, point, context)
+ if self.noRunFound(idx, point, context):
+ self.runjob(idx, point, context)
+ error, out_values = self.getResult(context)
+ except Exception as e:
+ error = str(e)
+ traceback.print_exc()
+ return error, out_values
+
+ def prepare(self, idx, point, context):
+ """
+ Define local and remote work directory.
+ Define job script.
+ """
+ root_dir = os.getcwd()
+ point_name = "job_"+str(idx)
+ context.local_dir = os.path.join(root_dir, point_name)
+ if not os.path.exists(context.local_dir):
+ os.mkdir(context.local_dir)
+ # export the point to a file
+ data_file_name = "idefixdata.csv"
+ data_file_path = os.path.join(context.local_dir, data_file_name)
+ with open(data_file_path, "w") as f:
+ # an explicit dict conversion is needed for compatibility between Python versions
+ f.write(repr(dict(point)))
+
+
+ def noRunFound(self, idx, point, context):
+ # placeholder for restart support: return False when a previous run of
+ # this point can be reused, in order to skip the evaluation
+ return True
+
+ def runjob(self, idx, point, context):
+ """
+ Create, launch and wait for the end of the job.
+ """
+ # run the evaluation as a single-task srun step in the point directory
+ pointeval = os.path.join(os.getcwd(), "pointeval.py")
+ command = "srun --ntasks=1 --nodes=1 --chdir={} {}".format(context.local_dir,
+ pointeval)
+ return_code = subprocess.call(command, shell=True)
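+ # the return code is not checked here: if the evaluation failed, the
+ # result files will be missing and getResult will raise, which
+ # evaluate() turns into an error entry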
+
+ def getResult(self, context):
+ """
+ Check the job state, fetch the result file.
+ """
+ error_file = os.path.join(context.local_dir, "idefixerror.txt")
+ result_file = os.path.join(context.local_dir, "idefixresult.txt")
+ with open(error_file, "r") as f:
+ error = f.read()
+ with open(result_file, "r") as f:
+ result_str = f.read()
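+ # the result file holds the repr() of the value returned by _exec
+ # (written by pointeval.py), so eval() rebuilds the Python object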
+ result = eval(result_str)
+
+ return error, result
+
+def createExecutor(config):
+ return JobExecutor(config)
--- /dev/null
+#! /usr/bin/env python3
+import json
+import importlib
+from multiprocessing.dummy import Pool
+import traceback
+
+class StartJob:
+ def __init__(self, executor):
+ self.executor = executor
+
+ def __call__(self, idx, in_values):
+ error = None
+ out_values = None
+ try:
+ error, out_values = self.executor.evaluate(idx, in_values)
+ except Exception as e:
+ error = str(e)
+ traceback.print_exc()
+ return idx, in_values, out_values, error
+
+class TerminateJob:
+ def __init__(self, manager):
+ self.manager = manager
+
+ def __call__(self, result):
+ # without a try block, an exception raised here could deadlock the pool
+ try:
+ idx, in_values, out_values, error = result
+ if not error:
+ error = None # normalize empty error strings to None
+ self.manager.addResult(idx, in_values, out_values, error)
+ except Exception as e:
+ traceback.print_exc()
+
+if __name__ == '__main__':
+ with open("idefixconfig.json", "r") as f:
+ config = json.load(f)
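+ # the configuration is written by NoYacsStudy.createNewJob and holds
+ # "nbbranches", "studymodule", "sampleIterator" and "plugin" (no
+ # "params" key in the noyacs case)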
+ plugin_module = importlib.import_module(config["plugin"])
+ executor = plugin_module.createExecutor(config)
+ # global initialization - common work shared by every evaluation
+ executor.initialize()
+
+ itModuleName = config["sampleIterator"]
+ itModule = importlib.import_module(itModuleName)
+ sampleManager = itModule.SampleIterator()
+ sampleManager.writeHeaders()
+
+ nbbranches=config["nbbranches"]
+ pool = Pool(nbbranches)
+ runPoint = StartJob(executor)
+ endOk = TerminateJob(sampleManager)
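+ # each item yielded by the sample iterator is an (idx, in_values) tuple,
+ # which apply_async unpacks as the arguments of StartJob.__call__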
+ for point in sampleManager:
+ pool.apply_async(runPoint, point, callback=endOk)
+ pool.close()
+ pool.join()
+ sampleManager.terminate()
--- /dev/null
+#! /usr/bin/env python3
+import traceback
+import os
+
+data_file_name = "idefixdata.csv"
+study_module = "idefixstudy.py"
+error_result = "idefixerror.txt"
+value_result = "idefixresult.txt"
+traceback_result = "idefixtraceback.txt"
+
+with open(data_file_name, "r") as f:
+ values = f.read()
+inputvals = eval(values)
+
+error=""
+result=None
+old_dir = os.getcwd()
+
+try:
+ os.chdir("..") # go to commun root directory
+ with open(study_module, "r") as study_file:
+ study_string = study_file.read()
+ exec(study_string)
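+ # exec() above is expected to define _exec in the current namespace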
+ result = _exec(**inputvals)
+except Exception as e:
+ error = str(e)
+ os.chdir(old_dir) # back to the current case job directory
+ with open(traceback_result, "w") as f:
+ traceback.print_exc(file=f)
+
+os.chdir(old_dir) # back to the current case job directory
+
+with open(error_result, "w") as f:
+ f.write(error)
+
+with open(value_result, "w") as f:
+ f.write(repr(result))
--- /dev/null
+# -*- coding: utf-8 -*-
+# Copyright (C) 2019 EDF R&D
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+import inspect
+import pathlib
+import os
+
+class NoYacsBuilder:
+ def __init__(self, executor=None, pointEval=None, mainJob=None):
+ filename = inspect.getframeinfo(inspect.currentframe()).filename
+ install_root_directory = pathlib.Path(filename).resolve().parent
+ install_files_directory = os.path.join(install_root_directory, "noyacs")
+
+ if executor is None:
+ executor = os.path.join(install_files_directory, "executor.py")
+ self.executor = executor
+
+ if pointEval is None:
+ pointEval = os.path.join(install_files_directory, "pointeval.py")
+ self.pointEval = pointEval
+
+ if mainJob is None:
+ mainJob = os.path.join(install_files_directory, "mainjob.py")
+ self.mainJob = mainJob
+
+ def getMainJob(self):
+ return self.mainJob
+
+ def getExecutor(self):
+ return self.executor
+
+ def getPointEval(self):
+ return self.pointEval
+
+ def getPluginName(self):
+ basename = os.path.basename(self.executor)
+ if not basename.endswith(".py"):
+ raise Exception("File name {} does not end with '.py'.".format(
+ self.executor))
+ return basename[:-3]
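+
+# Usage sketch (the path below is hypothetical), mirroring MultiJobBuilder:
+#   builder = NoYacsBuilder(executor="/path/to/my_executor.py")
+#   study = pydefx.NoYacsStudy(schemaBuilder=builder)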
--- /dev/null
+# -*- coding: utf-8 -*-
+# Copyright (C) 2019 EDF R&D
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+import copy
+import os
+import json
+from . import pystudy
+from . import noyacsbuilder
+from . import salome_proxy
+
+
+class NoYacsStudy(pystudy.PyStudy):
+ def __init__(self, sampleManager=None, schemaBuilder=None):
+ if schemaBuilder is None:
+ schemaBuilder = noyacsbuilder.NoYacsBuilder()
+ super().__init__(sampleManager, schemaBuilder)
+
+ def createNewJob(self, script, sample, params):
+ # TODO: rework this copy/paste (largely duplicated from MultiJobStudy.createNewJob)
+ self._check(script,sample)
+ self.sample = sample
+ self.params = copy.deepcopy(params)
+ main_job_work_dir = self.params.salome_parameters.result_directory
+ if not os.path.exists(main_job_work_dir):
+ os.makedirs(main_job_work_dir)
+ # set the parameters of the local job
+ self.params.salome_parameters.job_type = self.jobType()
+
+ result_directory = self.params.salome_parameters.result_directory
+ # export sample to result_directory
+ inputFiles = self.sampleManager.prepareRun(self.sample, result_directory)
+ inputFiles.extend([self.schemaBuilder.getExecutor(),
+ self.schemaBuilder.getPointEval()])
+ self.params.salome_parameters.job_file = self.schemaBuilder.getMainJob()
+
+ # export config
+ configpath = os.path.join(result_directory, "idefixconfig.json")
+ dicconfig = {}
+ dicconfig["nbbranches"] = self.params.nb_branches
+ dicconfig["studymodule"] = "idefixstudy"
+ dicconfig["sampleIterator"] = self.sampleManager.getModuleName()
+ dicconfig["plugin"] = self.schemaBuilder.getPluginName()
+ with open(configpath, "w") as f:
+ json.dump(dicconfig, f, indent=2)
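+ # the file written above typically looks like (illustrative values):
+ # {"nbbranches": 8, "studymodule": "idefixstudy",
+ #  "sampleIterator": "<sample manager module>", "plugin": "executor"}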
+ studypath = os.path.join(result_directory, "idefixstudy.py")
+ with open(studypath, "w") as f:
+ f.write(script.script)
+
+ inputFiles.extend([configpath, studypath])
+
+ # this list manipulation is needed because in_files is not a Python list
+ # when no SALOME session is used: in that case SWIG maps the underlying
+ # std::list to a Python tuple in the parameters structure.
+ in_files_as_list = list(self.params.salome_parameters.in_files)
+ self.params.salome_parameters.in_files = in_files_as_list + inputFiles
+ launcher = salome_proxy.getLauncher()
+ self.job_id = launcher.createJob(self.params.salome_parameters)
+ return self.job_id
+
+ def jobType(self):
+ return "command_salome"
newobj.salome_parameters.mem_per_cpu = self.salome_parameters.mem_per_cpu
newobj.salome_parameters.wckey = self.salome_parameters.wckey
newobj.salome_parameters.extra_params = self.salome_parameters.extra_params
- newobj.salome_parameters.specific_parameters = self.salome_parameters.specific_parameters
+ #newobj.salome_parameters.specific_parameters = self.salome_parameters.specific_parameters
newobj.salome_parameters.resource_required.name = self.salome_parameters.resource_required.name
newobj.salome_parameters.resource_required.hostname = self.salome_parameters.resource_required.hostname
newobj.salome_parameters.resource_required.can_launch_batch_jobs = self.salome_parameters.resource_required.can_launch_batch_jobs
newobj.salome_parameters.resource_required.nb_proc_per_node = self.salome_parameters.resource_required.nb_proc_per_node
return newobj
+
+ def dumpDict(self):
+ """Create a dictionary with all the properties.
+ Can be used for serialization with json."""
+ newdict = {
+ "nb_branches" : self.nb_branches,
+ "salome_parameters" : {
+ "job_name" : self.salome_parameters.job_name,
+ "job_type" : self.salome_parameters.job_type,
+ "job_file" : self.salome_parameters.job_file,
+ "pre_command" : self.salome_parameters.pre_command,
+ "env_file" : self.salome_parameters.env_file,
+ "in_files" : list(self.salome_parameters.in_files),
+ "out_files" : list(self.salome_parameters.out_files),
+ "work_directory" : self.salome_parameters.work_directory,
+ "local_directory" : self.salome_parameters.local_directory,
+ "result_directory" : self.salome_parameters.result_directory,
+ "maximum_duration" : self.salome_parameters.maximum_duration,
+ "queue" : self.salome_parameters.queue,
+ "partition" : self.salome_parameters.partition,
+ "exclusive" : self.salome_parameters.exclusive,
+ "mem_per_cpu" : self.salome_parameters.mem_per_cpu,
+ "wckey" : self.salome_parameters.wckey,
+ "extra_params" : self.salome_parameters.extra_params,
+ #"specific_parameters" : str(self.salome_parameters.specific_parameters),
+ "resource_required" : {
+ "name" : self.salome_parameters.resource_required.name,
+ "hostname" : self.salome_parameters.resource_required.hostname,
+ "can_launch_batch_jobs" : self.salome_parameters.resource_required.can_launch_batch_jobs,
+ "can_run_containers" : self.salome_parameters.resource_required.can_run_containers,
+ "OS" : self.salome_parameters.resource_required.OS,
+ "nb_proc" : self.salome_parameters.resource_required.nb_proc,
+ "mem_mb" : self.salome_parameters.resource_required.mem_mb,
+ "cpu_clock" : self.salome_parameters.resource_required.cpu_clock,
+ "nb_node" : self.salome_parameters.resource_required.nb_node,
+ "nb_proc_per_node" : self.salome_parameters.resource_required.nb_proc_per_node
+ }
+ }
+ }
+ return newdict
+
+ def loadDict(self, dico):
+ self.nb_branches = dico["nb_branches"]
+ #self.salome_parameters = salome_proxy.createSalomeParameters()
+ self.salome_parameters.job_name = dico["salome_parameters"]["job_name"]
+ self.salome_parameters.job_type = dico["salome_parameters"]["job_type"]
+ self.salome_parameters.job_file = dico["salome_parameters"]["job_file"]
+ self.salome_parameters.pre_command = dico["salome_parameters"]["pre_command"]
+ self.salome_parameters.env_file = dico["salome_parameters"]["env_file"]
+ self.salome_parameters.in_files = dico["salome_parameters"]["in_files"]
+ self.salome_parameters.out_files = dico["salome_parameters"]["out_files"]
+ self.salome_parameters.work_directory = dico["salome_parameters"]["work_directory"]
+ self.salome_parameters.local_directory = dico["salome_parameters"]["local_directory"]
+ self.salome_parameters.result_directory = dico["salome_parameters"]["result_directory"]
+ self.salome_parameters.maximum_duration = dico["salome_parameters"]["maximum_duration"]
+ self.salome_parameters.queue = dico["salome_parameters"]["queue"]
+ self.salome_parameters.partition = dico["salome_parameters"]["partition"]
+ self.salome_parameters.exclusive = dico["salome_parameters"]["exclusive"]
+ self.salome_parameters.mem_per_cpu = dico["salome_parameters"]["mem_per_cpu"]
+ self.salome_parameters.wckey = dico["salome_parameters"]["wckey"]
+ self.salome_parameters.extra_params = dico["salome_parameters"]["extra_params"]
+ self.salome_parameters.resource_required.name = dico["salome_parameters"]["resource_required"]["name"]
+ self.salome_parameters.resource_required.hostname = dico["salome_parameters"]["resource_required"]["hostname"]
+ self.salome_parameters.resource_required.can_launch_batch_jobs = dico["salome_parameters"]["resource_required"]["can_launch_batch_jobs"]
+ self.salome_parameters.resource_required.can_run_containers = dico["salome_parameters"]["resource_required"]["can_run_containers"]
+ self.salome_parameters.resource_required.OS = dico["salome_parameters"]["resource_required"]["OS"]
+ self.salome_parameters.resource_required.nb_proc = dico["salome_parameters"]["resource_required"]["nb_proc"]
+ self.salome_parameters.resource_required.mem_mb = dico["salome_parameters"]["resource_required"]["mem_mb"]
+ self.salome_parameters.resource_required.cpu_clock = dico["salome_parameters"]["resource_required"]["cpu_clock"]
+ self.salome_parameters.resource_required.nb_node = dico["salome_parameters"]["resource_required"]["nb_node"]
+ self.salome_parameters.resource_required.nb_proc_per_node = dico["salome_parameters"]["resource_required"]["nb_proc_per_node"]
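+
+ # Usage sketch for dumpDict/loadDict (illustrative; assumes an existing
+ # Parameters instance named params):
+ #   import json
+ #   saved = json.dumps(params.dumpDict())
+ #   restored = Parameters()
+ #   restored.loadDict(json.loads(saved))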
def writeHeaders(self):
"""
- This function can be called after initInputIterator and before the first
- call to addResult in order to write the names of the parameters in the
- result file.
+ This function can be called before the first call to addResult in order to
+ write the names of the parameters in the result file.
"""
if self.directory:
outputnamespath = os.path.join(self.directory,
def addResult(self, currentId, currentInput, currentOutput, currentError):
"""
- You need to call initInputIterator and writeHeaders before the first call
- of this function.
+ You need to call writeHeaders before the first call of this function.
currentId : int value
currentInput : dictionary {"input name":value}
currentOutput : result returned by _exec. Can be a tuple, a simple value or