#
SET(SCRIPTS
__init__.py
+ allpurposebuilder.py
configuration.py
parameters.py
pyscript.py
multijobstudy.py
slurmbuilder.py
slurmstudy.py
+ localbuilder.py
+ localstudy.py
+ execconfiguration.py
)
INSTALL(FILES ${SCRIPTS} DESTINATION ${SALOME_INSTALL_PYTHON}/pydefx)
ADD_SUBDIRECTORY(schemas)
-ADD_SUBDIRECTORY(multijob)
-ADD_SUBDIRECTORY(slurm)
+ADD_SUBDIRECTORY(plugins)
+#ADD_SUBDIRECTORY(multijob)
+#ADD_SUBDIRECTORY(slurm)
from .pystudy import PyStudy
from .sample import Sample
from .defaultschemabuilder import DefaultSchemaBuilder
+from .allpurposebuilder import AllPurposeBuilder
+from .localbuilder import LocalBuilder
+from .multijobbuilder import MultiJobBuilder
+from .slurmbuilder import SlurmBuilder
from .salome_proxy import forceSalomeServers, forceNoSalomeServers
from .multijobstudy import MultiJobStudy
from .slurmstudy import SlurmStudy
+from .localstudy import LocalStudy
--- /dev/null
+# -*- coding: utf-8 -*-
+# Copyright (C) 2019 EDF R&D
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+import inspect
+import pathlib
+import os
+
+class AllPurposeBuilder:
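+ """
+ Base class for the schema builders. It keeps the paths of the three
+ scripts needed by a study: the executor plugin (mandatory), the point
+ evaluation script and the main job script (both defaulting to the files
+ installed in the "plugins" directory).
+ """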
+ def __init__(self, executor = None, pointEval = None, mainJob = None):
+ filename = inspect.getframeinfo(inspect.currentframe()).filename
+ install_root_directory = pathlib.Path(filename).resolve().parent
+ install_files_directory = os.path.join(install_root_directory, "plugins")
+
+ if executor is None:
+ raise TypeError("Parameter executor should not be None.")
+ self.executor = executor
+
+ if pointEval is None:
+ pointEval = os.path.join(install_files_directory, "pointeval.py")
+ self.pointEval = pointEval
+
+ if mainJob is None:
+ mainJob = os.path.join(install_files_directory, "mainjob.py")
+ self.mainJob = mainJob
+
+ def getMainJob(self):
+ return self.mainJob
+
+ def getExecutor(self):
+ return self.executor
+
+ def getPointEval(self):
+ return self.pointEval
+
+ def getPluginName(self):
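+ """
+ Return the module name of the executor plugin: the base name of the
+ executor file without its ".py" extension, as imported with importlib
+ by the main job script.
+ """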
+ basename = os.path.basename(self.executor)
+ if not basename.endswith(".py"):
+ raise ValueError("File name {} does not end with '.py'.".format(
+ self.executor))
+ return basename[:-3]
--- /dev/null
+# -*- coding: utf-8 -*-
+# Copyright (C) 2019 EDF R&D
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+import json
+from . import parameters
+
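+# Both functions below read "idefixconfig.json", the configuration file
+# exported by the study to the job working directory. Expected layout (see
+# LocalStudy.createNewJob):
+# {
+#   "nbbranches": <number of parallel evaluations>,
+#   "studymodule": "idefixstudy",
+#   "sampleIterator": <module name of the sample manager>,
+#   "params": <dump of the Parameters object>,
+#   "plugin": <module name of the executor plugin>
+# }
+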
+def SalomeParameters():
+ """
+ This function can be called during the evaluation of a point in order to get
+ the parameters of the job.
+ """
+ result = None
+ try:
+ with open("idefixconfig.json", "r") as f:
+ config = json.load(f)
+ params = parameters.Parameters()
+ params.loadDict(config["params"])
+ result = params.salome_parameters
+ except Exception:
+ result = None
+ return result
+
+def GetConfig():
+ with open("idefixconfig.json", "r") as f:
+ config = json.load(f)
+ return config
--- /dev/null
+# -*- coding: utf-8 -*-
+# Copyright (C) 2019 EDF R&D
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+import inspect
+import pathlib
+import os
+from .allpurposebuilder import AllPurposeBuilder
+
+class LocalBuilder(AllPurposeBuilder):
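+ """
+ Schema builder for studies running on the local computer. The executor
+ can be "localexecutor" (each point is evaluated in a separate process)
+ or "lightexecutor" (each point is evaluated in the current Python
+ process); it defaults to "localexecutor".
+ """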
+ def __init__(self, executor = None, pointEval = None, mainJob = None):
+ filename = inspect.getframeinfo(inspect.currentframe()).filename
+ install_root_directory = pathlib.Path(filename).resolve().parent
+ install_files_directory = os.path.join(install_root_directory, "plugins")
+ if executor is None or executor in ("localexecutor", "localexecutor.py"):
+ executor = os.path.join(install_files_directory, "localexecutor.py")
+ elif executor in ("lightexecutor", "lightexecutor.py"):
+ executor = os.path.join(install_files_directory, "lightexecutor.py")
+ super().__init__(executor, pointEval, mainJob)
+
--- /dev/null
+# -*- coding: utf-8 -*-
+# Copyright (C) 2019 EDF R&D
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+import copy
+import os
+import json
+from . import pystudy
+from . import localbuilder
+from . import salome_proxy
+
+
+class LocalStudy(pystudy.PyStudy):
+ """
+ This study uses one different job for each evaluation.
+ """
+ def __init__(self, sampleManager=None, schemaBuilder=None):
+ if schemaBuilder is None:
+ schemaBuilder = localbuilder.LocalBuilder()
+ super().__init__(sampleManager, schemaBuilder)
+
+ def createNewJob(self, script, sample, params):
+ self._check(script,sample)
+ self.sample = sample
+ self.params = copy.deepcopy(params)
+ main_job_work_dir = os.path.join(
+ self.params.salome_parameters.work_directory,
+ "idefixjob")
+ # dump the original job parameters to the configuration file
+ params_dic = params.dumpDict()
+ # adapt the parameters for the main job, which runs the evaluation loop
+ # on the local computer
+ self.params.salome_parameters.resource_required.name = "localhost"
+ self.params.salome_parameters.job_type = "command_salome"
+ self.params.salome_parameters.work_directory = main_job_work_dir
+ self.params.createTmpResultDirectory()
+ result_directory = self.params.salome_parameters.result_directory
+ # export sample to result_directory
+ inputFiles = self.sampleManager.prepareRun(self.sample, result_directory)
+ inputFiles.extend([self.schemaBuilder.getExecutor(),
+ self.schemaBuilder.getPointEval()])
+ self.params.salome_parameters.job_file = self.schemaBuilder.getMainJob()
+
+ # export config
+ configpath = os.path.join(result_directory, "idefixconfig.json")
+ dicconfig = {}
+ dicconfig["nbbranches"] = self.params.nb_branches
+ dicconfig["studymodule"] = "idefixstudy"
+ dicconfig["sampleIterator"] = self.sampleManager.getModuleName()
+ dicconfig["params"] = params_dic
+ dicconfig["plugin"] = self.schemaBuilder.getPluginName()
+ with open(configpath, "w") as f:
+ json.dump(dicconfig, f, indent=2)
+ studypath = os.path.join(result_directory, "idefixstudy.py")
+ with open(studypath, "w") as f:
+ f.write(script.script)
+
+ inputFiles.extend([configpath, studypath])
+
+ # This list manipulation is needed because in_files is not a Python list
+ # when we run without a SALOME session: in that case SWIG maps the
+ # std::list member of the structure to a Python tuple.
+ in_files_as_list = list(self.params.salome_parameters.in_files)
+ self.params.salome_parameters.in_files = in_files_as_list + inputFiles
+ launcher = salome_proxy.getLauncher()
+ self.job_id = launcher.createJob(self.params.salome_parameters)
+ return self.job_id
+
+ def jobType(self):
+ return "command_salome"
+++ /dev/null
-SET(SCHEMA_FILES
- executor.py
- mainjob.py
- pointeval.py
- )
-
-INSTALL(FILES ${SCHEMA_FILES} DESTINATION ${SALOME_INSTALL_PYTHON}/pydefx/multijob)
+++ /dev/null
-import pydefx
-import os
-import pickle
-import time
-import traceback
-
-class Context:
- def __init__(self):
- self.launcher = pydefx.salome_proxy.getLauncher() # getLauncher()
- pass
-
-class JobExecutor:
- def __init__(self, config):
- self.config = config
-
- def initialize(self):
- """ This is executed before the first evaluation.
- Put here global processing needed by all the evaluations like the copy of
- commun files.
- """
- # Copy the commun files to the root work directory
- params = pydefx.Parameters() # global parameters
- params.loadDict(self.config["params"])
- # use a fake empty command.
- # Using launcher to copy some files on the remote file system,
- # without launching a job.
- command = os.path.join(os.getcwd(), "empty.sh")
- open(command, "w").close()
- params.salome_parameters.job_file = command
- params.salome_parameters.job_type = "command"
- study_module = os.path.join(os.getcwd(), self.config["studymodule"]+".py")
- infiles = list(params.salome_parameters.in_files)
- params.salome_parameters.in_files = infiles + [study_module]
- launcher = pydefx.salome_proxy.getLauncher()
- job_id = launcher.createJob(params.salome_parameters)
- launcher.exportInputFiles(job_id)
-
- def evaluate(self, idx, point):
- """ This is executed for every point to be evaluated.
- """
- context = Context()
- error = None
- out_values = None
- try:
- self.prepare(idx, point, context)
- if self.noRunFound(idx, point, context):
- self.runjob(idx, point, context)
- error, out_values = self.getResult(context)
- except Exception as e:
- error = str(e)
- traceback.print_exc()
- return error, out_values
-
- def prepare(self, idx, point, context):
- """
- Define local and remote work directory.
- Define job script.
- """
- context.params = pydefx.Parameters()
- context.params.loadDict(self.config["params"])
- salome_parameters = context.params.salome_parameters
- root_local_dir = salome_parameters.result_directory
- root_remote_dir = salome_parameters.work_directory
- input_files = [] # commun files are already copied to the root directory
- point_name = "job_"+str(idx)
- context.local_dir = os.path.join(root_local_dir, point_name)
- point_remote_dir = os.path.join(root_remote_dir, point_name)
- if not os.path.exists(context.local_dir):
- os.mkdir(context.local_dir)
- # export the point to a file
- data_file_name = "idefixdata.csv"
- data_file_path = os.path.join(context.local_dir, data_file_name)
- with open(data_file_path, "w") as f:
- # explicit dict convertion is needed for compatibility between python versions
- f.write(repr(dict(point)))
- input_files.append(data_file_path)
-
- #command_path = os.path.join(root_local_dir, "command.py")
- #salome_parameters.job_type = "command_salome"
- #salome_parameters.job_file = command_path
-
- salome_parameters.in_files = input_files
- salome_parameters.out_files = ["idefixresult.txt", "idefixerror.txt"]
- salome_parameters.work_directory = point_remote_dir
- salome_parameters.result_directory = context.local_dir
-
- def noRunFound(self, idx, point, context):
- return True
-
- def runjob(self, idx, point, context):
- """
- Create, launch and wait for the end of the job.
- """
- import random
- sleep_delay = random.randint(5, 15) #10
- #launcher = pydefx.salome_proxy.getLauncher()
- launcher = context.launcher
- context.job_id = launcher.createJob(context.params.salome_parameters)
- launcher.launchJob(context.job_id)
- jobState = launcher.getJobState(context.job_id)
- while jobState=="QUEUED" or jobState=="IN_PROCESS" or jobState=="RUNNING" :
- time.sleep(sleep_delay)
- jobState = launcher.getJobState(context.job_id)
-
- def getResult(self, context):
- """
- Check the job state, fetch the result file.
- """
- #launcher = pydefx.salome_proxy.getLauncher()
- launcher = context.launcher
- jobState = launcher.getJobState(context.job_id)
- error=""
- result=None
- if jobState != "FINISHED" :
- error = "Job has not finished correctly."
- else:
- launcher.getJobResults(context.job_id, "")
- error_file = os.path.join(context.local_dir, "idefixerror.txt")
- result_file = os.path.join(context.local_dir, "idefixresult.txt")
- with open(error_file, "r") as f:
- error = f.read()
- with open(result_file, "r") as f:
- result_str = f.read()
- result = eval(result_str)
-
- return error, result
-
-def createExecutor(config):
- return JobExecutor(config)
+++ /dev/null
-#! /usr/bin/env python3
-import json
-import importlib
-from multiprocessing import Pool
-import traceback
-
-class StartJob:
- def __init__(self, executor):
- self.executor = executor
-
- def __call__(self, idx, in_values):
- error=None
- out_values=None
- try:
- error, out_values = self.executor.evaluate(idx, in_values)
- except Exception as e:
- error=str(e)
- traceback.print_exc()
- return idx, in_values, out_values, error
-
-class TerminateJob:
- def __init__(self, manager):
- self.manager = manager
-
- def __call__(self, result):
- # without try statement we may experience deadlock in case of error.
- try:
- idx, in_values, out_values, error = result
- if not error:
- error = None
- self.manager.addResult(idx, in_values, out_values, error)
- except Exception as e:
- traceback.print_exc()
-
-if __name__ == '__main__':
- with open("idefixconfig.json", "r") as f:
- config = json.load(f)
- plugin_module = importlib.import_module(config["plugin"])
- executor = plugin_module.createExecutor(config)
- # global initialization - commun work for every evaluation.
- executor.initialize()
-
- itModuleName = config["sampleIterator"]
- itModule = importlib.import_module(itModuleName)
- sampleManager = itModule.SampleIterator()
- sampleManager.writeHeaders()
-
- nbbranches=config["nbbranches"]
- pool = Pool(nbbranches)
- runPoint = StartJob(executor)
- endOk = TerminateJob(sampleManager)
- for point in sampleManager:
- pool.apply_async(runPoint, point, callback=endOk)
- pool.close()
- pool.join()
- sampleManager.terminate()
+++ /dev/null
-#! /usr/bin/env python3
-import traceback
-import os
-
-data_file_name = "idefixdata.csv"
-study_module = "idefixstudy.py"
-error_result = "idefixerror.txt"
-value_result = "idefixresult.txt"
-traceback_result = "idefixtraceback.txt"
-
-with open(data_file_name, "r") as f:
- values = f.read()
-inputvals = eval(values)
-
-error=""
-result=None
-old_dir = os.getcwd()
-
-try:
- os.chdir("..") # go to commun root directory
- with open(study_module, "r") as study_file:
- study_string = study_file.read()
- exec(study_string)
- result = _exec(**inputvals)
-except Exception as e:
- error=str(e)
- os.chdir(old_dir) # back to the current case job directory
- with open(traceback_result, "w") as f:
- traceback.print_exc(file=f)
-
-os.chdir(old_dir) # back to the current case job directory
-
-with open(error_result, "w") as f:
- f.write(error)
-
-with open(value_result, "w") as f:
- f.write(repr(result))
import inspect
import pathlib
import os
+from .allpurposebuilder import AllPurposeBuilder
-class MultiJobBuilder:
+class MultiJobBuilder(AllPurposeBuilder):
def __init__(self, executor = None, pointEval = None, mainJob = None):
filename = inspect.getframeinfo(inspect.currentframe()).filename
install_root_directory = pathlib.Path(filename).resolve().parent
- install_files_directory = os.path.join(install_root_directory, "multijob")
+ install_files_directory = os.path.join(install_root_directory, "plugins")
if executor is None:
- executor = os.path.join(install_files_directory, "executor.py")
- self.executor = executor
-
- if pointEval is None:
- pointEval = os.path.join(install_files_directory, "pointeval.py")
- self.pointEval = pointEval
-
- if mainJob is None:
- mainJob = os.path.join(install_files_directory, "mainjob.py")
- self.mainJob = mainJob
-
- def getMainJob(self):
- return self.mainJob
-
- def getExecutor(self):
- return self.executor
-
- def getPointEval(self):
- return self.pointEval
-
- def getPluginName(self):
- basename = os.path.basename(self.executor)
- if not basename.endswith(".py"):
- raise Exception("File name {} does not end with '.py'.".format(
- self.executor))
- return basename[:-3]
+ executor = os.path.join(install_files_directory, "jobexecutor.py")
+ super().__init__(executor, pointEval, mainJob)
inputFiles.extend([self.schemaBuilder.getExecutor(),
self.schemaBuilder.getPointEval()])
self.params.salome_parameters.job_file = self.schemaBuilder.getMainJob()
- #schema_path, extra_files, config_info = self.schemaBuilder.buildSchema(result_directory)
# export config
configpath = os.path.join(result_directory, "idefixconfig.json")
return self.job_id
def jobType(self):
- return "python_salome"
+ return "command_salome"
--- /dev/null
+SET(SCHEMA_FILES
+ jobexecutor.py
+ lightexecutor.py
+ localexecutor.py
+ srunexecutor.py
+ mainjob.py
+ pointeval.py
+ )
+
+INSTALL(FILES ${SCHEMA_FILES} DESTINATION ${SALOME_INSTALL_PYTHON}/pydefx/plugins)
--- /dev/null
+import pydefx
+import os
+import time
+import traceback
+
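+# Executor plugin: every point of the sample is evaluated in its own SALOME
+# job, created and monitored through the launcher.
+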
+class Context:
+ def __init__(self):
+ self.launcher = pydefx.salome_proxy.getLauncher()
+
+class JobExecutor:
+ def __init__(self, config):
+ self.config = config
+
+ def initialize(self):
+ """ This is executed before the first evaluation.
+ Put here global processing needed by all the evaluations like the copy of
+ commun files.
+ """
+ # Copy the commun files to the root work directory
+ params = pydefx.Parameters() # global parameters
+ params.loadDict(self.config["params"])
+ # Use a fake empty command: the launcher is only used here to copy files
+ # to the remote file system, without actually launching a job.
+ command = os.path.join(os.getcwd(), "empty.sh")
+ open(command, "w").close()
+ params.salome_parameters.job_file = command
+ params.salome_parameters.job_type = "command"
+ study_module = os.path.join(os.getcwd(), self.config["studymodule"]+".py")
+ infiles = list(params.salome_parameters.in_files)
+ params.salome_parameters.in_files = infiles + [study_module]
+ launcher = pydefx.salome_proxy.getLauncher()
+ job_id = launcher.createJob(params.salome_parameters)
+ launcher.exportInputFiles(job_id)
+
+ def evaluate(self, idx, point):
+ """ This is executed for every point to be evaluated.
+ """
+ context = Context()
+ error = None
+ out_values = None
+ try:
+ self.prepare(idx, point, context)
+ if self.noRunFound(idx, point, context):
+ self.runjob(idx, point, context)
+ error, out_values = self.getResult(context)
+ except Exception as e:
+ error = str(e)
+ traceback.print_exc()
+ return error, out_values
+
+ def prepare(self, idx, point, context):
+ """
+ Define local and remote work directory.
+ Define job script.
+ """
+ context.params = pydefx.Parameters()
+ context.params.loadDict(self.config["params"])
+ salome_parameters = context.params.salome_parameters
+ root_local_dir = salome_parameters.result_directory
+ root_remote_dir = salome_parameters.work_directory
+ input_files = [] # common files are already copied to the root directory
+ point_name = "job_"+str(idx)
+ context.local_dir = os.path.join(root_local_dir, point_name)
+ point_remote_dir = os.path.join(root_remote_dir, point_name)
+ if not os.path.exists(context.local_dir):
+ os.mkdir(context.local_dir)
+ # export the point to a file
+ data_file_name = "idefixdata.csv"
+ data_file_path = os.path.join(context.local_dir, data_file_name)
+ with open(data_file_path, "w") as f:
+ # explicit dict conversion is needed for compatibility between Python versions
+ f.write(repr(dict(point)))
+ input_files.append(data_file_path)
+
+ salome_parameters.in_files = input_files
+ salome_parameters.out_files = ["idefixresult.txt", "idefixerror.txt"]
+ salome_parameters.work_directory = point_remote_dir
+ salome_parameters.result_directory = context.local_dir
+
+ def noRunFound(self, idx, point, context):
+ return True
+
+ def runjob(self, idx, point, context):
+ """
+ Create, launch and wait for the end of the job.
+ """
+ import random
+ sleep_delay = random.randint(5, 15) # polling period in seconds
+ launcher = context.launcher
+ context.job_id = launcher.createJob(context.params.salome_parameters)
+ launcher.launchJob(context.job_id)
+ jobState = launcher.getJobState(context.job_id)
+ while jobState in ("QUEUED", "IN_PROCESS", "RUNNING"):
+ time.sleep(sleep_delay)
+ jobState = launcher.getJobState(context.job_id)
+
+ def getResult(self, context):
+ """
+ Check the job state, fetch the result file.
+ """
+ launcher = context.launcher
+ jobState = launcher.getJobState(context.job_id)
+ error=""
+ result=None
+ if jobState != "FINISHED" :
+ error = "Job has not finished correctly."
+ else:
+ launcher.getJobResults(context.job_id, "")
+ error_file = os.path.join(context.local_dir, "idefixerror.txt")
+ result_file = os.path.join(context.local_dir, "idefixresult.txt")
+ with open(error_file, "r") as f:
+ error = f.read()
+ with open(result_file, "r") as f:
+ result_str = f.read()
+ result = eval(result_str)
+
+ return error, result
+
+def createExecutor(config):
+ return JobExecutor(config)
--- /dev/null
+import importlib
+import traceback
+
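+# Executor plugin: every point is evaluated in the current Python process by
+# importing the study module and calling its _exec function directly.
+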
+class Context:
+ def __init__(self):
+ # no launcher is needed: the evaluation runs in the current process
+ pass
+
+class JobExecutor:
+ def __init__(self, config):
+ self.config = config
+
+ def initialize(self):
+ """ This is executed before the first evaluation.
+ Put here global processing needed by all the evaluations, like the copy
+ of common files.
+ """
+ pass
+
+ def evaluate(self, idx, point):
+ """ This is executed for every point to be evaluated.
+ """
+ context = Context()
+ error = None
+ out_values = None
+ studymodule=self.config["studymodule"]
+ #studymodule += ".py"
+ #with open(studymodule, "r") as study_file:
+ #study_string = study_file.read()
+ #try:
+ #exec(study_string)
+ #out_values = _exec(**inputvals)
+ #except Exception as e:
+ #error=str(e)
+ #traceback.print_exc()
+ # another way:
+ import importlib
+ try:
+ idefixstudy=importlib.import_module(studymodule)
+ out_values=idefixstudy._exec(**point)
+ except Exception as e:
+ error=str(e)
+ traceback.print_exc()
+ return error, out_values
+
+
+def createExecutor(config):
+ return JobExecutor(config)
--- /dev/null
+import os
+import subprocess
+import traceback
+
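+# Executor plugin: every point is evaluated in a separate process on the
+# local computer, by running pointeval.py in the point's own directory.
+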
+class Context:
+ def __init__(self):
+ # no launcher is needed: evaluations run as local processes
+ pass
+
+class JobExecutor:
+ def __init__(self, config):
+ self.config = config
+
+ def initialize(self):
+ """ This is executed before the first evaluation.
+ Put here global processing needed by all the evaluations, like the copy
+ of common files.
+ """
+ pointeval = os.path.join(os.getcwd(), "pointeval.py")
+ os.chmod(pointeval, 0o755)
+
+ def evaluate(self, idx, point):
+ """ This is executed for every point to be evaluated.
+ """
+ context = Context()
+ error = None
+ out_values = None
+ try:
+ self.prepare(idx, point, context)
+ if self.noRunFound(idx, point, context):
+ self.runjob(idx, point, context)
+ error, out_values = self.getResult(context)
+ except Exception as e:
+ error = str(e)
+ traceback.print_exc()
+ return error, out_values
+
+ def prepare(self, idx, point, context):
+ """
+ Define local and remote work directory.
+ Define job script.
+ """
+ root_dir = os.getcwd()
+ point_name = "job_"+str(idx)
+ context.local_dir = os.path.join(root_dir, point_name)
+ if not os.path.exists(context.local_dir):
+ os.mkdir(context.local_dir)
+ # export the point to a file
+ data_file_name = "idefixdata.csv"
+ data_file_path = os.path.join(context.local_dir, data_file_name)
+ with open(data_file_path, "w") as f:
+ # explicit dict conversion is needed for compatibility between Python versions
+ f.write(repr(dict(point)))
+
+
+ def noRunFound(self, idx, point, context):
+ return True
+
+ def runjob(self, idx, point, context):
+ """
+ Create, launch and wait for the end of the job.
+ """
+ # srun
+ #ntasks = self.config["tasksPerEval"]
+ pointeval = os.path.join(os.getcwd(), "pointeval.py")
+ #command = "srun --ntasks={} --nodes=1 --chdir={} {} ".format(
+ #str(ntasks),
+ #context.local_dir,
+ #pointeval)
+ return_code = subprocess.check_call(pointeval, shell=True, cwd=context.local_dir)
+
+ def getResult(self, context):
+ """
+ Check the job state, fetch the result file.
+ """
+ error_file = os.path.join(context.local_dir, "idefixerror.txt")
+ result_file = os.path.join(context.local_dir, "idefixresult.txt")
+ with open(error_file, "r") as f:
+ error = f.read()
+ with open(result_file, "r") as f:
+ result_str = f.read()
+ result = eval(result_str)
+
+ return error, result
+
+def createExecutor(config):
+ return JobExecutor(config)
--- /dev/null
+#! /usr/bin/env python3
+import json
+import importlib
+from multiprocessing import Pool
+import traceback
+
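+# Main job script: it loads the configuration file, imports the executor
+# plugin and evaluates every point of the sample in a pool of "nbbranches"
+# parallel processes.
+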
+class StartJob:
+ def __init__(self, executor):
+ self.executor = executor
+
+ def __call__(self, idx, in_values):
+ error=None
+ out_values=None
+ try:
+ error, out_values = self.executor.evaluate(idx, in_values)
+ except Exception as e:
+ error=str(e)
+ traceback.print_exc()
+ return idx, in_values, out_values, error
+
+class TerminateJob:
+ def __init__(self, manager):
+ self.manager = manager
+
+ def __call__(self, result):
+ # Without the try statement, an error here could deadlock the pool.
+ try:
+ idx, in_values, out_values, error = result
+ if not error:
+ error = None
+ self.manager.addResult(idx, in_values, out_values, error)
+ except Exception as e:
+ traceback.print_exc()
+
+if __name__ == '__main__':
+ with open("idefixconfig.json", "r") as f:
+ config = json.load(f)
+ plugin_module = importlib.import_module(config["plugin"])
+ executor = plugin_module.createExecutor(config)
+ # global initialization - common work for every evaluation.
+ executor.initialize()
+
+ itModuleName = config["sampleIterator"]
+ itModule = importlib.import_module(itModuleName)
+ sampleManager = itModule.SampleIterator()
+ sampleManager.writeHeaders()
+
+ nbbranches=config["nbbranches"]
+ pool = Pool(nbbranches)
+ runPoint = StartJob(executor)
+ endOk = TerminateJob(sampleManager)
+ for point in sampleManager:
+ pool.apply_async(runPoint, point, callback=endOk)
+ pool.close()
+ pool.join()
+ sampleManager.terminate()
--- /dev/null
+#! /usr/bin/env python3
+import traceback
+import os
+
+data_file_name = "idefixdata.csv"
+study_module = "idefixstudy.py"
+error_result = "idefixerror.txt"
+value_result = "idefixresult.txt"
+traceback_result = "idefixtraceback.txt"
+
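+# Evaluation protocol: the study script must define a function _exec whose
+# keyword arguments match the input columns of the sample. The input point is
+# read from "idefixdata.csv" and the result of _exec is written in repr
+# format to "idefixresult.txt", to be read back with eval by the executor.
+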
+with open(data_file_name, "r") as f:
+ values = f.read()
+inputvals = eval(values)
+
+error=""
+result=None
+old_dir = os.getcwd()
+
+try:
+ os.chdir("..") # go to commun root directory
+ with open(study_module, "r") as study_file:
+ study_string = study_file.read()
+ exec(study_string)
+ result = _exec(**inputvals)
+except Exception as e:
+ error=str(e)
+ os.chdir(old_dir) # back to the current case job directory
+ with open(traceback_result, "w") as f:
+ traceback.print_exc(file=f)
+
+os.chdir(old_dir) # back to the current case job directory
+
+with open(error_result, "w") as f:
+ f.write(error)
+
+with open(value_result, "w") as f:
+ f.write(repr(result))
--- /dev/null
+import os
+import subprocess
+import traceback
+
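+# Executor plugin: every point is evaluated by launching pointeval.py through
+# the SLURM srun command (one srun step per evaluation).
+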
+class Context:
+ def __init__(self):
+ # no launcher is needed: evaluations are launched through srun
+ pass
+
+class JobExecutor:
+ def __init__(self, config):
+ self.config = config
+
+ def initialize(self):
+ """
+ Make the point evaluation script executable.
+ """
+ pointeval = os.path.join(os.getcwd(), "pointeval.py")
+ os.chmod(pointeval, 0o755)
+
+ def evaluate(self, idx, point):
+ """ This is executed for every point to be evaluated.
+ """
+ context = Context()
+ error = None
+ out_values = None
+ try:
+ self.prepare(idx, point, context)
+ if self.noRunFound(idx, point, context):
+ self.runjob(idx, point, context)
+ error, out_values = self.getResult(context)
+ except Exception as e:
+ error = str(e)
+ traceback.print_exc()
+ return error, out_values
+
+ def prepare(self, idx, point, context):
+ """
+ Define local and remote work directory.
+ Define job script.
+ """
+ root_dir = os.getcwd()
+ point_name = "job_"+str(idx)
+ context.local_dir = os.path.join(root_dir, point_name)
+ if not os.path.exists(context.local_dir):
+ os.mkdir(context.local_dir)
+ # export the point to a file
+ data_file_name = "idefixdata.csv"
+ data_file_path = os.path.join(context.local_dir, data_file_name)
+ with open(data_file_path, "w") as f:
+ # explicit dict conversion is needed for compatibility between Python versions
+ f.write(repr(dict(point)))
+
+
+ def noRunFound(self, idx, point, context):
+ return True
+
+ def runjob(self, idx, point, context):
+ """
+ Create, launch and wait for the end of the job.
+ """
+ # srun
+ ntasks = self.config["tasksPerEval"]
+ pointeval = os.path.join(os.getcwd(), "pointeval.py")
+ command = "srun --ntasks={} --nodes=1 --chdir={} {} ".format(
+ str(ntasks),
+ context.local_dir,
+ pointeval)
+ subprocess.call(command, shell=True)
+
+ def getResult(self, context):
+ """
+ Check the job state, fetch the result file.
+ """
+ error_file = os.path.join(context.local_dir, "idefixerror.txt")
+ result_file = os.path.join(context.local_dir, "idefixresult.txt")
+ with open(error_file, "r") as f:
+ error = f.read()
+ with open(result_file, "r") as f:
+ result_str = f.read()
+ result = eval(result_str)
+
+ return error, result
+
+def createExecutor(config):
+ return JobExecutor(config)
if exit_code == "0" :
errorIfNoResults = True # we expect to have full results
else:
- errorMessage = "An error occured during the execution of the YACS schema."
+ errorMessage = "An error occured during the execution of the job."
else:
- errorMessage = "Failed to get the exit code of the YACS schema execution."
+ errorMessage = "Failed to get the exit code of the job."
elif state == "RUNNING" or state == "PAUSED" or state == "ERROR" :
# partial results may be available
+++ /dev/null
-SET(SCHEMA_FILES
- executor.py
- mainjob.py
- pointeval.py
- )
-
-INSTALL(FILES ${SCHEMA_FILES} DESTINATION ${SALOME_INSTALL_PYTHON}/pydefx/slurm)
+++ /dev/null
-#import pydefx
-import os
-import pickle
-import time
-import traceback
-import subprocess
-
-class Context:
- def __init__(self):
- #self.launcher = pydefx.salome_proxy.getLauncher() # getLauncher()
- pass
-
-class JobExecutor:
- def __init__(self, config):
- self.config = config
-
- def initialize(self):
- """
- Execute prescript.
- """
- pointeval = os.path.join(os.getcwd(), "pointeval.py")
- os.chmod(pointeval, 0o755)
-
- def evaluate(self, idx, point):
- """ This is executed for every point to be evaluated.
- """
- context = Context()
- error = None
- out_values = None
- try:
- self.prepare(idx, point, context)
- if self.noRunFound(idx, point, context):
- self.runjob(idx, point, context)
- error, out_values = self.getResult(context)
- except Exception as e:
- error = str(e)
- traceback.print_exc()
- return error, out_values
-
- def prepare(self, idx, point, context):
- """
- Define local and remote work directory.
- Define job script.
- """
- root_dir = os.getcwd()
- point_name = "job_"+str(idx)
- context.local_dir = os.path.join(root_dir, point_name)
- if not os.path.exists(context.local_dir):
- os.mkdir(context.local_dir)
- # export the point to a file
- data_file_name = "idefixdata.csv"
- data_file_path = os.path.join(context.local_dir, data_file_name)
- with open(data_file_path, "w") as f:
- # explicit dict convertion is needed for compatibility between python versions
- f.write(repr(dict(point)))
-
-
- def noRunFound(self, idx, point, context):
- return True
-
- def runjob(self, idx, point, context):
- """
- Create, launch and wait for the end of the job.
- """
- # srun
- ntasks = self.config["tasksPerEval"]
- pointeval = os.path.join(os.getcwd(), "pointeval.py")
- command = "srun --ntasks={} --nodes=1 --chdir={} {} ".format(
- str(ntasks),
- context.local_dir,
- pointeval)
- return_code = subprocess.call(command, shell=True)
-
- def getResult(self, context):
- """
- Check the job state, fetch the result file.
- """
- error_file = os.path.join(context.local_dir, "idefixerror.txt")
- result_file = os.path.join(context.local_dir, "idefixresult.txt")
- with open(error_file, "r") as f:
- error = f.read()
- with open(result_file, "r") as f:
- result_str = f.read()
- result = eval(result_str)
-
- return error, result
-
-def createExecutor(config):
- return JobExecutor(config)
+++ /dev/null
-#! /usr/bin/env python3
-import json
-import importlib
-from multiprocessing import Pool
-import traceback
-
-class StartJob:
- def __init__(self, executor):
- self.executor = executor
-
- def __call__(self, idx, in_values):
- error=None
- out_values=None
- try:
- error, out_values = self.executor.evaluate(idx, in_values)
- except Exception as e:
- error=str(e)
- traceback.print_exc()
- return idx, in_values, out_values, error
-
-class TerminateJob:
- def __init__(self, manager):
- self.manager = manager
-
- def __call__(self, result):
- # without try statement we may experience deadlock in case of error.
- try:
- idx, in_values, out_values, error = result
- if not error:
- error = None
- self.manager.addResult(idx, in_values, out_values, error)
- except Exception as e:
- traceback.print_exc()
-
-if __name__ == '__main__':
- with open("idefixconfig.json", "r") as f:
- config = json.load(f)
- plugin_module = importlib.import_module(config["plugin"])
- executor = plugin_module.createExecutor(config)
- # global initialization - commun work for every evaluation.
- executor.initialize()
-
- itModuleName = config["sampleIterator"]
- itModule = importlib.import_module(itModuleName)
- sampleManager = itModule.SampleIterator()
- sampleManager.writeHeaders()
-
- nbbranches=config["nbbranches"]
- pool = Pool(nbbranches)
- runPoint = StartJob(executor)
- endOk = TerminateJob(sampleManager)
- for point in sampleManager:
- pool.apply_async(runPoint, point, callback=endOk)
- pool.close()
- pool.join()
- sampleManager.terminate()
+++ /dev/null
-#! /usr/bin/env python3
-import traceback
-import os
-
-data_file_name = "idefixdata.csv"
-study_module = "idefixstudy.py"
-error_result = "idefixerror.txt"
-value_result = "idefixresult.txt"
-traceback_result = "idefixtraceback.txt"
-
-with open(data_file_name, "r") as f:
- values = f.read()
-inputvals = eval(values)
-
-error=""
-result=None
-old_dir = os.getcwd()
-
-try:
- os.chdir("..") # go to commun root directory
- with open(study_module, "r") as study_file:
- study_string = study_file.read()
- exec(study_string)
- result = _exec(**inputvals)
-except Exception as e:
- error=str(e)
- os.chdir(old_dir) # back to the current case job directory
- with open(traceback_result, "w") as f:
- traceback.print_exc(file=f)
-
-os.chdir(old_dir) # back to the current case job directory
-
-with open(error_result, "w") as f:
- f.write(error)
-
-with open(value_result, "w") as f:
- f.write(repr(result))
import inspect
import pathlib
import os
+from .allpurposebuilder import AllPurposeBuilder
-class SlurmBuilder:
+class SlurmBuilder(AllPurposeBuilder):
def __init__(self, executor = None, pointEval = None, mainJob = None):
filename = inspect.getframeinfo(inspect.currentframe()).filename
install_root_directory = pathlib.Path(filename).resolve().parent
- install_files_directory = os.path.join(install_root_directory, "slurm")
+ install_files_directory = os.path.join(install_root_directory, "plugins")
if executor is None:
- executor = os.path.join(install_files_directory, "executor.py")
- self.executor = executor
-
- if pointEval is None:
- pointEval = os.path.join(install_files_directory, "pointeval.py")
- self.pointEval = pointEval
-
- if mainJob is None:
- mainJob = os.path.join(install_files_directory, "mainjob.py")
- self.mainJob = mainJob
-
- def getMainJob(self):
- return self.mainJob
-
- def getExecutor(self):
- return self.executor
-
- def getPointEval(self):
- return self.pointEval
-
- def getPluginName(self):
- basename = os.path.basename(self.executor)
- if not basename.endswith(".py"):
- raise Exception("File name {} does not end with '.py'.".format(
- self.executor))
- return basename[:-3]
+ executor = os.path.join(install_files_directory, "srunexecutor.py")
+ super().__init__(executor, pointEval, mainJob)
--- /dev/null
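+# Test of a standard PyStudy run on the "eole" resource.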
+import pydefx
+import os
+
+myParams = pydefx.Parameters()
+myParams.configureResource("eole")
+#myParams.createResultDirectory("/tmp")
+myParams.nb_branches = 4
+myParams.salome_parameters.resource_required.nb_proc = 4
+myParams.salome_parameters.result_directory=os.path.join(os.getcwd(),"rundir")
+myParams.salome_parameters.work_directory="/scratch/I35256/workingdir/testjob/"
+myParams.salome_parameters.local_directory = os.getcwd()
+myParams.salome_parameters.in_files=["template_jdd.txt", "mysolver.py"]
+
+pyScript = os.path.join(os.getcwd(), "mystudy.py")
+
+myScript = pydefx.PyScript()
+myScript.loadFile(pyScript)
+
+mySample = myScript.CreateEmptySample()
+mydata = {"x":range(10)}
+mySample.setInputValues(mydata)
+
+myStudy = pydefx.PyStudy()
+myStudy.createNewJob(myScript, mySample, myParams)
+myStudy.launch()
+
+myStudy.getJobState()
+myStudy.wait()
+print(myStudy.getResult())
+print(myStudy.sample)
+#print(myStudy.global_result)
--- /dev/null
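+# Test of a LocalStudy with the "lightexecutor" plugin: every point is
+# evaluated in-process on the local computer. Note that some points have
+# b == 0, so the error handling path is exercised too.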
+import pydefx
+import os
+
+myParams = pydefx.Parameters()
+myParams.configureResource("localhost")
+#myParams.createResultDirectory("/tmp")
+myParams.nb_branches = 4
+myParams.salome_parameters.resource_required.nb_proc = 1
+#myParams.salome_parameters.result_directory=os.path.join(os.getcwd(),"runbasic")
+#myParams.salome_parameters.work_directory="/scratch/I35256/workingdir/test_multijob/"
+myParams.salome_parameters.work_directory=os.path.join(os.getcwd(),"runbasic")
+myParams.salome_parameters.local_directory = os.getcwd()
+#myParams.salome_parameters.in_files=["template_jdd.txt", "mysolver.py"]
+
+#pyScript = os.path.join(os.getcwd(), "mystudy.py")
+pyScript = """
+def _exec(a,b):
+ d = a / b
+ return d
+"""
+
+myScript = pydefx.PyScript()
+#myScript.loadFile(pyScript)
+myScript.loadString(pyScript)
+
+mySample = myScript.CreateEmptySample()
+#mydata = {"x":range(10)}
+mydata = {"a":[x // 10 for x in range(100)],
+ "b":[x % 10 for x in range(100)]}
+mySample.setInputValues(mydata)
+
+myStudy = pydefx.LocalStudy(schemaBuilder=pydefx.LocalBuilder("lightexecutor"))
+myStudy.createNewJob(myScript, mySample, myParams)
+myStudy.launch()
+
+myStudy.getJobState()
+myStudy.wait()
+print(myStudy.getResult())
+print(myStudy.sample)
+#print(myStudy.global_result)
--- /dev/null
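+# Test of a MultiJobStudy on the "eole" resource: one SALOME job per
+# evaluation.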
+import pydefx
+import os
+
+myParams = pydefx.Parameters()
+myParams.configureResource("eole")
+#myParams.createResultDirectory("/tmp")
+myParams.nb_branches = 4
+myParams.salome_parameters.resource_required.nb_proc = 1
+myParams.salome_parameters.result_directory=os.path.join(os.getcwd(),"runmulti")
+myParams.salome_parameters.work_directory="/scratch/I35256/workingdir/test_multijob/"
+myParams.salome_parameters.local_directory = os.getcwd()
+myParams.salome_parameters.in_files=["template_jdd.txt", "mysolver.py"]
+
+pyScript = os.path.join(os.getcwd(), "mystudy.py")
+
+myScript = pydefx.PyScript()
+myScript.loadFile(pyScript)
+
+mySample = myScript.CreateEmptySample()
+mydata = {"x":range(10)}
+mySample.setInputValues(mydata)
+
+myStudy = pydefx.MultiJobStudy()
+myStudy.createNewJob(myScript, mySample, myParams)
+myStudy.launch()
+
+myStudy.getJobState()
+myStudy.wait()
+print(myStudy.getResult())
+print(myStudy.sample)
+#print(myStudy.global_result)
--- /dev/null
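+# Test of a SlurmStudy on the "eole" resource: evaluations are launched
+# through srun inside a single job.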
+import pydefx
+import os
+
+myParams = pydefx.Parameters()
+myParams.configureResource("eole")
+#myParams.createResultDirectory("/tmp")
+myParams.nb_branches = 4
+myParams.salome_parameters.resource_required.nb_proc = 4
+myParams.salome_parameters.result_directory=os.path.join(os.getcwd(),"runsrun")
+myParams.salome_parameters.work_directory="/scratch/I35256/workingdir/test_srunjob/"
+myParams.salome_parameters.local_directory = os.getcwd()
+myParams.salome_parameters.in_files=["template_jdd.txt", "mysolver.py"]
+
+pyScript = os.path.join(os.getcwd(), "mystudy.py")
+
+myScript = pydefx.PyScript()
+myScript.loadFile(pyScript)
+
+mySample = myScript.CreateEmptySample()
+mydata = {"x":range(10)}
+mySample.setInputValues(mydata)
+
+myStudy = pydefx.SlurmStudy()
+myStudy.createNewJob(myScript, mySample, myParams)
+myStudy.launch()
+
+myStudy.getJobState()
+myStudy.wait()
+print(myStudy.getResult())
+print(myStudy.sample)
+#print(myStudy.global_result)
--- /dev/null
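+# Fake solver used by the test studies: reads a float from the input file
+# and writes its square to the output file.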
+#! /usr/bin/env python3
+import argparse
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser(description="Fake solver.")
+ parser.add_argument("jdd", help="Input file.")
+ parser.add_argument("resultat", help="Output file.")
+ args = parser.parse_args()
+ with open(args.jdd, 'r') as f:
+ in_value = float(f.read())
+ with open(args.resultat, 'w') as f:
+ f.write(str(in_value * in_value))
--- /dev/null
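+# Study script used by the tests: _exec(x) prepares a case directory from a
+# template, runs the fake solver on it and returns the result; results of
+# already computed cases are reused.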
+import os
+def root_dir():
+ return os.getcwd()
+
+def case_dir(v):
+ case_name = "c_"+repr(v)
+ return os.path.join(root_dir(), case_name)
+
+class Study:
+ def __init__(self, value):
+ self.value = value
+ self.caseDir = case_dir(self.value)
+ self.rootDir = root_dir()
+
+ def getResults(self):
+ result_file = os.path.join(self.caseDir, "result.txt")
+ with open(result_file, 'r') as f:
+ result = float(f.read())
+ return result
+
+ def caseExists(self):
+ ok = True
+ if os.path.isdir(self.caseDir):
+ try:
+ self.getResults()
+ ok = True
+ except:
+ ok = False
+ else:
+ ok = False
+ return ok
+
+ def prepareCase(self):
+ if not os.path.isdir(self.caseDir):
+ os.mkdir(self.caseDir)
+ template_file = os.path.join(self.rootDir, "template_jdd.txt")
+ case_file = os.path.join(self.caseDir, "jdd.txt")
+ with open(template_file,'r') as f:
+ filedata = f.read()
+ filedata = filedata.format(**{'value':repr(self.value)})
+ with open(case_file,'w') as f:
+ f.write(filedata)
+
+ def runCase(self):
+ import subprocess
+ command = "{} {} {}".format(
+ os.path.join(self.rootDir, "mysolver.py"),
+ os.path.join(self.caseDir, "jdd.txt"),
+ os.path.join(self.caseDir, "result.txt"))
+ subprocess.run(command, shell=True)
+
+def _exec(x):
+ e = Study(x)
+ if not e.caseExists():
+ e.prepareCase()
+ e.runCase()
+ r = e.getResults()
+ return r