WIP: localstudy.
author     Ovidiu Mircescu <ovidiu.mircescu@edf.fr>  Thu, 8 Oct 2020 13:45:41 +0000 (15:45 +0200)
committer  Ovidiu Mircescu <ovidiu.mircescu@edf.fr>  Thu, 8 Oct 2020 13:45:41 +0000 (15:45 +0200)
32 files changed:
src/pydefx/CMakeLists.txt
src/pydefx/__init__.py
src/pydefx/allpurposebuilder.py [new file with mode: 0644]
src/pydefx/execconfiguration.py [new file with mode: 0644]
src/pydefx/localbuilder.py [new file with mode: 0644]
src/pydefx/localstudy.py [new file with mode: 0644]
src/pydefx/multijob/CMakeLists.txt [deleted file]
src/pydefx/multijob/executor.py [deleted file]
src/pydefx/multijob/mainjob.py [deleted file]
src/pydefx/multijob/pointeval.py [deleted file]
src/pydefx/multijobbuilder.py
src/pydefx/multijobstudy.py
src/pydefx/plugins/CMakeLists.txt [new file with mode: 0644]
src/pydefx/plugins/jobexecutor.py [new file with mode: 0644]
src/pydefx/plugins/lightexecutor.py [new file with mode: 0644]
src/pydefx/plugins/localexecutor.py [new file with mode: 0644]
src/pydefx/plugins/mainjob.py [new file with mode: 0644]
src/pydefx/plugins/pointeval.py [new file with mode: 0644]
src/pydefx/plugins/srunexecutor.py [new file with mode: 0644]
src/pydefx/pystudy.py
src/pydefx/slurm/CMakeLists.txt [deleted file]
src/pydefx/slurm/executor.py [deleted file]
src/pydefx/slurm/mainjob.py [deleted file]
src/pydefx/slurm/pointeval.py [deleted file]
src/pydefx/slurmbuilder.py
src/pyexample/multijob/launch.py [new file with mode: 0755]
src/pyexample/multijob/launch_local_basic.py [new file with mode: 0755]
src/pyexample/multijob/launch_multi.py [new file with mode: 0755]
src/pyexample/multijob/launch_srun.py [new file with mode: 0755]
src/pyexample/multijob/mysolver.py [new file with mode: 0755]
src/pyexample/multijob/mystudy.py [new file with mode: 0644]
src/pyexample/multijob/template_jdd.txt [new file with mode: 0644]

diff --git a/src/pydefx/CMakeLists.txt b/src/pydefx/CMakeLists.txt
index 8f1609899680d36bf039b01f4c25035b507ca1cc..abb06320d6d769772ed1e5e257bce7d07f400ffc 100644 (file)
@@ -18,6 +18,7 @@
 #
 SET(SCRIPTS
   __init__.py
+  allpurposebuilder.py
   configuration.py
   parameters.py
   pyscript.py
@@ -33,9 +34,13 @@ SET(SCRIPTS
   multijobstudy.py
   slurmbuilder.py
   slurmstudy.py
+  localbuilder.py
+  localstudy.py
+  execconfiguration.py
   )
 
 INSTALL(FILES ${SCRIPTS} DESTINATION ${SALOME_INSTALL_PYTHON}/pydefx)
 ADD_SUBDIRECTORY(schemas)
-ADD_SUBDIRECTORY(multijob)
-ADD_SUBDIRECTORY(slurm)
+ADD_SUBDIRECTORY(plugins)
+#ADD_SUBDIRECTORY(multijob)
+#ADD_SUBDIRECTORY(slurm)
diff --git a/src/pydefx/__init__.py b/src/pydefx/__init__.py
index d59cde88e3368df9e4a73e85c716749d3b52c186..9f8c9c1155dcf4e345284b83140c828489202241 100644 (file)
@@ -21,7 +21,12 @@ from .pyscript import PyScript
 from .pystudy import PyStudy
 from .sample import Sample
 from .defaultschemabuilder import DefaultSchemaBuilder
+from .allpurposebuilder import AllPurposeBuilder
+from .localbuilder import LocalBuilder
+from .multijobbuilder import MultiJobBuilder
+from .slurmbuilder import SlurmBuilder
 
 from .salome_proxy import forceSalomeServers, forceNoSalomeServers
 from .multijobstudy import MultiJobStudy
 from .slurmstudy import SlurmStudy
+from .localstudy import LocalStudy
diff --git a/src/pydefx/allpurposebuilder.py b/src/pydefx/allpurposebuilder.py
new file mode 100644 (file)
index 0000000..2722f4e
--- /dev/null
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+# Copyright (C) 2019  EDF R&D
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+import inspect
+import pathlib
+import os
+
+class AllPurposeBuilder:
+  def __init__(self, executor = None, pointEval = None, mainJob = None):
+    filename = inspect.getframeinfo(inspect.currentframe()).filename
+    install_root_directory = pathlib.Path(filename).resolve().parent
+    install_files_directory = os.path.join(install_root_directory, "plugins")
+
+    if executor is None:
+      raise TypeError("Parameter executor should not be None.")
+    self.executor = executor
+
+    if pointEval is None:
+      pointEval = os.path.join(install_files_directory, "pointeval.py")
+    self.pointEval = pointEval
+
+    if mainJob is None:
+      mainJob = os.path.join(install_files_directory, "mainjob.py")
+    self.mainJob = mainJob
+
+  def getMainJob(self):
+    return self.mainJob
+
+  def getExecutor(self):
+    return self.executor
+
+  def getPointEval(self):
+    return self.pointEval
+
+  def getPluginName(self):
+    basename = os.path.basename(self.executor)
+    if not basename.endswith(".py"):
+      raise ValueError("File name {} does not end with '.py'.".format(
+                                                                 self.executor))
+    return basename[:-3]
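
A minimal usage sketch of the new builder base class (the executor path and
printed values are illustrative, not part of this commit):

  from pydefx.allpurposebuilder import AllPurposeBuilder

  builder = AllPurposeBuilder(executor="/path/to/plugins/lightexecutor.py")
  print(builder.getPluginName())  # -> "lightexecutor"
  print(builder.getMainJob())     # defaults to <install>/plugins/mainjob.py
  print(builder.getPointEval())   # defaults to <install>/plugins/pointeval.py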
diff --git a/src/pydefx/execconfiguration.py b/src/pydefx/execconfiguration.py
new file mode 100644 (file)
index 0000000..a6cc5bc
--- /dev/null
@@ -0,0 +1,42 @@
+# -*- coding: utf-8 -*-
+# Copyright (C) 2019  EDF R&D
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+import json
+from . import parameters
+
+def SalomeParameters():
+  """
+  This function can be called during the evaluation of a point in order to get
+  the parameters of the job.
+  """
+  result = None
+  try:
+    with open("idefixconfig.json", "r") as f:
+      config = json.load(f)
+    params = parameters.Parameters()
+    params.loadDict(config["params"])
+    result = params.salome_parameters
+  except Exception:
+    result = None
+  return result
+
+def GetConfig():
+  with open("idefixconfig.json", "r") as f:
+    config = json.load(f)
+  return config
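
A sketch of how these helpers are meant to be called from inside a point
evaluation, assuming pydefx is importable there and idefixconfig.json sits in
the current directory:

  from pydefx.execconfiguration import SalomeParameters, GetConfig

  salome_params = SalomeParameters()  # None if the config cannot be read
  config = GetConfig()                # raises if idefixconfig.json is missing
  print(config["plugin"])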
diff --git a/src/pydefx/localbuilder.py b/src/pydefx/localbuilder.py
new file mode 100644 (file)
index 0000000..0b7e1b1
--- /dev/null
@@ -0,0 +1,37 @@
+# -*- coding: utf-8 -*-
+# Copyright (C) 2019  EDF R&D
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+import inspect
+import pathlib
+import os
+from .allpurposebuilder import AllPurposeBuilder
+
+class LocalBuilder(AllPurposeBuilder):
+  def __init__(self, executor = None, pointEval = None, mainJob = None):
+    filename = inspect.getframeinfo(inspect.currentframe()).filename
+    install_root_directory = pathlib.Path(filename).resolve().parent
+    install_files_directory = os.path.join(install_root_directory, "plugins")
+    if executor is None:
+      executor = os.path.join(install_files_directory, "localexecutor.py")
+    elif executor == "localexecutor" or executor == "localexecutor.py":
+      executor = os.path.join(install_files_directory, "localexecutor.py")
+    elif executor == "lightexecutor" or executor == "lightexecutor.py":
+      executor = os.path.join(install_files_directory, "lightexecutor.py")
+    super().__init__(executor, pointEval, mainJob)
+
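
The executor argument accepts either a full path or one of the two built-in
shortcuts; a small sketch of the resulting selection:

  from pydefx import LocalBuilder

  b1 = LocalBuilder()                 # -> plugins/localexecutor.py (one subprocess per point)
  b2 = LocalBuilder("lightexecutor")  # -> plugins/lightexecutor.py (in-process evaluation)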
diff --git a/src/pydefx/localstudy.py b/src/pydefx/localstudy.py
new file mode 100644 (file)
index 0000000..0d046ee
--- /dev/null
@@ -0,0 +1,84 @@
+# -*- coding: utf-8 -*-
+# Copyright (C) 2019  EDF R&D
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+import copy
+import os
+import json
+from . import pystudy
+from . import localbuilder
+from . import salome_proxy
+
+
+class LocalStudy(pystudy.PyStudy):
+  """
+  This study uses a separate job for each evaluation.
+  """
+  def __init__(self, sampleManager=None, schemaBuilder=None):
+    if schemaBuilder is None:
+      schemaBuilder = localbuilder.LocalBuilder()
+    super().__init__(sampleManager, schemaBuilder)
+
+  def createNewJob(self, script, sample, params):
+    self._check(script,sample)
+    self.sample = sample
+    self.params = copy.deepcopy(params)
+    main_job_work_dir = os.path.join(
+                                 self.params.salome_parameters.work_directory,
+                                 "idefixjob")
+    # dump the remote jobs parameters to the configuration file
+    params_dic = params.dumpDict()
+    # modify the parameters for the local loop job
+    self.params.salome_parameters.resource_required.name = "localhost"
+    self.params.salome_parameters.job_type = "command_salome" #"python_salome"
+    self.params.salome_parameters.work_directory = main_job_work_dir
+    self.params.createTmpResultDirectory()
+    result_directory = self.params.salome_parameters.result_directory
+    # export sample to result_directory
+    inputFiles = self.sampleManager.prepareRun(self.sample, result_directory)
+    inputFiles.extend([self.schemaBuilder.getExecutor(),
+                       self.schemaBuilder.getPointEval()])
+    self.params.salome_parameters.job_file = self.schemaBuilder.getMainJob()
+
+    # export config
+    configpath = os.path.join(result_directory, "idefixconfig.json")
+    dicconfig = {}
+    dicconfig["nbbranches"]  = self.params.nb_branches
+    dicconfig["studymodule"] = "idefixstudy"
+    dicconfig["sampleIterator"] = self.sampleManager.getModuleName()
+    dicconfig["params"] = params_dic
+    dicconfig["plugin"] = self.schemaBuilder.getPluginName()
+    with open(configpath, "w") as f:
+      json.dump(dicconfig, f, indent=2)
+    studypath = os.path.join(result_directory, "idefixstudy.py")
+    with open(studypath, "w") as f:
+      f.write(script.script)
+
+    inputFiles.extend([configpath, studypath])
+
+    # This list manipulation is needed because in_files is not a Python list
+    # when no Salome session is used: in that case SWIG maps the std::list
+    # member of the structure to a Python tuple.
+    in_files_as_list = list(self.params.salome_parameters.in_files)
+    self.params.salome_parameters.in_files = in_files_as_list + inputFiles
+    launcher = salome_proxy.getLauncher()
+    self.job_id = launcher.createJob(self.params.salome_parameters)
+    return self.job_id
+
+  def jobType(self):
+    return "command_salome"
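
For reference, createNewJob serializes a configuration of this shape to
idefixconfig.json in the result directory (the concrete values are
illustrative):

  dicconfig = {
    "nbbranches": 4,                    # params.nb_branches
    "studymodule": "idefixstudy",       # module name of the study script
    "sampleIterator": "sampleiterator", # sampleManager.getModuleName(), name illustrative
    "params": params_dic,               # remote job parameters dumped as a dict
    "plugin": "localexecutor",          # schemaBuilder.getPluginName()
  }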
diff --git a/src/pydefx/multijob/CMakeLists.txt b/src/pydefx/multijob/CMakeLists.txt
deleted file mode 100644 (file)
index 6978973..0000000
+++ /dev/null
@@ -1,7 +0,0 @@
-SET(SCHEMA_FILES
-  executor.py
-  mainjob.py
-  pointeval.py
-  )
-
-INSTALL(FILES ${SCHEMA_FILES} DESTINATION ${SALOME_INSTALL_PYTHON}/pydefx/multijob)
diff --git a/src/pydefx/multijob/executor.py b/src/pydefx/multijob/executor.py
deleted file mode 100644 (file)
index 4d56df0..0000000
+++ /dev/null
@@ -1,129 +0,0 @@
-import pydefx
-import os
-import pickle
-import time
-import traceback
-
-class Context:
-  def __init__(self):
-    self.launcher = pydefx.salome_proxy.getLauncher() # getLauncher()
-  pass
-
-class JobExecutor:
-  def __init__(self, config):
-    self.config = config
-
-  def initialize(self):
-    """ This is executed before the first evaluation.
-    Put here global processing needed by all the evaluations like the copy of
-    commun files.
-    """
-    # Copy the commun files to the root work directory
-    params = pydefx.Parameters() # global parameters
-    params.loadDict(self.config["params"])
-    # use a fake empty command.
-    # Using launcher to copy some files on the remote file system,
-    # without launching a job.
-    command = os.path.join(os.getcwd(), "empty.sh")
-    open(command, "w").close()
-    params.salome_parameters.job_file = command
-    params.salome_parameters.job_type = "command"
-    study_module = os.path.join(os.getcwd(), self.config["studymodule"]+".py")
-    infiles = list(params.salome_parameters.in_files)
-    params.salome_parameters.in_files = infiles + [study_module]
-    launcher = pydefx.salome_proxy.getLauncher()
-    job_id = launcher.createJob(params.salome_parameters)
-    launcher.exportInputFiles(job_id)
-
-  def evaluate(self, idx, point):
-    """ This is executed for every point to be evaluated.
-    """
-    context = Context()
-    error = None
-    out_values = None
-    try:
-      self.prepare(idx, point, context)
-      if self.noRunFound(idx, point, context):
-        self.runjob(idx, point, context)
-      error, out_values = self.getResult(context)
-    except Exception as e:
-      error = str(e)
-      traceback.print_exc()
-    return error, out_values
-  
-  def prepare(self, idx, point, context):
-    """
-    Define local and remote work directory.
-    Define job script.
-    """
-    context.params = pydefx.Parameters()
-    context.params.loadDict(self.config["params"])
-    salome_parameters = context.params.salome_parameters
-    root_local_dir = salome_parameters.result_directory
-    root_remote_dir = salome_parameters.work_directory
-    input_files = [] # commun files are already copied to the root directory
-    point_name = "job_"+str(idx)
-    context.local_dir = os.path.join(root_local_dir, point_name)
-    point_remote_dir = os.path.join(root_remote_dir, point_name)
-    if not os.path.exists(context.local_dir):
-      os.mkdir(context.local_dir)
-    # export the point to a file
-    data_file_name = "idefixdata.csv"
-    data_file_path = os.path.join(context.local_dir, data_file_name)
-    with open(data_file_path, "w") as f:
-      # explicit dict convertion is needed for compatibility between python versions
-      f.write(repr(dict(point)))
-    input_files.append(data_file_path)
-
-    #command_path = os.path.join(root_local_dir, "command.py")
-    #salome_parameters.job_type = "command_salome"
-    #salome_parameters.job_file = command_path
-
-    salome_parameters.in_files = input_files
-    salome_parameters.out_files = ["idefixresult.txt", "idefixerror.txt"]
-    salome_parameters.work_directory = point_remote_dir
-    salome_parameters.result_directory = context.local_dir
-  
-  def noRunFound(self, idx, point, context):
-    return True
-  
-  def runjob(self, idx, point, context):
-    """
-    Create, launch and wait for the end of the job.
-    """
-    import random
-    sleep_delay = random.randint(5, 15) #10
-    #launcher = pydefx.salome_proxy.getLauncher()
-    launcher = context.launcher
-    context.job_id = launcher.createJob(context.params.salome_parameters)
-    launcher.launchJob(context.job_id)
-    jobState = launcher.getJobState(context.job_id)
-    while jobState=="QUEUED" or jobState=="IN_PROCESS" or jobState=="RUNNING" :
-      time.sleep(sleep_delay)
-      jobState = launcher.getJobState(context.job_id)
-
-  def getResult(self, context):
-    """
-    Check the job state, fetch the result file.
-    """
-    #launcher = pydefx.salome_proxy.getLauncher()
-    launcher = context.launcher
-    jobState = launcher.getJobState(context.job_id)
-    error=""
-    result=None
-    if jobState != "FINISHED" :
-      error = "Job has not finished correctly."
-    else:
-      launcher.getJobResults(context.job_id, "")
-      error_file = os.path.join(context.local_dir, "idefixerror.txt")
-      result_file = os.path.join(context.local_dir, "idefixresult.txt")
-      with open(error_file, "r") as f:
-        error = f.read()
-      with open(result_file, "r") as f:
-        result_str = f.read()
-        result = eval(result_str)
-
-    return error, result
-
-def createExecutor(config):
-  return JobExecutor(config)
diff --git a/src/pydefx/multijob/mainjob.py b/src/pydefx/multijob/mainjob.py
deleted file mode 100644 (file)
index b9cfe3f..0000000
+++ /dev/null
@@ -1,56 +0,0 @@
-#! /usr/bin/env python3
-import json
-import importlib
-from multiprocessing import Pool
-import traceback
-
-class StartJob:
-  def __init__(self, executor):
-    self.executor = executor
-
-  def __call__(self, idx, in_values):
-    error=None
-    out_values=None
-    try:
-      error, out_values = self.executor.evaluate(idx, in_values)
-    except Exception as e:
-      error=str(e)
-      traceback.print_exc()
-    return idx, in_values, out_values, error
-
-class TerminateJob:
-  def __init__(self, manager):
-    self.manager = manager
-
-  def __call__(self, result):
-    # without try statement we may experience deadlock in case of error.
-    try:
-      idx, in_values, out_values, error = result
-      if not error:
-        error = None
-      self.manager.addResult(idx, in_values, out_values, error)
-    except Exception as e:
-      traceback.print_exc()
-  
-if __name__ == '__main__':
-  with open("idefixconfig.json", "r") as f:
-    config = json.load(f)
-  plugin_module = importlib.import_module(config["plugin"])
-  executor = plugin_module.createExecutor(config)
-  # global initialization - commun work for every evaluation.
-  executor.initialize()
-
-  itModuleName = config["sampleIterator"]
-  itModule = importlib.import_module(itModuleName)
-  sampleManager = itModule.SampleIterator()
-  sampleManager.writeHeaders()
-
-  nbbranches=config["nbbranches"]
-  pool = Pool(nbbranches)
-  runPoint = StartJob(executor)
-  endOk = TerminateJob(sampleManager)
-  for point in sampleManager:
-    pool.apply_async(runPoint, point, callback=endOk)
-  pool.close()
-  pool.join()
-  sampleManager.terminate()
diff --git a/src/pydefx/multijob/pointeval.py b/src/pydefx/multijob/pointeval.py
deleted file mode 100644 (file)
index c652b26..0000000
+++ /dev/null
@@ -1,37 +0,0 @@
-#! /usr/bin/env python3
-import traceback
-import os
-
-data_file_name = "idefixdata.csv"
-study_module = "idefixstudy.py"
-error_result = "idefixerror.txt"
-value_result = "idefixresult.txt"
-traceback_result = "idefixtraceback.txt"
-
-with open(data_file_name, "r") as f:
-  values = f.read()
-inputvals = eval(values)
-
-error=""
-result=None
-old_dir = os.getcwd()
-
-try:
-  os.chdir("..") # go to commun root directory
-  with open(study_module, "r") as study_file:
-    study_string = study_file.read()
-  exec(study_string)
-  result = _exec(**inputvals)
-except Exception as e:
-  error=str(e)
-  os.chdir(old_dir) # back to the current case job directory
-  with open(traceback_result, "w") as f:
-    traceback.print_exc(file=f)
-
-os.chdir(old_dir) # back to the current case job directory
-
-with open(error_result, "w") as f:
-  f.write(error)
-
-with open(value_result, "w") as f:
-  f.write(repr(result))
diff --git a/src/pydefx/multijobbuilder.py b/src/pydefx/multijobbuilder.py
index c4d3ec03b431459022eeb525364032f5167f06b7..73a7c6bd890e7941f4ec4d4f582552af5e6a6a0c 100644 (file)
 import inspect
 import pathlib
 import os
+from .allpurposebuilder import AllPurposeBuilder
 
-class MultiJobBuilder:
+class MultiJobBuilder(AllPurposeBuilder):
   def __init__(self, executor = None, pointEval = None, mainJob = None):
     filename = inspect.getframeinfo(inspect.currentframe()).filename
     install_root_directory = pathlib.Path(filename).resolve().parent
-    install_files_directory = os.path.join(install_root_directory, "multijob")
+    install_files_directory = os.path.join(install_root_directory, "plugins")
 
     if executor is None:
-      executor = os.path.join(install_files_directory, "executor.py")
-    self.executor = executor
-
-    if pointEval is None:
-      pointEval = os.path.join(install_files_directory, "pointeval.py")
-    self.pointEval = pointEval
-
-    if mainJob is None:
-      mainJob = os.path.join(install_files_directory, "mainjob.py")
-    self.mainJob = mainJob
-
-  def getMainJob(self):
-    return self.mainJob
-
-  def getExecutor(self):
-    return self.executor
-
-  def getPointEval(self):
-    return self.pointEval
-
-  def getPluginName(self):
-    basename = os.path.basename(self.executor)
-    if not basename.endswith(".py"):
-      raise Exception("File name {} does not end with '.py'.".format(
-                                                                 self.executor))
-    return basename[:-3]
+      executor = os.path.join(install_files_directory, "jobexecutor.py")
+    super().__init__(executor, pointEval, mainJob)
diff --git a/src/pydefx/multijobstudy.py b/src/pydefx/multijobstudy.py
index 1d2f9f29fe8576e83cbe629a54de77a75a59b6b1..2f31656eb6db3d8af41bcc294da984236f8aa310 100644 (file)
@@ -55,7 +55,6 @@ class MultiJobStudy(pystudy.PyStudy):
     inputFiles.extend([self.schemaBuilder.getExecutor(),
                        self.schemaBuilder.getPointEval()])
     self.params.salome_parameters.job_file = self.schemaBuilder.getMainJob()
-    #schema_path, extra_files, config_info = self.schemaBuilder.buildSchema(result_directory)
 
     # export config
     configpath = os.path.join(result_directory, "idefixconfig.json")
@@ -83,4 +82,4 @@ class MultiJobStudy(pystudy.PyStudy):
     return self.job_id
 
   def jobType(self):
-    return "python_salome"
+    return "command_salome"
diff --git a/src/pydefx/plugins/CMakeLists.txt b/src/pydefx/plugins/CMakeLists.txt
new file mode 100644 (file)
index 0000000..741acbc
--- /dev/null
@@ -0,0 +1,10 @@
+SET(SCHEMA_FILES
+  jobexecutor.py
+  lightexecutor.py
+  localexecutor.py
+  srunexecutor.py
+  mainjob.py
+  pointeval.py
+  )
+
+INSTALL(FILES ${SCHEMA_FILES} DESTINATION ${SALOME_INSTALL_PYTHON}/pydefx/plugins)
diff --git a/src/pydefx/plugins/jobexecutor.py b/src/pydefx/plugins/jobexecutor.py
new file mode 100644 (file)
index 0000000..4d56df0
--- /dev/null
@@ -0,0 +1,129 @@
+import pydefx
+import os
+import pickle
+import time
+import traceback
+
+class Context:
+  def __init__(self):
+    self.launcher = pydefx.salome_proxy.getLauncher() # getLauncher()
+  pass
+
+class JobExecutor:
+  def __init__(self, config):
+    self.config = config
+
+  def initialize(self):
+    """ This is executed before the first evaluation.
+    Put here global processing needed by all the evaluations, such as
+    copying common files.
+    """
+    # Copy the common files to the root work directory
+    params = pydefx.Parameters() # global parameters
+    params.loadDict(self.config["params"])
+    # Use a fake empty command: the launcher is used here only to copy
+    # files to the remote file system, without actually launching a job.
+    command = os.path.join(os.getcwd(), "empty.sh")
+    open(command, "w").close()
+    params.salome_parameters.job_file = command
+    params.salome_parameters.job_type = "command"
+    study_module = os.path.join(os.getcwd(), self.config["studymodule"]+".py")
+    infiles = list(params.salome_parameters.in_files)
+    params.salome_parameters.in_files = infiles + [study_module]
+    launcher = pydefx.salome_proxy.getLauncher()
+    job_id = launcher.createJob(params.salome_parameters)
+    launcher.exportInputFiles(job_id)
+
+  def evaluate(self, idx, point):
+    """ This is executed for every point to be evaluated.
+    """
+    context = Context()
+    error = None
+    out_values = None
+    try:
+      self.prepare(idx, point, context)
+      if self.noRunFound(idx, point, context):
+        self.runjob(idx, point, context)
+      error, out_values = self.getResult(context)
+    except Exception as e:
+      error = str(e)
+      traceback.print_exc()
+    return error, out_values
+  
+  def prepare(self, idx, point, context):
+    """
+    Define local and remote work directory.
+    Define job script.
+    """
+    context.params = pydefx.Parameters()
+    context.params.loadDict(self.config["params"])
+    salome_parameters = context.params.salome_parameters
+    root_local_dir = salome_parameters.result_directory
+    root_remote_dir = salome_parameters.work_directory
+    input_files = [] # common files are already copied to the root directory
+    point_name = "job_"+str(idx)
+    context.local_dir = os.path.join(root_local_dir, point_name)
+    point_remote_dir = os.path.join(root_remote_dir, point_name)
+    if not os.path.exists(context.local_dir):
+      os.mkdir(context.local_dir)
+    # export the point to a file
+    data_file_name = "idefixdata.csv"
+    data_file_path = os.path.join(context.local_dir, data_file_name)
+    with open(data_file_path, "w") as f:
+      # explicit dict conversion is needed for compatibility between Python versions
+      f.write(repr(dict(point)))
+    input_files.append(data_file_path)
+
+    #command_path = os.path.join(root_local_dir, "command.py")
+    #salome_parameters.job_type = "command_salome"
+    #salome_parameters.job_file = command_path
+
+    salome_parameters.in_files = input_files
+    salome_parameters.out_files = ["idefixresult.txt", "idefixerror.txt"]
+    salome_parameters.work_directory = point_remote_dir
+    salome_parameters.result_directory = context.local_dir
+  
+  def noRunFound(self, idx, point, context):
+    return True
+  
+  def runjob(self, idx, point, context):
+    """
+    Create, launch and wait for the end of the job.
+    """
+    import random
+    sleep_delay = random.randint(5, 15) #10
+    #launcher = pydefx.salome_proxy.getLauncher()
+    launcher = context.launcher
+    context.job_id = launcher.createJob(context.params.salome_parameters)
+    launcher.launchJob(context.job_id)
+    jobState = launcher.getJobState(context.job_id)
+    while jobState=="QUEUED" or jobState=="IN_PROCESS" or jobState=="RUNNING" :
+      time.sleep(sleep_delay)
+      jobState = launcher.getJobState(context.job_id)
+
+  def getResult(self, context):
+    """
+    Check the job state, fetch the result file.
+    """
+    #launcher = pydefx.salome_proxy.getLauncher()
+    launcher = context.launcher
+    jobState = launcher.getJobState(context.job_id)
+    error=""
+    result=None
+    if jobState != "FINISHED" :
+      error = "Job has not finished correctly."
+    else:
+      launcher.getJobResults(context.job_id, "")
+      error_file = os.path.join(context.local_dir, "idefixerror.txt")
+      result_file = os.path.join(context.local_dir, "idefixresult.txt")
+      with open(error_file, "r") as f:
+        error = f.read()
+      with open(result_file, "r") as f:
+        result_str = f.read()
+        result = eval(result_str)
+
+    return error, result
+
+def createExecutor(config):
+  return JobExecutor(config)
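
Every plugin follows the same contract: the module named by the "plugin"
config key must expose createExecutor(config) returning an object with
initialize() and evaluate(idx, point) -> (error, out_values). A minimal
custom plugin sketch (the computation is a placeholder):

  import traceback

  class JobExecutor:
    def __init__(self, config):
      self.config = config

    def initialize(self):
      pass # one-time setup before the first evaluation

    def evaluate(self, idx, point):
      error = None
      out_values = None
      try:
        out_values = sum(point.values()) # placeholder computation
      except Exception as e:
        error = str(e)
        traceback.print_exc()
      return error, out_values

  def createExecutor(config):
    return JobExecutor(config)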
diff --git a/src/pydefx/plugins/lightexecutor.py b/src/pydefx/plugins/lightexecutor.py
new file mode 100644 (file)
index 0000000..22f32a9
--- /dev/null
@@ -0,0 +1,52 @@
+#import pydefx
+import os
+import pickle
+import time
+import traceback
+import subprocess
+
+class Context:
+  def __init__(self):
+    #self.launcher = pydefx.salome_proxy.getLauncher() # getLauncher()
+    pass
+
+class JobExecutor:
+  def __init__(self, config):
+    self.config = config
+
+  def initialize(self):
+    """ This is executed before the first evaluation.
+    Put here global processing needed by all the evaluations, such as
+    copying common files.
+    """
+    pass
+
+  def evaluate(self, idx, point):
+    """ This is executed for every point to be evaluated.
+    """
+    context = Context()
+    error = None
+    out_values = None
+    studymodule=self.config["studymodule"]
+    #studymodule += ".py"
+    #with open(studymodule, "r") as study_file:
+      #study_string = study_file.read()
+    #try:
+      #exec(study_string)
+      #out_values = _exec(**inputvals)
+    #except Exception as e:
+      #error=str(e) 
+      #traceback.print_exc()
+    # another way:
+    import importlib
+    try:
+      idefixstudy=importlib.import_module(studymodule)
+      out_values=idefixstudy._exec(**point)
+    except Exception as e:
+      error=str(e) 
+      traceback.print_exc()
+    return error, out_values
+  
+
+def createExecutor(config):
+  return JobExecutor(config)
diff --git a/src/pydefx/plugins/localexecutor.py b/src/pydefx/plugins/localexecutor.py
new file mode 100644 (file)
index 0000000..7a0cf46
--- /dev/null
@@ -0,0 +1,90 @@
+#import pydefx
+import os
+import pickle
+import time
+import traceback
+import subprocess
+
+class Context:
+  def __init__(self):
+    #self.launcher = pydefx.salome_proxy.getLauncher() # getLauncher()
+    pass
+
+class JobExecutor:
+  def __init__(self, config):
+    self.config = config
+
+  def initialize(self):
+    """ This is executed before the first evaluation.
+    Put here global processing needed by all the evaluations, such as
+    copying common files.
+    """
+    pointeval = os.path.join(os.getcwd(), "pointeval.py")
+    os.chmod(pointeval, 0o755)
+
+  def evaluate(self, idx, point):
+    """ This is executed for every point to be evaluated.
+    """
+    context = Context()
+    error = None
+    out_values = None
+    try:
+      self.prepare(idx, point, context)
+      if self.noRunFound(idx, point, context):
+        self.runjob(idx, point, context)
+      error, out_values = self.getResult(context)
+    except Exception as e:
+      error = str(e)
+      traceback.print_exc()
+    return error, out_values
+  
+  def prepare(self, idx, point, context):
+    """
+    Define local and remote work directory.
+    Define job script.
+    """
+    root_dir = os.getcwd()
+    point_name = "job_"+str(idx)
+    context.local_dir = os.path.join(root_dir, point_name)
+    if not os.path.exists(context.local_dir):
+      os.mkdir(context.local_dir)
+    # export the point to a file
+    data_file_name = "idefixdata.csv"
+    data_file_path = os.path.join(context.local_dir, data_file_name)
+    with open(data_file_path, "w") as f:
+      # explicit dict conversion is needed for compatibility between Python versions
+      f.write(repr(dict(point)))
+
+  
+  def noRunFound(self, idx, point, context):
+    return True
+  
+  def runjob(self, idx, point, context):
+    """
+    Create, launch and wait for the end of the job.
+    """
+    # srun
+    #ntasks = self.config["tasksPerEval"]
+    pointeval = os.path.join(os.getcwd(), "pointeval.py")
+    #command = "srun --ntasks={} --nodes=1 --chdir={} {} ".format(
+                                                             #str(ntasks),
+                                                             #context.local_dir,
+                                                             #pointeval)
+    return_code = subprocess.check_call(pointeval, shell=True, cwd=context.local_dir)
+
+  def getResult(self, context):
+    """
+    Check the job state, fetch the result file.
+    """
+    error_file = os.path.join(context.local_dir, "idefixerror.txt")
+    result_file = os.path.join(context.local_dir, "idefixresult.txt")
+    with open(error_file, "r") as f:
+      error = f.read()
+    with open(result_file, "r") as f:
+      result_str = f.read()
+      result = eval(result_str)
+
+    return error, result
+
+def createExecutor(config):
+  return JobExecutor(config)
diff --git a/src/pydefx/plugins/mainjob.py b/src/pydefx/plugins/mainjob.py
new file mode 100644 (file)
index 0000000..b9cfe3f
--- /dev/null
@@ -0,0 +1,56 @@
+#! /usr/bin/env python3
+import json
+import importlib
+from multiprocessing import Pool
+import traceback
+
+class StartJob:
+  def __init__(self, executor):
+    self.executor = executor
+
+  def __call__(self, idx, in_values):
+    error=None
+    out_values=None
+    try:
+      error, out_values = self.executor.evaluate(idx, in_values)
+    except Exception as e:
+      error=str(e)
+      traceback.print_exc()
+    return idx, in_values, out_values, error
+
+class TerminateJob:
+  def __init__(self, manager):
+    self.manager = manager
+
+  def __call__(self, result):
+    # Without a try statement we may experience a deadlock in case of error.
+    try:
+      idx, in_values, out_values, error = result
+      if not error:
+        error = None
+      self.manager.addResult(idx, in_values, out_values, error)
+    except Exception as e:
+      traceback.print_exc()
+  
+if __name__ == '__main__':
+  with open("idefixconfig.json", "r") as f:
+    config = json.load(f)
+  plugin_module = importlib.import_module(config["plugin"])
+  executor = plugin_module.createExecutor(config)
+  # global initialization - work common to every evaluation.
+  executor.initialize()
+
+  itModuleName = config["sampleIterator"]
+  itModule = importlib.import_module(itModuleName)
+  sampleManager = itModule.SampleIterator()
+  sampleManager.writeHeaders()
+
+  nbbranches=config["nbbranches"]
+  pool = Pool(nbbranches)
+  runPoint = StartJob(executor)
+  endOk = TerminateJob(sampleManager)
+  for point in sampleManager:
+    pool.apply_async(runPoint, point, callback=endOk)
+  pool.close()
+  pool.join()
+  sampleManager.terminate()
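
The sample manager consumed above is expected to follow this contract,
inferred from its use in mainjob.py (method bodies elided, names of the
yielded values illustrative):

  class SampleIterator:
    def writeHeaders(self): ...
    def __iter__(self): ...   # yields (idx, in_values) tuples, unpacked by
                              # pool.apply_async as the StartJob arguments
    def addResult(self, idx, in_values, out_values, error): ...
    def terminate(self): ...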
diff --git a/src/pydefx/plugins/pointeval.py b/src/pydefx/plugins/pointeval.py
new file mode 100644 (file)
index 0000000..c652b26
--- /dev/null
@@ -0,0 +1,37 @@
+#! /usr/bin/env python3
+import traceback
+import os
+
+data_file_name = "idefixdata.csv"
+study_module = "idefixstudy.py"
+error_result = "idefixerror.txt"
+value_result = "idefixresult.txt"
+traceback_result = "idefixtraceback.txt"
+
+with open(data_file_name, "r") as f:
+  values = f.read()
+inputvals = eval(values)
+
+error=""
+result=None
+old_dir = os.getcwd()
+
+try:
+  os.chdir("..") # go to the common root directory
+  with open(study_module, "r") as study_file:
+    study_string = study_file.read()
+  exec(study_string)
+  result = _exec(**inputvals)
+except Exception as e:
+  error=str(e)
+  os.chdir(old_dir) # back to the current case job directory
+  with open(traceback_result, "w") as f:
+    traceback.print_exc(file=f)
+
+os.chdir(old_dir) # back to the current case job directory
+
+with open(error_result, "w") as f:
+  f.write(error)
+
+with open(value_result, "w") as f:
+  f.write(repr(result))
diff --git a/src/pydefx/plugins/srunexecutor.py b/src/pydefx/plugins/srunexecutor.py
new file mode 100644 (file)
index 0000000..a779c92
--- /dev/null
@@ -0,0 +1,89 @@
+#import pydefx
+import os
+import pickle
+import time
+import traceback
+import subprocess
+
+class Context:
+  def __init__(self):
+    #self.launcher = pydefx.salome_proxy.getLauncher() # getLauncher()
+    pass
+
+class JobExecutor:
+  def __init__(self, config):
+    self.config = config
+
+  def initialize(self):
+    """ 
+    Execute prescript.
+    """
+    pointeval = os.path.join(os.getcwd(), "pointeval.py")
+    os.chmod(pointeval, 0o755)
+
+  def evaluate(self, idx, point):
+    """ This is executed for every point to be evaluated.
+    """
+    context = Context()
+    error = None
+    out_values = None
+    try:
+      self.prepare(idx, point, context)
+      if self.noRunFound(idx, point, context):
+        self.runjob(idx, point, context)
+      error, out_values = self.getResult(context)
+    except Exception as e:
+      error = str(e)
+      traceback.print_exc()
+    return error, out_values
+  
+  def prepare(self, idx, point, context):
+    """
+    Define local and remote work directory.
+    Define job script.
+    """
+    root_dir = os.getcwd()
+    point_name = "job_"+str(idx)
+    context.local_dir = os.path.join(root_dir, point_name)
+    if not os.path.exists(context.local_dir):
+      os.mkdir(context.local_dir)
+    # export the point to a file
+    data_file_name = "idefixdata.csv"
+    data_file_path = os.path.join(context.local_dir, data_file_name)
+    with open(data_file_path, "w") as f:
+      # explicit dict conversion is needed for compatibility between Python versions
+      f.write(repr(dict(point)))
+
+  
+  def noRunFound(self, idx, point, context):
+    return True
+  
+  def runjob(self, idx, point, context):
+    """
+    Create, launch and wait for the end of the job.
+    """
+    # srun
+    ntasks = self.config["tasksPerEval"]
+    pointeval = os.path.join(os.getcwd(), "pointeval.py")
+    command = "srun --ntasks={} --nodes=1 --chdir={} {} ".format(
+                                                             str(ntasks),
+                                                             context.local_dir,
+                                                             pointeval)
+    return_code = subprocess.call(command, shell=True)
+
+  def getResult(self, context):
+    """
+    Check the job state, fetch the result file.
+    """
+    error_file = os.path.join(context.local_dir, "idefixerror.txt")
+    result_file = os.path.join(context.local_dir, "idefixresult.txt")
+    with open(error_file, "r") as f:
+      error = f.read()
+    with open(result_file, "r") as f:
+      result_str = f.read()
+      result = eval(result_str)
+
+    return error, result
+
+def createExecutor(config):
+  return JobExecutor(config)
diff --git a/src/pydefx/pystudy.py b/src/pydefx/pystudy.py
index bf7cf194a2020c15647e77ec150885979e934f41..c2d59286e0eb1660944c92549e65a494968e6cdd 100644 (file)
@@ -193,9 +193,9 @@ class PyStudy:
         if exit_code == "0" :
           errorIfNoResults = True # we expect to have full results
         else:
-          errorMessage = "An error occured during the execution of the YACS schema."
+          errorMessage = "An error occurred during the execution of the job."
       else:
-        errorMessage = "Failed to get the exit code of the YACS schema execution."
+        errorMessage = "Failed to get the exit code of the job."
 
     elif state == "RUNNING" or state == "PAUSED" or state == "ERROR" :
       # partial results may be available
diff --git a/src/pydefx/slurm/CMakeLists.txt b/src/pydefx/slurm/CMakeLists.txt
deleted file mode 100644 (file)
index 78b99a1..0000000
+++ /dev/null
@@ -1,7 +0,0 @@
-SET(SCHEMA_FILES
-  executor.py
-  mainjob.py
-  pointeval.py
-  )
-
-INSTALL(FILES ${SCHEMA_FILES} DESTINATION ${SALOME_INSTALL_PYTHON}/pydefx/slurm)
diff --git a/src/pydefx/slurm/executor.py b/src/pydefx/slurm/executor.py
deleted file mode 100644 (file)
index a779c92..0000000
+++ /dev/null
@@ -1,89 +0,0 @@
-#import pydefx
-import os
-import pickle
-import time
-import traceback
-import subprocess
-
-class Context:
-  def __init__(self):
-    #self.launcher = pydefx.salome_proxy.getLauncher() # getLauncher()
-    pass
-
-class JobExecutor:
-  def __init__(self, config):
-    self.config = config
-
-  def initialize(self):
-    """ 
-    Execute prescript.
-    """
-    pointeval = os.path.join(os.getcwd(), "pointeval.py")
-    os.chmod(pointeval, 0o755)
-
-  def evaluate(self, idx, point):
-    """ This is executed for every point to be evaluated.
-    """
-    context = Context()
-    error = None
-    out_values = None
-    try:
-      self.prepare(idx, point, context)
-      if self.noRunFound(idx, point, context):
-        self.runjob(idx, point, context)
-      error, out_values = self.getResult(context)
-    except Exception as e:
-      error = str(e)
-      traceback.print_exc()
-    return error, out_values
-  
-  def prepare(self, idx, point, context):
-    """
-    Define local and remote work directory.
-    Define job script.
-    """
-    root_dir = os.getcwd()
-    point_name = "job_"+str(idx)
-    context.local_dir = os.path.join(root_dir, point_name)
-    if not os.path.exists(context.local_dir):
-      os.mkdir(context.local_dir)
-    # export the point to a file
-    data_file_name = "idefixdata.csv"
-    data_file_path = os.path.join(context.local_dir, data_file_name)
-    with open(data_file_path, "w") as f:
-      # explicit dict convertion is needed for compatibility between python versions
-      f.write(repr(dict(point)))
-
-  
-  def noRunFound(self, idx, point, context):
-    return True
-  
-  def runjob(self, idx, point, context):
-    """
-    Create, launch and wait for the end of the job.
-    """
-    # srun
-    ntasks = self.config["tasksPerEval"]
-    pointeval = os.path.join(os.getcwd(), "pointeval.py")
-    command = "srun --ntasks={} --nodes=1 --chdir={} {} ".format(
-                                                             str(ntasks),
-                                                             context.local_dir,
-                                                             pointeval)
-    return_code = subprocess.call(command, shell=True)
-
-  def getResult(self, context):
-    """
-    Check the job state, fetch the result file.
-    """
-    error_file = os.path.join(context.local_dir, "idefixerror.txt")
-    result_file = os.path.join(context.local_dir, "idefixresult.txt")
-    with open(error_file, "r") as f:
-      error = f.read()
-    with open(result_file, "r") as f:
-      result_str = f.read()
-      result = eval(result_str)
-
-    return error, result
-
-def createExecutor(config):
-  return JobExecutor(config)
diff --git a/src/pydefx/slurm/mainjob.py b/src/pydefx/slurm/mainjob.py
deleted file mode 100644 (file)
index b9cfe3f..0000000
+++ /dev/null
@@ -1,56 +0,0 @@
-#! /usr/bin/env python3
-import json
-import importlib
-from multiprocessing import Pool
-import traceback
-
-class StartJob:
-  def __init__(self, executor):
-    self.executor = executor
-
-  def __call__(self, idx, in_values):
-    error=None
-    out_values=None
-    try:
-      error, out_values = self.executor.evaluate(idx, in_values)
-    except Exception as e:
-      error=str(e)
-      traceback.print_exc()
-    return idx, in_values, out_values, error
-
-class TerminateJob:
-  def __init__(self, manager):
-    self.manager = manager
-
-  def __call__(self, result):
-    # without try statement we may experience deadlock in case of error.
-    try:
-      idx, in_values, out_values, error = result
-      if not error:
-        error = None
-      self.manager.addResult(idx, in_values, out_values, error)
-    except Exception as e:
-      traceback.print_exc()
-  
-if __name__ == '__main__':
-  with open("idefixconfig.json", "r") as f:
-    config = json.load(f)
-  plugin_module = importlib.import_module(config["plugin"])
-  executor = plugin_module.createExecutor(config)
-  # global initialization - commun work for every evaluation.
-  executor.initialize()
-
-  itModuleName = config["sampleIterator"]
-  itModule = importlib.import_module(itModuleName)
-  sampleManager = itModule.SampleIterator()
-  sampleManager.writeHeaders()
-
-  nbbranches=config["nbbranches"]
-  pool = Pool(nbbranches)
-  runPoint = StartJob(executor)
-  endOk = TerminateJob(sampleManager)
-  for point in sampleManager:
-    pool.apply_async(runPoint, point, callback=endOk)
-  pool.close()
-  pool.join()
-  sampleManager.terminate()
diff --git a/src/pydefx/slurm/pointeval.py b/src/pydefx/slurm/pointeval.py
deleted file mode 100644 (file)
index c652b26..0000000
+++ /dev/null
@@ -1,37 +0,0 @@
-#! /usr/bin/env python3
-import traceback
-import os
-
-data_file_name = "idefixdata.csv"
-study_module = "idefixstudy.py"
-error_result = "idefixerror.txt"
-value_result = "idefixresult.txt"
-traceback_result = "idefixtraceback.txt"
-
-with open(data_file_name, "r") as f:
-  values = f.read()
-inputvals = eval(values)
-
-error=""
-result=None
-old_dir = os.getcwd()
-
-try:
-  os.chdir("..") # go to commun root directory
-  with open(study_module, "r") as study_file:
-    study_string = study_file.read()
-  exec(study_string)
-  result = _exec(**inputvals)
-except Exception as e:
-  error=str(e)
-  os.chdir(old_dir) # back to the current case job directory
-  with open(traceback_result, "w") as f:
-    traceback.print_exc(file=f)
-
-os.chdir(old_dir) # back to the current case job directory
-
-with open(error_result, "w") as f:
-  f.write(error)
-
-with open(value_result, "w") as f:
-  f.write(repr(result))
diff --git a/src/pydefx/slurmbuilder.py b/src/pydefx/slurmbuilder.py
index 3f8eae2fb2302162bad0a3f8902140522a7ad03f..1388d4c806e29ee2852217912df67ff5aaed903f 100644 (file)
 import inspect
 import pathlib
 import os
+from .allpurposebuilder import AllPurposeBuilder
 
-class SlurmBuilder:
+class SlurmBuilder(AllPurposeBuilder):
   def __init__(self, executor = None, pointEval = None, mainJob = None):
     filename = inspect.getframeinfo(inspect.currentframe()).filename
     install_root_directory = pathlib.Path(filename).resolve().parent
-    install_files_directory = os.path.join(install_root_directory, "slurm")
+    install_files_directory = os.path.join(install_root_directory, "plugins")
 
     if executor is None:
-      executor = os.path.join(install_files_directory, "executor.py")
-    self.executor = executor
-
-    if pointEval is None:
-      pointEval = os.path.join(install_files_directory, "pointeval.py")
-    self.pointEval = pointEval
-
-    if mainJob is None:
-      mainJob = os.path.join(install_files_directory, "mainjob.py")
-    self.mainJob = mainJob
-
-  def getMainJob(self):
-    return self.mainJob
-
-  def getExecutor(self):
-    return self.executor
-
-  def getPointEval(self):
-    return self.pointEval
-
-  def getPluginName(self):
-    basename = os.path.basename(self.executor)
-    if not basename.endswith(".py"):
-      raise Exception("File name {} does not end with '.py'.".format(
-                                                                 self.executor))
-    return basename[:-3]
+      executor = os.path.join(install_files_directory, "srunexecutor.py")
+    super().__init__(executor, pointEval, mainJob)
diff --git a/src/pyexample/multijob/launch.py b/src/pyexample/multijob/launch.py
new file mode 100755 (executable)
index 0000000..f90755f
--- /dev/null
@@ -0,0 +1,31 @@
+import pydefx
+import os
+
+myParams = pydefx.Parameters()
+myParams.configureResource("eole")
+#myParams.createResultDirectory("/tmp")
+myParams.nb_branches = 4
+myParams.salome_parameters.resource_required.nb_proc = 4
+myParams.salome_parameters.result_directory=os.path.join(os.getcwd(),"rundir")
+myParams.salome_parameters.work_directory="/scratch/I35256/workingdir/testjob/"
+myParams.salome_parameters.local_directory = os.getcwd()
+myParams.salome_parameters.in_files=["template_jdd.txt", "mysolver.py"]
+
+pyScript = os.path.join(os.getcwd(), "mystudy.py")
+
+myScript = pydefx.PyScript()
+myScript.loadFile(pyScript)
+
+mySample = myScript.CreateEmptySample()
+mydata = {"x":range(10)}
+mySample.setInputValues(mydata)
+
+myStudy = pydefx.PyStudy()
+myStudy.createNewJob(myScript, mySample, myParams)
+myStudy.launch()
+
+myStudy.getJobState()
+myStudy.wait()
+print(myStudy.getResult())
+print(myStudy.sample)
+#print(myStudy.global_result)
diff --git a/src/pyexample/multijob/launch_local_basic.py b/src/pyexample/multijob/launch_local_basic.py
new file mode 100755 (executable)
index 0000000..13687dc
--- /dev/null
@@ -0,0 +1,40 @@
+import pydefx
+import os
+
+myParams = pydefx.Parameters()
+myParams.configureResource("localhost")
+#myParams.createResultDirectory("/tmp")
+myParams.nb_branches = 4
+myParams.salome_parameters.resource_required.nb_proc = 1
+#myParams.salome_parameters.result_directory=os.path.join(os.getcwd(),"runbasic")
+#myParams.salome_parameters.work_directory="/scratch/I35256/workingdir/test_multijob/"
+myParams.salome_parameters.work_directory=os.path.join(os.getcwd(),"runbasic")
+myParams.salome_parameters.local_directory = os.getcwd()
+#myParams.salome_parameters.in_files=["template_jdd.txt", "mysolver.py"]
+
+#pyScript = os.path.join(os.getcwd(), "mystudy.py")
+pyScript = """
+def _exec(a,b):
+  d = a / b
+  return d
+"""
+
+myScript = pydefx.PyScript()
+#myScript.loadFile(pyScript)
+myScript.loadString(pyScript)
+
+mySample = myScript.CreateEmptySample()
+#mydata = {"x":range(10)}
+mydata = {"a":[x // 10 for x in range(100)],
+          "b":[x % 10 for x in range(100)]}
+mySample.setInputValues(mydata)
+
+myStudy = pydefx.LocalStudy(schemaBuilder=pydefx.LocalBuilder("lightexecutor"))
+myStudy.createNewJob(myScript, mySample, myParams)
+myStudy.launch()
+
+myStudy.getJobState()
+myStudy.wait()
+print(myStudy.getResult())
+print(myStudy.sample)
+#print(myStudy.global_result)
diff --git a/src/pyexample/multijob/launch_multi.py b/src/pyexample/multijob/launch_multi.py
new file mode 100755 (executable)
index 0000000..2fa057f
--- /dev/null
@@ -0,0 +1,31 @@
+import pydefx
+import os
+
+myParams = pydefx.Parameters()
+myParams.configureResource("eole")
+#myParams.createResultDirectory("/tmp")
+myParams.nb_branches = 4
+myParams.salome_parameters.resource_required.nb_proc = 1
+myParams.salome_parameters.result_directory=os.path.join(os.getcwd(),"runmulti")
+myParams.salome_parameters.work_directory="/scratch/I35256/workingdir/test_multijob/"
+myParams.salome_parameters.local_directory = os.getcwd()
+myParams.salome_parameters.in_files=["template_jdd.txt", "mysolver.py"]
+
+pyScript = os.path.join(os.getcwd(), "mystudy.py")
+
+myScript = pydefx.PyScript()
+myScript.loadFile(pyScript)
+
+mySample = myScript.CreateEmptySample()
+mydata = {"x":range(10)}
+mySample.setInputValues(mydata)
+
+myStudy = pydefx.MultiJobStudy()
+myStudy.createNewJob(myScript, mySample, myParams)
+myStudy.launch()
+
+myStudy.getJobState()
+myStudy.wait()
+print(myStudy.getResult())
+print(myStudy.sample)
+#print(myStudy.global_result)
diff --git a/src/pyexample/multijob/launch_srun.py b/src/pyexample/multijob/launch_srun.py
new file mode 100755 (executable)
index 0000000..b47e2f3
--- /dev/null
@@ -0,0 +1,31 @@
+import pydefx
+import os
+
+myParams = pydefx.Parameters()
+myParams.configureResource("eole")
+#myParams.createResultDirectory("/tmp")
+myParams.nb_branches = 4
+myParams.salome_parameters.resource_required.nb_proc = 4
+myParams.salome_parameters.result_directory=os.path.join(os.getcwd(),"runsrun")
+myParams.salome_parameters.work_directory="/scratch/I35256/workingdir/test_srunjob/"
+myParams.salome_parameters.local_directory = os.getcwd()
+myParams.salome_parameters.in_files=["template_jdd.txt", "mysolver.py"]
+
+pyScript = os.path.join(os.getcwd(), "mystudy.py")
+
+myScript = pydefx.PyScript()
+myScript.loadFile(pyScript)
+
+mySample = myScript.CreateEmptySample()
+mydata = {"x":range(10)}
+mySample.setInputValues(mydata)
+
+myStudy = pydefx.SlurmStudy()
+myStudy.createNewJob(myScript, mySample, myParams)
+myStudy.launch()
+
+myStudy.getJobState()
+myStudy.wait()
+print(myStudy.getResult())
+print(myStudy.sample)
+#print(myStudy.global_result)
diff --git a/src/pyexample/multijob/mysolver.py b/src/pyexample/multijob/mysolver.py
new file mode 100755 (executable)
index 0000000..17fe170
--- /dev/null
@@ -0,0 +1,11 @@
+#! /usr/bin/env python3
+import argparse
+if __name__ == '__main__':
+  parser = argparse.ArgumentParser(description="Fake solver.")
+  parser.add_argument("jdd", help="Input file.")
+  parser.add_argument("resultat", help="Output file.")
+  args = parser.parse_args()
+  with open(args.jdd, 'r') as f:
+    in_value = float(f.read())
+  with open(args.resultat, 'w') as f:
+    f.write(str(in_value * in_value))
diff --git a/src/pyexample/multijob/mystudy.py b/src/pyexample/multijob/mystudy.py
new file mode 100644 (file)
index 0000000..63b1917
--- /dev/null
@@ -0,0 +1,63 @@
+import os
+def root_dir():
+  return os.getcwd()
+
+#def case_dir(*args):
+  #import hashlib
+  #h = hashlib.md5(repr(args).encode('utf-8'))
+  #return os.path.join(root_dir(), h.hexdigest())
+
+def case_dir(v):
+  case_name = "c_"+repr(v)
+  return os.path.join(root_dir(), case_name)
+
+class Study:
+  def __init__(self, value):
+    self.value = value
+    self.caseDir = case_dir(self.value)
+    self.rootDir = root_dir()
+
+  def getResults(self):
+    result_file = os.path.join(self.caseDir, "result.txt")
+    with open(result_file, 'r') as f:
+      result = float(f.read())
+    return result
+
+  def caseExists(self):
+    ok = True
+    if os.path.isdir(self.caseDir):
+      try:
+        self.getResults()
+        ok = True
+      except Exception:
+        ok = False
+    else:
+      ok = False
+    return ok
+
+  def prepareCase(self):
+    if not os.path.isdir(self.caseDir):
+      os.mkdir(self.caseDir)
+    template_file = os.path.join(self.rootDir, "template_jdd.txt")
+    case_file = os.path.join(self.caseDir, "jdd.txt")
+    with open(template_file,'r') as f:
+      filedata = f.read()
+    filedata = filedata.format(**{'value':repr(self.value)})
+    with open(case_file,'w') as f:
+      f.write(filedata)
+
+  def runCase(self):
+    import subprocess
+    command = "{} {} {}".format(
+              os.path.join(self.rootDir, "mysolver.py"),
+              os.path.join(self.caseDir, "jdd.txt"),
+              os.path.join(self.caseDir, "result.txt"))
+    subprocess.run(command, shell=True)
+
+def _exec(x):
+  e = Study(x)
+  if not e.caseExists():
+    e.prepareCase()
+    e.runCase()
+  r = e.getResults()
+  return r
diff --git a/src/pyexample/multijob/template_jdd.txt b/src/pyexample/multijob/template_jdd.txt
new file mode 100644 (file)
index 0000000..4672706
--- /dev/null
@@ -0,0 +1 @@
+{value}