{
public:
PyStudyJob(const std::string& pymodule_name, const std::string& pyclass_name);
- PyStudyJob(py2cpp::PyPtr& pyStudyObj);
+ PyStudyJob(py2cpp::PyPtr& pyStudyObj);
PyStudyJob();
virtual ~PyStudyJob();
virtual std::string state();
, _sample(sample)
{
createNewJob(fnScript, params);
- /*if(_lastError.empty()) // no errors during parent construction
- {
- try
- {
- py2cpp::PyPtr pySample = createPySample(sample);
- py2cpp::PyFunction pyFn;
- pyFn.loadExp(_pyStudy, "createNewJob");
- pyFn(fnScript, pySample, params);
- }
- catch(std::exception& e)
- {
- _lastError = "An error occured while creating the job.\n";
- _lastError += e.what();
- }
- }*/
}
TPyStudyJob(py2cpp::PyPtr& pyStudyObj,
}
}
}
-
+
virtual ~TPyStudyJob(){}
virtual bool fetch()
{
#
# NOTE(review): the leading '-' / '+' on the ADD_TEST lines below are
# unified-diff residue, not CMake syntax.  The recorded change drops the
# explicit "python" interpreter so ${SALOME_TEST_DRIVER} is invoked
# directly (presumably it resolves its own interpreter — confirm against
# the SALOME test driver docs).
SET(TEST_NAME ${COMPONENT_NAME}_SampleTest)
-ADD_TEST(${TEST_NAME} python ${SALOME_TEST_DRIVER} ${TIMEOUT} ./SampleTest)
+ADD_TEST(${TEST_NAME} ${SALOME_TEST_DRIVER} ${TIMEOUT} ./SampleTest)
# Label the test with the component name so `ctest -L <component>` selects it.
SET_TESTS_PROPERTIES(${TEST_NAME} PROPERTIES
LABELS "${COMPONENT_NAME}"
)
SET(TEST_NAME ${COMPONENT_NAME}_StudyGeneralTest)
-ADD_TEST(${TEST_NAME} python ${SALOME_TEST_DRIVER} ${TIMEOUT} ./StudyGeneralTest)
+ADD_TEST(${TEST_NAME} ${SALOME_TEST_DRIVER} ${TIMEOUT} ./StudyGeneralTest)
SET_TESTS_PROPERTIES(${TEST_NAME} PROPERTIES
LABELS "${COMPONENT_NAME}"
)
SET(TEST_NAME ${COMPONENT_NAME}_StudyRestartTest)
-ADD_TEST(${TEST_NAME} python ${SALOME_TEST_DRIVER} ${TIMEOUT} ./StudyRestartTest)
+ADD_TEST(${TEST_NAME} ${SALOME_TEST_DRIVER} ${TIMEOUT} ./StudyRestartTest)
SET_TESTS_PROPERTIES(${TEST_NAME} PROPERTIES
LABELS "${COMPONENT_NAME}"
)
// --- Add a listener that collects test result
CPPUNIT_NS::TestResultCollector result;
- controller.addListener( &result );
+ controller.addListener( &result );
// --- Add a listener that print dots as test run.
#ifdef WIN32
#else
CPPUNIT_NS::BriefTestProgressListener progress;
#endif
- controller.addListener( &progress );
+ controller.addListener( &progress );
// --- Get the top level suite from the registry
testFile.open("test.log", std::ios::out | std::ios::app);
testFile << "------ Idefix test log:" << std::endl;
CPPUNIT_NS::CompilerOutputter outputter( &result, testFile );
- outputter.write();
+ outputter.write();
// --- Run the tests.
# set the parameters of the local job
self.params.salome_parameters.resource_required.name = "localhost"
self.params.salome_parameters.job_type = "command_salome" #"python_salome"
-
+
self.params.salome_parameters.work_directory = main_job_work_dir
self.params.createTmpResultDirectory()
result_directory = self.params.salome_parameters.result_directory
error = str(e)
traceback.print_exc()
return error, out_values
-
+
def prepare(self, idx, point, context):
"""
Define local and remote work directory.
salome_parameters.out_files = ["idefixresult.txt", "idefixerror.txt"]
salome_parameters.work_directory = point_remote_dir
salome_parameters.result_directory = context.local_dir
-
+
def noRunFound(self, idx, point, context):
    """Hook called when the driver asks if an earlier run is missing for `point`.

    Always answers True: this executor keeps no cache of prior evaluations,
    so every point is (re)launched from scratch.
    NOTE(review): semantics of idx/point/context are defined by the calling
    study driver — confirm against the caller.
    """
    return True
-
+
def runjob(self, idx, point, context):
"""
Create, launch and wait for the end of the job.
-#import pydefx
import os
import pickle
import time
error = None
out_values = None
studymodule=self.config["studymodule"]
- #studymodule += ".py"
- #with open(studymodule, "r") as study_file:
- #study_string = study_file.read()
- #try:
- #exec(study_string)
- #out_values = _exec(**inputvals)
- #except Exception as e:
- #error=str(e)
- #traceback.print_exc()
- # another way:
import importlib
try:
idefixstudy=importlib.import_module(studymodule)
out_values=idefixstudy._exec(**point)
except Exception as e:
- error=str(e)
+ error=str(e)
traceback.print_exc()
return error, out_values
-
def createExecutor(config):
    """Factory entry point: build the JobExecutor for this configuration.

    `config` is passed through unchanged to JobExecutor; its expected keys
    are defined by that class — confirm there.
    """
    executor = JobExecutor(config)
    return executor
-#import pydefx
import os
import pickle
import time
class Context:
    # Per-campaign execution context handed to prepare()/runjob().
    # Currently stateless: the launcher handle was dropped (the '-' line
    # below is diff residue showing the removed attribute).
    def __init__(self):
-        #self.launcher = pydefx.salome_proxy.getLauncher() # getLauncher()
        pass
class JobExecutor:
error = str(e)
traceback.print_exc()
return error, out_values
-
+
def prepare(self, idx, point, context):
"""
Define local and remote work directory.
# explicit dict convertion is needed for compatibility between python versions
f.write(repr(dict(point)))
-
def noRunFound(self, idx, point, context):
    """Report that no earlier evaluation exists for `point`.

    Always True: this executor never reuses a cached run, so the driver
    launches every point.  NOTE(review): idx/point/context semantics come
    from the study driver — confirm against the caller.
    """
    return True
-
+
def runjob(self, idx, point, context):
"""
Create, launch and wait for the end of the job.
"""
- # srun
- #ntasks = self.config["tasksPerEval"]
pointeval = os.path.join(os.getcwd(), "pointeval.py")
- #command = "srun --ntasks={} --nodes=1 --chdir={} {} ".format(
- #str(ntasks),
- #context.local_dir,
- #pointeval)
return_code = subprocess.check_call(pointeval, shell=True, cwd=context.local_dir)
def getResult(self, context):
self.manager.addResult(idx, in_values, out_values, error)
except Exception as e:
traceback.print_exc()
-
+
if __name__ == '__main__':
with open("idefixconfig.json", "r") as f:
config = json.load(f)
-#import pydefx
import os
import pickle
import time
self.config = config
def initialize(self):
- """
+ """
Execute prescript.
"""
pointeval = os.path.join(os.getcwd(), "pointeval.py")
error = str(e)
traceback.print_exc()
return error, out_values
-
+
def prepare(self, idx, point, context):
"""
Define local and remote work directory.
# explicit dict convertion is needed for compatibility between python versions
f.write(repr(dict(point)))
-
def noRunFound(self, idx, point, context):
    """Report that no earlier evaluation exists for `point`.

    Always True: no result caching is implemented here, so each point is
    evaluated afresh.  NOTE(review): idx/point/context semantics come from
    the study driver — confirm against the caller.
    """
    return True
-
+
def runjob(self, idx, point, context):
"""
Create, launch and wait for the end of the job.
os.makedirs(main_job_work_dir)
# set the parameters of the local job
self.params.salome_parameters.job_type = self.jobType()
-
+
result_directory = self.params.salome_parameters.result_directory
# export sample to result_directory
inputFiles = self.sampleManager.prepareRun(self.sample, result_directory)
def _exec(n):
# get the job parameters
salome_parameters = pydefx.configuration.loadJobConfig()
-
+
launcher = pydefx.salome_proxy.getLauncher() # CORBA or not CORBA
-
+
# have a different working directory for each computation
resource = salome_parameters.resource_required.name
default_wd = pydefx.configuration.defaultWorkingDir(resource)
new_wd = os.path.join(default_wd, "myjob_"+str(n))
salome_parameters.work_directory = new_wd
-
+
# create and launch the job
job_id = launcher.createJob(salome_parameters)
launcher.launchJob(job_id)
-
+
# wait for the end of the job
jobState = launcher.getJobState(job_id)
while jobState != "FINISHED" and jobState != "FAILED" :