# Add all test subdirs
SUBDIRS( Launcher
+ Launcher_SWIG
LifeCycleCORBA_SWIG
NamingService
SALOMELocalTrace
#include "SALOME_Launcher_Handler.hxx"
#include "Launcher.hxx"
#include "Launcher_Job_Command.hxx"
+#include "Launcher_Job_YACSFile.hxx"
+#include "Launcher_Job_PythonSALOME.hxx"
+#include "Launcher_Job_CommandSALOME.hxx"
#include "Launcher_XML_Persistence.hxx"
using namespace std;
#ifdef WITH_LIBBATCH
std::map<int, Launcher::Job *>::const_iterator it_job;
for(it_job = _launcher_job_map.begin(); it_job != _launcher_job_map.end(); it_job++)
- it_job->second->decrRef();
+ delete it_job->second;
std::map <int, Batch::BatchManager * >::const_iterator it1;
for(it1=_batchmap.begin();it1!=_batchmap.end();it1++)
delete it1->second;
if (it_job == _launcher_job_map.end())
{
_launcher_job_map[new_job->getNumber()] = new_job;
- new_job->incrRef();
}
else
{
LAUNCHER_MESSAGE("New Job created");
}
+//=============================================================================
+/*!
+ * Create a job from the given parameter set and register it in the launcher.
+ * The job_type field selects the concrete Launcher::Job subclass to build.
+ * Ownership of the created job is transferred to Launcher_cpp, which will
+ * destroy it (see createJob(Launcher::Job*) and removeJob()).
+ *
+ * \param job_parameters full description of the job (type, files, resource
+ *                       requirements, batch options, ...)
+ * \return the number (id) assigned to the newly created job
+ * \throw LauncherException if job_type is unknown or a parameter is rejected
+ *        by one of the setters below
+ */
+//=============================================================================
+int
+Launcher_cpp::createJob(const JobParameters_cpp& job_parameters)
+{
+  std::string job_type = job_parameters.job_type;
+  Launcher::Job * new_job; // It is Launcher_cpp that is going to destroy it
+
+  if (job_type == Launcher::Job_Command::TYPE_NAME)
+    new_job = new Launcher::Job_Command();
+  else if (job_type == Launcher::Job_CommandSALOME::TYPE_NAME)
+    new_job = new Launcher::Job_CommandSALOME();
+  else if (job_type == Launcher::Job_YACSFile::TYPE_NAME)
+    new_job = new Launcher::Job_YACSFile();
+  else if (job_type == Launcher::Job_PythonSALOME::TYPE_NAME)
+    new_job = new Launcher::Job_PythonSALOME();
+  else
+  {
+    std::string message("Launcher_cpp::createJob: bad job type: ");
+    message += job_type;
+    throw LauncherException(message.c_str());
+  }
+
+  // NOTE(review): if one of the setters below throws, new_job leaks; consider
+  // holding it in a std::unique_ptr until createJob(new_job) takes ownership.
+
+  // Name
+  new_job->setJobName(job_parameters.job_name);
+
+  // Directories
+  new_job->setWorkDirectory(job_parameters.work_directory);
+  new_job->setLocalDirectory(job_parameters.local_directory);
+  new_job->setResultDirectory(job_parameters.result_directory);
+
+  // Parameters for COORM
+  new_job->setLauncherFile(job_parameters.launcher_file);
+  new_job->setLauncherArgs(job_parameters.launcher_args);
+
+  // Job File
+  new_job->setJobFile(job_parameters.job_file);
+  new_job->setPreCommand(job_parameters.pre_command);
+
+  // Files — iterate by const reference to avoid copying every file name
+  new_job->setEnvFile(job_parameters.env_file);
+  for(const std::string& param : job_parameters.in_files)
+    new_job->add_in_file(param);
+  for(const std::string& param : job_parameters.out_files)
+    new_job->add_out_file(param);
+
+  new_job->setMaximumDuration(job_parameters.maximum_duration);
+  new_job->setQueue(job_parameters.queue);
+  new_job->setPartition(job_parameters.partition);
+  new_job->setExclusive(job_parameters.exclusive);
+  new_job->setMemPerCpu(job_parameters.mem_per_cpu);
+  new_job->setWCKey(job_parameters.wckey);
+  new_job->setExtraParams(job_parameters.extra_params);
+
+  // Resources requirements
+  new_job->setResourceRequiredParams(job_parameters.resource_required);
+
+  // Adding specific parameters to the job
+  for (const auto& it_specific : job_parameters.specific_parameters)
+    new_job->addSpecificParameter(it_specific.first, it_specific.second);
+
+  new_job->checkSpecificParameters();
+
+  // Registers the job in the launcher's job map and assigns its number;
+  // from here on the launcher owns new_job.
+  createJob(new_job);
+
+  return new_job->getNumber();
+}
+
//=============================================================================
/*!
* Launch a job
* Get job state
*/
//=============================================================================
-const char *
+std::string
Launcher_cpp::getJobState(int job_id)
{
LAUNCHER_MESSAGE("Get job state");
throw LauncherException(ex.message.c_str());
}
- return state.c_str();
+ return state;
}
//=============================================================================
* Get job assigned hostnames
*/
//=============================================================================
-const char *
+std::string
Launcher_cpp::getAssignedHostnames(int job_id)
{
LAUNCHER_MESSAGE("Get job assigned hostnames");
Launcher::Job * job = findJob(job_id);
std::string assigned_hostnames = job->getAssignedHostnames();
- return assigned_hostnames.c_str();
+ return assigned_hostnames;
}
//=============================================================================
}
it_job->second->removeJob();
- it_job->second->decrRef();
+ delete it_job->second;
_launcher_job_map.erase(it_job);
}
Launcher_cpp::restoreJob(const std::string& dumpedJob)
{
LAUNCHER_MESSAGE("restore Job");
- auto JobDel = [] (Launcher::Job *job) { if(job) job->decrRef(); };
- std::unique_ptr<Launcher::Job, decltype(JobDel)> new_job(nullptr,JobDel);
+ Launcher::Job* new_job(nullptr);
int jobId = -1;
try
{
{
- new_job.reset(Launcher::XML_Persistence::createJobFromString(dumpedJob));
+ new_job = Launcher::XML_Persistence::createJobFromString(dumpedJob);
}
- if(new_job.get())
+ if(new_job)
{
- jobId = addJob(new_job.get());
+ jobId = addJob(new_job);
}
+ else
+ LAUNCHER_INFOS("Failed to create a new job.");
}
catch(const LauncherException &ex)
{
return jobId;
}
+//=============================================================================
+/*!
+ * Return a copy of the parameters of an existing job.
+ * \param job_id id of the job to query (findJob throws if it does not exist)
+ * \return a JobParameters_cpp structure filled from the job
+ */
+//=============================================================================
+JobParameters_cpp
+Launcher_cpp::getJobParameters(int job_id)
+{
+  Launcher::Job * job = findJob(job_id);
+
+  JobParameters_cpp params;
+
+  // Identification, scripts and files
+  params.job_name         = job->getJobName();
+  params.job_type         = job->getJobType();
+  params.job_file         = job->getJobFile();
+  params.env_file         = job->getEnvFile();
+  params.pre_command      = job->getPreCommand();
+  params.in_files         = job->get_in_files();
+  params.out_files        = job->get_out_files();
+
+  // Directories
+  params.work_directory   = job->getWorkDirectory();
+  params.local_directory  = job->getLocalDirectory();
+  params.result_directory = job->getResultDirectory();
+
+  // Parameters for COORM
+  params.launcher_file    = job->getLauncherFile();
+  params.launcher_args    = job->getLauncherArgs();
+
+  // Batch options
+  params.maximum_duration = job->getMaximumDuration();
+  params.queue            = job->getQueue();
+  params.partition        = job->getPartition();
+  params.exclusive        = job->getExclusive();
+  params.mem_per_cpu      = job->getMemPerCpu();
+  params.wckey            = job->getWCKey();
+  params.extra_params     = job->getExtraParams();
+
+  // Resource requirements: copied field by field (other resourceParams
+  // members, e.g. resourceList, are intentionally not propagated)
+  resourceParams required = job->getResourceRequiredParams();
+  params.resource_required.name             = required.name;
+  params.resource_required.hostname         = required.hostname;
+  params.resource_required.OS               = required.OS;
+  params.resource_required.nb_proc          = required.nb_proc;
+  params.resource_required.nb_node          = required.nb_node;
+  params.resource_required.nb_proc_per_node = required.nb_proc_per_node;
+  params.resource_required.cpu_clock        = required.cpu_clock;
+  params.resource_required.mem_mb           = required.mem_mb;
+
+  params.specific_parameters = job->getSpecificParameters();
+
+  return params;
+}
+
//=============================================================================
/*!
* create a launcher job based on a file
ParserLauncherType job_params = ParseXmlFile(xmlExecuteFile);
// Creating a new job
- auto JobDel = [] (Launcher::Job *job) { if(job) job->decrRef(); };
- std::unique_ptr<Launcher::Job_Command, decltype(JobDel)> new_job(new Launcher::Job_Command,JobDel);
+ std::unique_ptr<Launcher::Job_Command> new_job(new Launcher::Job_Command);
std::string cmdFile = Kernel_Utils::GetTmpFileName();
#ifndef WIN32
"(libBatch was not present at compilation time)");
}
+// Fallback used when KERNEL is compiled without libBatch: creating a job is
+// not possible, so this overload only logs and throws.
+int
+Launcher_cpp::createJob(const JobParameters_cpp& job_parameters)
+{
+  LAUNCHER_INFOS("Launcher compiled without LIBBATCH - cannot create a job !!!");
+  throw LauncherException("Method Launcher_cpp::createJob is not available "
+                          "(libBatch was not present at compilation time)");
+}
+
void
Launcher_cpp::launchJob(int job_id)
{
"(libBatch was not present at compilation time)");
}
-const char *
+std::string
Launcher_cpp::getJobState(int job_id)
{
LAUNCHER_INFOS("Launcher compiled without LIBBATCH - cannot get job state!!!");
"(libBatch was not present at compilation time)");
}
-const char *
+std::string
Launcher_cpp::getAssignedHostnames(int job_id)
{
LAUNCHER_INFOS("Launcher compiled without LIBBATCH - cannot get job assigned hostnames!!!");
Batch::BatchManager*
Launcher_cpp::getBatchManager(Launcher::Job * job)
{
+ if(!_ResManager)
+ throw LauncherException("Resource manager is not set.");
+
Batch::BatchManager* result = nullptr;
int job_id = job->getNumber();
if (it_job == _launcher_job_map.end())
{
_launcher_job_map[new_job->getNumber()] = new_job;
- new_job->incrRef();
}
else
{
list<int>
Launcher_cpp::loadJobs(const char* jobs_file)
{
- auto JobDel = [] (Launcher::Job *job) { if(job) job->decrRef(); };
-
list<int> new_jobs_id_list;
// Load the jobs from XML file
list<Launcher::Job *>::const_iterator it_job;
for (it_job = jobs_list.begin(); it_job != jobs_list.end(); it_job++)
{
- std::unique_ptr<Launcher::Job, decltype(JobDel) > new_job(*it_job, JobDel);
+ Launcher::Job* new_job(*it_job);
int jobId = -1;
try
{
- jobId = addJob(new_job.get());
+ jobId = addJob(new_job);
if(jobId >= 0)
new_jobs_id_list.push_back(jobId);
}
#include <string>
#include <vector>
#include <list>
+#include <memory>
class MpiImpl;
class Job;
}
-struct batchParams{
- std::string batch_directory;
- std::string expected_during_time;
- std::string mem;
- unsigned long nb_proc;
-
- // Parameters for COORM
+// Plain-data description of a job, shared by the C++ API (Launcher_cpp) and
+// the CORBA front end. Mirrors JobParameters from SALOME_Launcher.idl.
+struct LAUNCHER_EXPORT JobParameters_cpp
+{
+  std::string job_name;
+  std::string job_type;      // must match one of the Launcher::Job_*::TYPE_NAME values
+  std::string job_file;
+  std::string pre_command;
+  std::string env_file;
+  std::list<std::string> in_files;
+  std::list<std::string> out_files;
+  std::string work_directory;
+  std::string local_directory;
+  std::string result_directory;
+  std::string maximum_duration;
+  resourceParams resource_required;
+  std::string queue;
+  std::string partition;
+  bool exclusive = false;    // default-initialized: was indeterminate if the caller forgot to set it
+  unsigned int mem_per_cpu = 0; // 0 presumably means "no explicit request" — confirm in Launcher::Job
+  std::string wckey;
+  std::string extra_params;
+  std::map<std::string, std::string> specific_parameters;
std::string launcher_file;
std::string launcher_args;
};
class LAUNCHER_EXPORT Launcher_cpp
{
-
public:
Launcher_cpp();
virtual ~Launcher_cpp();
// Main interface
void createJob(Launcher::Job * new_job);
+ int createJob(const JobParameters_cpp& job_parameters);
void launchJob(int job_id);
- const char * getJobState(int job_id);
- const char * getAssignedHostnames(int job_id); // Get names or ids of hosts assigned to the job
+ std::string getJobState(int job_id);
+ std::string getAssignedHostnames(int job_id); // Get names or ids of hosts assigned to the job
void getJobResults(int job_id, std::string directory);
void clearJobWorkingDir(int job_id);
bool getJobDumpState(int job_id, std::string directory);
void removeJob(int job_id);
std::string dumpJob(int job_id);
int restoreJob(const std::string& dumpedJob);
+ JobParameters_cpp getJobParameters(int job_id);
/*! Load the jobs from the file "jobs_file" and add them to the Launcher.
* Return a list with the IDs of the jobs that were successfully loaded.
Launcher::Job * findJob(int job_id);
// Lib methods
- void SetResourcesManager( ResourcesManager_cpp* rm ) {_ResManager = rm;}
+ void SetResourcesManager( std::shared_ptr<ResourcesManager_cpp>& rm ) {_ResManager = rm;}
- // Used by SALOME_Launcher
- ResourcesManager_cpp *_ResManager = nullptr;
protected:
+ // Used by SALOME_Launcher
+ std::shared_ptr<ResourcesManager_cpp> _ResManager;
+
virtual void notifyObservers(const std::string & event_name, const std::string & event_data) {}
int addJob(Launcher::Job * new_job);
#endif
}
-bool Launcher::Job::decrRef() const
-{
- bool ret=((--_cnt)==0);
- if(ret)
- delete this;
- return ret;
-}
-
-void Launcher::Job::incrRef() const
-{
- _cnt++;
-}
-
void
Launcher::Job::stopJob()
{
{
class LAUNCHER_EXPORT Job
{
- protected:
- virtual ~Job();
public:
Job();
- int getRCValue() const { return _cnt; }
- bool decrRef() const;
- void incrRef() const;
+ virtual ~Job();
+
// Launcher managing parameters
// State of a Job: CREATED, IN_PROCESS, QUEUED, RUNNING, PAUSED, FINISHED, ERROR
void setState(const std::string & state);
{
class LAUNCHER_EXPORT Job_Command : virtual public Launcher::Job
{
- protected:
- virtual ~Job_Command();
public:
Job_Command();
+ virtual ~Job_Command();
virtual void update_job();
{
class LAUNCHER_EXPORT Job_CommandSALOME : virtual public Launcher::Job_Command
{
- private:
- virtual ~Job_CommandSALOME();
public:
Job_CommandSALOME();
+ virtual ~Job_CommandSALOME();
static const char TYPE_NAME[];
#ifdef WITH_LIBBATCH
{
class LAUNCHER_EXPORT Job_PythonSALOME : virtual public Launcher::Job_SALOME
{
- private:
- virtual ~Job_PythonSALOME();
public:
Job_PythonSALOME();
+ virtual ~Job_PythonSALOME();
virtual void setJobFile(const std::string & job_file);
virtual void addJobTypeSpecificScript(std::ofstream & launch_script_stream);
{
class LAUNCHER_EXPORT Job_SALOME : virtual public Launcher::Job
{
- protected:
- virtual ~Job_SALOME();
public:
Job_SALOME();
+ virtual ~Job_SALOME();
virtual void setResourceDefinition(const ParserResourcesType & resource_definition);
virtual void update_job();
{
class LAUNCHER_EXPORT Job_YACSFile : virtual public Launcher::Job_SALOME
{
- private:
- virtual ~Job_YACSFile();
public:
Job_YACSFile();
+ virtual ~Job_YACSFile();
virtual void setJobFile(const std::string & job_file);
virtual void addJobTypeSpecificScript(std::ofstream & launch_script_stream);
}
catch (const LauncherException & exc)
{
- new_job->decrRef();
+ delete new_job;
string error = string("Invalid job \"") + job_name + "\": " + exc.msg;
throw LauncherException(error);
}
CORBA::Long
SALOME_Launcher::createJob(const Engines::JobParameters & job_parameters)
{
- std::string job_type = job_parameters.job_type.in();
-
- Launcher::Job * new_job; // It is Launcher_cpp that is going to destroy it
-
- if (job_type == Launcher::Job_Command::TYPE_NAME)
- new_job = new Launcher::Job_Command();
- else if (job_type == Launcher::Job_CommandSALOME::TYPE_NAME)
- new_job = new Launcher::Job_CommandSALOME();
- else if (job_type == Launcher::Job_YACSFile::TYPE_NAME)
- new_job = new Launcher::Job_YACSFile();
- else if (job_type == Launcher::Job_PythonSALOME::TYPE_NAME)
- new_job = new Launcher::Job_PythonSALOME();
- else
- {
- std::string message("SALOME_Launcher::createJob: bad job type: ");
- message += job_type;
- THROW_SALOME_CORBA_EXCEPTION(message.c_str(), SALOME::INTERNAL_ERROR);
- }
-
- // Name
- new_job->setJobName(job_parameters.job_name.in());
-
- // Directories
- std::string work_directory = job_parameters.work_directory.in();
- std::string local_directory = job_parameters.local_directory.in();
- std::string result_directory = job_parameters.result_directory.in();
- new_job->setWorkDirectory(work_directory);
- new_job->setLocalDirectory(local_directory);
- new_job->setResultDirectory(result_directory);
-
- // Parameters for COORM
- std::string launcher_file = job_parameters.launcher_file.in();
- std::string launcher_args = job_parameters.launcher_args.in();
- new_job->setLauncherFile(launcher_file);
- new_job->setLauncherArgs(launcher_args);
-
- // Job File
- std::string job_file = job_parameters.job_file.in();
+ JobParameters_cpp cpp_parameters = JobParameters_CORBA2CPP(job_parameters);
+ CORBA::Long jobNumber = -1;
try
{
- new_job->setJobFile(job_file);
- }
- catch(const LauncherException &ex)
- {
- INFOS(ex.msg.c_str());
- THROW_SALOME_CORBA_EXCEPTION(ex.msg.c_str(),SALOME::INTERNAL_ERROR);
- }
- new_job->setPreCommand(job_parameters.pre_command.in());
-
- // Files
- std::string env_file = job_parameters.env_file.in();
- new_job->setEnvFile(env_file);
- for (CORBA::ULong i = 0; i < job_parameters.in_files.length(); i++)
- new_job->add_in_file(job_parameters.in_files[i].in());
- for (CORBA::ULong i = 0; i < job_parameters.out_files.length(); i++)
- new_job->add_out_file(job_parameters.out_files[i].in());
-
- // Expected During Time
- try
- {
- std::string maximum_duration = job_parameters.maximum_duration.in();
- new_job->setMaximumDuration(maximum_duration);
- }
- catch(const LauncherException &ex){
- INFOS(ex.msg.c_str());
- THROW_SALOME_CORBA_EXCEPTION(ex.msg.c_str(),SALOME::INTERNAL_ERROR);
- }
-
- // Queue
- std::string queue = job_parameters.queue.in();
- new_job->setQueue(queue);
-
- // Partition
- std::string partition = job_parameters.partition.in();
- new_job->setPartition(partition);
-
- // Exclusive
- new_job->setExclusive(job_parameters.exclusive);
-
- // Memory required per CPU
- new_job->setMemPerCpu(job_parameters.mem_per_cpu);
-
- // WC Key
- std::string wckey = job_parameters.wckey.in();
- new_job->setWCKey(wckey);
-
- // Extra params
- std::string extra_params = job_parameters.extra_params.in();
- new_job->setExtraParams(extra_params);
-
- // Resources requirements
- try
- {
- resourceParams p;
- p.name = job_parameters.resource_required.name;
- p.hostname = job_parameters.resource_required.hostname;
- p.OS = job_parameters.resource_required.OS;
- p.nb_proc = job_parameters.resource_required.nb_proc;
- p.nb_node = job_parameters.resource_required.nb_node;
- p.nb_proc_per_node = job_parameters.resource_required.nb_proc_per_node;
- p.cpu_clock = job_parameters.resource_required.cpu_clock;
- p.mem_mb = job_parameters.resource_required.mem_mb;
- new_job->setResourceRequiredParams(p);
- }
- catch(const LauncherException &ex){
- INFOS(ex.msg.c_str());
- THROW_SALOME_CORBA_EXCEPTION(ex.msg.c_str(),SALOME::INTERNAL_ERROR);
- }
-
- // Adding specific parameters to the job
- for (CORBA::ULong i = 0; i < job_parameters.specific_parameters.length(); i++)
- new_job->addSpecificParameter(job_parameters.specific_parameters[i].name.in(),
- job_parameters.specific_parameters[i].value.in());
- try
- {
- new_job->checkSpecificParameters();
- }
- catch(const LauncherException &ex)
- {
- INFOS(ex.msg.c_str());
- THROW_SALOME_CORBA_EXCEPTION(ex.msg.c_str(),SALOME::INTERNAL_ERROR);
- }
-
- try
- {
- _l.createJob(new_job);
+ jobNumber = _l.createJob(cpp_parameters);
std::ostringstream job_id;
- job_id << new_job->getNumber();
+ job_id << jobNumber;
notifyObservers("NEW_JOB", job_id.str());
}
catch(const LauncherException &ex)
INFOS(ex.msg.c_str());
THROW_SALOME_CORBA_EXCEPTION(ex.msg.c_str(),SALOME::BAD_PARAM);
}
- return new_job->getNumber();
+ return jobNumber;
}
void
Engines::JobParameters *
SALOME_Launcher::getJobParameters(CORBA::Long job_id)
{
- std::map<int, Launcher::Job *> cpp_jobs = _l.getJobs();
- std::map<int, Launcher::Job *>::const_iterator it_job = cpp_jobs.find(job_id);
- if (it_job == cpp_jobs.end())
- {
- INFOS("Cannot find the job, is it created ? job number: " << job_id);
- THROW_SALOME_CORBA_EXCEPTION("Job does not exist", SALOME::INTERNAL_ERROR);
- }
-
- Launcher::Job * job = it_job->second;
- Engines::JobParameters_var job_parameters = new Engines::JobParameters;
- job_parameters->job_name = CORBA::string_dup(job->getJobName().c_str());
- job_parameters->job_type = CORBA::string_dup(job->getJobType().c_str());
- job_parameters->job_file = CORBA::string_dup(job->getJobFile().c_str());
- job_parameters->env_file = CORBA::string_dup(job->getEnvFile().c_str());
- job_parameters->work_directory = CORBA::string_dup(job->getWorkDirectory().c_str());
- job_parameters->local_directory = CORBA::string_dup(job->getLocalDirectory().c_str());
- job_parameters->result_directory = CORBA::string_dup(job->getResultDirectory().c_str());
- job_parameters->pre_command = CORBA::string_dup(job->getPreCommand().c_str());
-
- // Parameters for COORM
- job_parameters->launcher_file = CORBA::string_dup(job->getLauncherFile().c_str());
- job_parameters->launcher_args = CORBA::string_dup(job->getLauncherArgs().c_str());
-
- int i = 0;
- int j = 0;
- std::list<std::string> in_files = job->get_in_files();
- std::list<std::string> out_files = job->get_out_files();
- job_parameters->in_files.length(in_files.size());
- for(std::list<std::string>::iterator it = in_files.begin(); it != in_files.end(); it++)
- {
- job_parameters->in_files[i] = CORBA::string_dup((*it).c_str());
- i++;
- }
- job_parameters->out_files.length(out_files.size());
- for(std::list<std::string>::iterator it = out_files.begin(); it != out_files.end(); it++)
+ Engines::JobParameters_var job_parameters;
+ try
{
- job_parameters->out_files[j] = CORBA::string_dup((*it).c_str());
- j++;
+ JobParameters_cpp cpp_parameters = _l.getJobParameters(job_id);
+ job_parameters = JobParameters_CPP2CORBA(cpp_parameters);
}
-
- job_parameters->maximum_duration = CORBA::string_dup(job->getMaximumDuration().c_str());
- job_parameters->queue = CORBA::string_dup(job->getQueue().c_str());
- job_parameters->partition = CORBA::string_dup(job->getPartition().c_str());
- job_parameters->exclusive = job->getExclusive();
- job_parameters->mem_per_cpu = job->getMemPerCpu();
- job_parameters->wckey = CORBA::string_dup(job->getWCKey().c_str());
- job_parameters->extra_params = CORBA::string_dup(job->getExtraParams().c_str());
-
- resourceParams resource_params = job->getResourceRequiredParams();
- job_parameters->resource_required.name = CORBA::string_dup(resource_params.name.c_str());
- job_parameters->resource_required.hostname = CORBA::string_dup(resource_params.hostname.c_str());
- job_parameters->resource_required.OS = CORBA::string_dup(resource_params.OS.c_str());
- job_parameters->resource_required.nb_proc = resource_params.nb_proc;
- job_parameters->resource_required.nb_node = resource_params.nb_node;
- job_parameters->resource_required.nb_proc_per_node = resource_params.nb_proc_per_node;
- job_parameters->resource_required.cpu_clock = resource_params.cpu_clock;
- job_parameters->resource_required.mem_mb = resource_params.mem_mb;
-
- std::map<std::string, std::string> specific_parameters = job->getSpecificParameters();
- if (!specific_parameters.empty())
+ catch(const LauncherException &ex)
{
- job_parameters->specific_parameters.length(specific_parameters.size());
- std::map<std::string, std::string>::const_iterator it_specific;
- CORBA::ULong i = 0;
- for (it_specific = specific_parameters.begin() ; it_specific != specific_parameters.end(); it_specific++)
- {
- Engines::Parameter_var new_param = new Engines::Parameter;
- new_param->name = CORBA::string_dup((it_specific->first).c_str());
- new_param->value = CORBA::string_dup((it_specific->second).c_str());
- job_parameters->specific_parameters[i] = new_param;
- i++;
- }
+ INFOS(ex.msg.c_str());
+ THROW_SALOME_CORBA_EXCEPTION(ex.msg.c_str(),SALOME::BAD_PARAM);
}
return job_parameters._retn();
}
}
+
+// Convert a CORBA Engines::JobParameters structure into the C++
+// JobParameters_cpp used by Launcher_cpp. Pure field-by-field copy:
+// CORBA strings become std::string, sequences become std::list/std::map.
+// No validation is performed here.
+JobParameters_cpp
+SALOME_Launcher::JobParameters_CORBA2CPP(
+    const Engines::JobParameters& job_parameters)
+{
+  JobParameters_cpp result;
+
+  result.job_name = job_parameters.job_name.in();
+  result.job_type = job_parameters.job_type.in();
+  result.job_file = job_parameters.job_file.in();
+  result.pre_command = job_parameters.pre_command.in();
+  result.env_file = job_parameters.env_file.in();
+
+  // clear() is redundant on a freshly constructed result, but harmless
+  result.in_files.clear();
+  for (CORBA::ULong i = 0; i < job_parameters.in_files.length(); i++)
+    result.in_files.push_back(job_parameters.in_files[i].in());
+  result.out_files.clear();
+  for (CORBA::ULong i = 0; i < job_parameters.out_files.length(); i++)
+    result.out_files.push_back(job_parameters.out_files[i].in());
+
+  result.work_directory = job_parameters.work_directory.in();
+  result.local_directory = job_parameters.local_directory.in();
+  result.result_directory = job_parameters.result_directory.in();
+  result.maximum_duration = job_parameters.maximum_duration.in();
+
+  result.resource_required = resourceParameters_CORBAtoCPP(job_parameters.resource_required);
+
+  result.queue = job_parameters.queue.in();
+  result.partition = job_parameters.partition.in();
+  result.exclusive = job_parameters.exclusive;
+  result.mem_per_cpu = job_parameters.mem_per_cpu;
+  result.wckey = job_parameters.wckey.in();
+  result.extra_params = job_parameters.extra_params.in();
+
+  // std::map insertion: for a duplicated parameter name, the last value wins
+  result.specific_parameters.clear();
+  for (CORBA::ULong i = 0; i < job_parameters.specific_parameters.length(); i++)
+    result.specific_parameters[job_parameters.specific_parameters[i].name.in()]
+      = job_parameters.specific_parameters[i].value.in();
+
+  result.launcher_file = job_parameters.launcher_file.in();
+  result.launcher_args = job_parameters.launcher_args.in();
+  return result;
+}
+
+// Convert a C++ JobParameters_cpp structure into a CORBA
+// Engines::JobParameters. Every std::string is duplicated with
+// CORBA::string_dup; lists/maps are copied into CORBA sequences.
+// Returns an owning _var; callers typically hand it back with _retn().
+Engines::JobParameters_var
+SALOME_Launcher::JobParameters_CPP2CORBA(const JobParameters_cpp& job_parameters)
+{
+  Engines::JobParameters_var result = new Engines::JobParameters;
+  result->job_name = CORBA::string_dup(job_parameters.job_name.c_str());
+  result->job_type = CORBA::string_dup(job_parameters.job_type.c_str());
+  result->job_file = CORBA::string_dup(job_parameters.job_file.c_str());
+  result->pre_command = CORBA::string_dup(job_parameters.pre_command.c_str());
+  result->env_file = CORBA::string_dup(job_parameters.env_file.c_str());
+  result->in_files.length(job_parameters.in_files.size());
+
+  int i = 0;
+  for(const std::string& it : job_parameters.in_files)
+  {
+    result->in_files[i] = CORBA::string_dup(it.c_str());
+    i++;
+  }
+  result->out_files.length(job_parameters.out_files.size());
+  i = 0;
+  for(const std::string& it : job_parameters.out_files)
+  {
+    result->out_files[i] = CORBA::string_dup(it.c_str());
+    i++;
+  }
+
+  result->work_directory = CORBA::string_dup(job_parameters.work_directory.c_str());
+  result->local_directory = CORBA::string_dup(job_parameters.local_directory.c_str());
+  result->result_directory = CORBA::string_dup(job_parameters.result_directory.c_str());
+  result->maximum_duration = CORBA::string_dup(job_parameters.maximum_duration.c_str());
+
+  result->resource_required = resourceParameters_CPPtoCORBA(job_parameters.resource_required);
+
+  result->queue = CORBA::string_dup(job_parameters.queue.c_str());
+  result->partition = CORBA::string_dup(job_parameters.partition.c_str());
+  result->exclusive = job_parameters.exclusive;
+  result->mem_per_cpu = job_parameters.mem_per_cpu;
+  result->wckey = CORBA::string_dup(job_parameters.wckey.c_str());
+  result->extra_params = CORBA::string_dup(job_parameters.extra_params.c_str());
+
+  const std::map<std::string, std::string>& specific_parameters
+    = job_parameters.specific_parameters;
+  if (!specific_parameters.empty())
+  {
+    result->specific_parameters.length(specific_parameters.size());
+    // NOTE: this CORBA::ULong i shadows the int i declared above
+    CORBA::ULong i = 0;
+    for (const auto& it_specific : specific_parameters)
+    {
+      Engines::Parameter_var new_param = new Engines::Parameter;
+      new_param->name = CORBA::string_dup(it_specific.first.c_str());
+      new_param->value = CORBA::string_dup(it_specific.second.c_str());
+      result->specific_parameters[i] = new_param;
+      i++;
+    }
+  }
+
+  result->launcher_file = CORBA::string_dup(job_parameters.launcher_file.c_str());
+  result->launcher_args = CORBA::string_dup(job_parameters.launcher_args.c_str());
+  return result;
+}
static const char *_LauncherNameInNS;
+ static JobParameters_cpp
+ JobParameters_CORBA2CPP(const Engines::JobParameters& job_parameters);
+
+ static Engines::JobParameters_var
+ JobParameters_CPP2CORBA(const JobParameters_cpp& job_parameters);
+
protected:
// Internal methods
virtual void notifyObservers(const std::string & event_name, const std::string & event_data);
#include "Launcher.hxx"
#include <iostream>
#include <string>
+#include <memory>
int main(int argc, char** argv)
{
try {
Launcher_cpp *lcpp = new Launcher_cpp();
- ResourcesManager_cpp *rcpp = new ResourcesManager_cpp();
+ std::shared_ptr<ResourcesManager_cpp> rcpp(new ResourcesManager_cpp());
lcpp->SetResourcesManager(rcpp);
if(!getenv("KERNEL_ROOT_DIR"))
throw ResourcesException("you must define KERNEL_ROOT_DIR environment variable!! -> cannot load testLauncher.xml");
long jobid = lcpp->createJobWithFile(xmlfile.c_str(),"localhost");
lcpp->launchJob(jobid);
delete lcpp;
- delete rcpp;
std::cout << "test OK" << std::endl;
} catch ( const ResourcesException &ex) {
std::cout << ex.msg.c_str() << std::endl;
SET(PYFILES_TO_INSTALL ${PYFILES_TO_INSTALL} ${CMAKE_CURRENT_BINARY_DIR}/pylauncher.py)
SALOME_INSTALL_SCRIPTS("${PYFILES_TO_INSTALL}" ${SALOME_INSTALL_PYTHON} EXTRA_DPYS "${SWIG_MODULE_${Launcher_target_name}_REAL_NAME}")
+
+IF(SALOME_BUILD_TESTS)
+ ADD_SUBDIRECTORY(Test)
+ENDIF(SALOME_BUILD_TESTS)
%module pylauncher
%{
-#include "SALOME_ResourcesCatalog_Parser.hxx"
-#include "Launcher_Job.hxx"
-#include "Launcher_Job_SALOME.hxx"
-#include "Launcher_Job_YACSFile.hxx"
#include "Launcher.hxx"
#include "ResourcesManager.hxx"
-static PyObject *convertJob(Launcher::Job *job, int owner)
+struct ResourceDefinition_cpp
{
- PyObject *ret(nullptr);
- if(!job)
- {
- Py_XINCREF(Py_None);
- return Py_None;
- }
- if(dynamic_cast<Launcher::Job_YACSFile *>(job))
- return SWIG_NewPointerObj((void*)dynamic_cast<Launcher::Job_YACSFile *>(job),SWIGTYPE_p_Launcher__Job_YACSFile,owner);
- throw LauncherException("Not recognized type of job on downcast !");
-}
-
+public:
+ std::string name;
+ std::string hostname;
+ std::string type;
+ std::string protocol;
+ std::string username;
+ std::string applipath;
+ std::string OS;
+ int mem_mb;
+ int cpu_clock;
+ int nb_node;
+ int nb_proc_per_node;
+ std::string batch;
+ std::string mpiImpl;
+ std::string iprotocol;
+ bool can_launch_batch_jobs;
+ bool can_run_containers;
+ std::string working_directory;
+};
%}
-%include std_string.i
-
-%typemap(out) const std::list<std::string>&
-{
- std::size_t i;
- std::list<std::string>::const_iterator iL;
- $result = PyList_New($1->size());
- for (i=0, iL=$1->cbegin(); iL!=$1->cend(); i++, iL++)
- PyList_SetItem($result,i,PyUnicode_FromString((*iL).c_str()));
-}
-
-%typemap(out) const std::map<std::string, std::string> &
-{
- $result = PyDict_New();
- for(std::map<std::string, std::string>::const_iterator iL=$1->cbegin();iL!=$1->cend();iL++)
- {
- PyObject *a(PyUnicode_FromString((*iL).first.c_str()));
- PyObject *b(PyUnicode_FromString((*iL).second.c_str()));
- PyDict_SetItem($result,a,b);
- Py_DECREF(a); Py_DECREF(b);
- }
-}
-
-%typemap(out) std::vector<std::string> *
-{
- std::size_t i;
- std::vector<std::string>::const_iterator iL;
- $result = PyList_New($1->size());
- for (i=0, iL=$1->cbegin(); iL!=$1->cend(); i++, iL++)
- PyList_SetItem($result,i,PyUnicode_FromString((*iL).c_str()));
-}
+%include "std_string.i"
+%include "std_vector.i"
+%include "std_list.i"
+%include "std_map.i"
-%typemap(out) Launcher::Job *
-{
- $result=convertJob($1,$owner);
-}
-
-%exception
-{
- try {
- $function
- }
- catch(LauncherException& e)
- {
- SWIG_exception_fail(SWIG_RuntimeError,e.msg.c_str());
- }
- catch (...)
- {
- SWIG_exception_fail(SWIG_UnknownError, "Unknown");
- }
-}
+namespace std {
+ %template(list_int) list<int>;
+ %template(list_str) list<string>;
+ %template(vector_str) vector<string>;
+ %template(map_ss) map<string,string>;
+};
+// see ResourceParameters from SALOME_ResourcesManager.idl
+// see resourceParams from ResourcesManager.hxx
+%naturalvar JobParameters_cpp::componentList;
+%naturalvar JobParameters_cpp::resourceList;
struct resourceParams
{
resourceParams();
std::vector<std::string> resourceList;
};
-class ResourcesManager_cpp
+// see JobParameters from SALOME_Launcher.idl
+// see JobParameters_cpp from Launcher.hxx
+%naturalvar JobParameters_cpp::in_files;
+%naturalvar JobParameters_cpp::out_files;
+%naturalvar JobParameters_cpp::specific_parameters;
+struct JobParameters_cpp
{
- public:
- ResourcesManager_cpp(const char *xmlFilePath);
+public:
+ std::string job_name;
+ std::string job_type;
+ std::string job_file;
+ std::string pre_command;
+ std::string env_file;
+ std::list<std::string> in_files;
+ std::list<std::string> out_files;
+ std::string work_directory;
+ std::string local_directory;
+ std::string result_directory;
+ std::string maximum_duration;
+ resourceParams resource_required;
+ std::string queue;
+ std::string partition;
+ bool exclusive;
+ unsigned int mem_per_cpu;
+ std::string wckey;
+ std::string extra_params;
+ std::map<std::string, std::string> specific_parameters;
+ std::string launcher_file;
+ std::string launcher_args;
};
-class ParserResourcesType
+// see ResourceDefinition from SALOME_ResourcesManager.idl
+// no other c++ equivalent. Convertion from ParserResourcesType
+struct ResourceDefinition_cpp
{
public:
- ParserResourcesType();
- ~ParserResourcesType();
- std::string getAccessProtocolTypeStr() const;
- std::string getResourceTypeStr() const;
- std::string getBatchTypeStr() const;
- std::string getMpiImplTypeStr() const;
- std::string getClusterInternalProtocolStr() const;
- std::string getCanLaunchBatchJobsStr() const;
- std::string getCanRunContainersStr() const;
-
- void setAccessProtocolTypeStr(const std::string & protocolTypeStr);
- void setResourceTypeStr(const std::string & resourceTypeStr);
- void setBatchTypeStr(const std::string & batchTypeStr);
- void setMpiImplTypeStr(const std::string & mpiImplTypeStr);
- void setClusterInternalProtocolStr(const std::string & internalProtocolTypeStr);
- void setCanLaunchBatchJobsStr(const std::string & canLaunchBatchJobsStr);
- void setCanRunContainersStr(const std::string & canRunContainersStr);
+ std::string name;
+ std::string hostname;
+ std::string type;
+ std::string protocol;
+ std::string username;
+ std::string applipath;
+ std::string OS;
+ int mem_mb;
+ int cpu_clock;
+ int nb_node;
+ int nb_proc_per_node;
+ std::string batch;
+ std::string mpiImpl;
+ std::string iprotocol;
+ bool can_launch_batch_jobs;
+ bool can_run_containers;
+ std::string working_directory;
};
-%feature("unref") Launcher::Job "$this->decrRef();"
-%feature("unref") Launcher::Job_SALOME "$this->decrRef();"
-%feature("unref") Launcher::Job_YACSFile "$this->decrRef();"
-
-namespace Launcher
+%exception
{
- class Job
+ try
{
- public:
- Job();
- virtual ~Job();
- void setState(const std::string & state);
- std::string getState() const;
- std::string getAssignedHostnames();
- void setNumber(const int & number);
- int getNumber();
- virtual void setResourceDefinition(const ParserResourcesType & resource_definition);
- ParserResourcesType getResourceDefinition() const;
- // Common parameters
- void setJobName(const std::string & job_name);
- virtual void setJobFile(const std::string & job_file);
- void setPreCommand(const std::string & preCommand);
- void setWorkDirectory(const std::string & work_directory);
- void setLocalDirectory(const std::string & local_directory);
- void setResultDirectory(const std::string & result_directory);
- void add_in_file(const std::string & file);
- void add_out_file(const std::string & file);
- void setMaximumDuration(const std::string & maximum_duration);
- void setResourceRequiredParams(const resourceParams & resource_required_params);
- void setQueue(const std::string & queue);
- void setPartition(const std::string & partition);
- void setEnvFile(const std::string & env_file);
- void setExclusive(bool exclusive);
- void setExclusiveStr(const std::string & exclusiveStr);
- void setMemPerCpu(unsigned long mem_per_cpu);
- void setWCKey(const std::string & wckey);
- void setExtraParams(const std::string & extra_params);
- void setReference(const std::string & reference);
- // For COORM
- void setLauncherFile(const std::string & launcher_file);
- void setLauncherArgs(const std::string & launcher_args);
-
- std::string getJobName() const;
- std::string getJobFile() const;
- std::string getPreCommand() const;
- std::string getWorkDirectory() const;
- std::string getLocalDirectory() const;
- std::string getResultDirectory() const;
- const std::list<std::string> & get_in_files() const;
- const std::list<std::string> & get_out_files() const;
- std::string getMaximumDuration() const;
- resourceParams getResourceRequiredParams() const;
- std::string getQueue() const;
- std::string getPartition() const;
- std::string getEnvFile() const;
- std::string getJobType() const;
- bool getExclusive() const;
- std::string getExclusiveStr() const;
- unsigned long getMemPerCpu() const;
- std::string getWCKey() const;
- std::string getExtraParams() const;
- std::string getReference() const;
- std::string getLauncherFile() const;
- std::string getLauncherArgs() const;
- std::string updateJobState();
-
- void addSpecificParameter(const std::string & name, const std::string & value);
- const std::map<std::string, std::string> & getSpecificParameters() const;
- virtual void checkSpecificParameters();
-
- // Checks
- void checkMaximumDuration(const std::string & maximum_duration);
- void checkResourceRequiredParams(const resourceParams & resource_required_params);
-
- // Helps
- long convertMaximumDuration(const std::string & maximum_duration);
- std::string getLaunchDate() const;
+ $function
+ }
+ catch (ResourcesException& e)
+ {
+ SWIG_exception_fail(SWIG_RuntimeError, e.msg.c_str());
+ }
+ catch(...)
+ {
+ SWIG_exception_fail(SWIG_RuntimeError,"Unknown exception");
+ }
+}
- void stopJob();
- void removeJob();
- virtual void update_job() = 0;
- };
+%include <std_shared_ptr.i>
+%shared_ptr(ResourcesManager_cpp)
- class Job_SALOME : public Job
+class ResourcesManager_cpp
+{
+public:
+ ResourcesManager_cpp(const char *xmlFilePath);
+ std::vector<std::string> GetFittingResources(const resourceParams& params);
+%extend
+{
+ ResourceDefinition_cpp GetResourceDefinition(const std::string& name)
{
- private:
- Job_SALOME();
- public:
- virtual ~Job_SALOME();
- virtual void setResourceDefinition(const ParserResourcesType & resource_definition);
- virtual void update_job();
-#ifdef WITH_LIBBATCH
- public:
- std::string buildSalomeScript(Batch::Parametre params);
-#endif
- };
+ ResourceDefinition_cpp swig_result;
+ ParserResourcesType cpp_result = $self->GetResourcesDescr(name);
+
+ swig_result.name = cpp_result.Name;
+ swig_result.hostname = cpp_result.HostName;
+ swig_result.type = cpp_result.getResourceTypeStr();
+ swig_result.protocol = cpp_result.getAccessProtocolTypeStr();
+ swig_result.username = cpp_result.UserName;
+ swig_result.applipath = cpp_result.AppliPath;
+ swig_result.OS = cpp_result.OS;
+ swig_result.mem_mb = cpp_result.DataForSort._memInMB;
+ swig_result.cpu_clock = cpp_result.DataForSort._CPUFreqMHz;
+ swig_result.nb_node = cpp_result.DataForSort._nbOfNodes;
+ swig_result.nb_proc_per_node = cpp_result.DataForSort._nbOfProcPerNode;
+ swig_result.batch = cpp_result.getBatchTypeStr();
+ swig_result.mpiImpl = cpp_result.getMpiImplTypeStr();
+ swig_result.iprotocol = cpp_result.getClusterInternalProtocolStr();
+ swig_result.can_launch_batch_jobs = cpp_result.can_launch_batch_jobs;
+ swig_result.can_run_containers = cpp_result.can_run_containers;
+ swig_result.working_directory = cpp_result.working_directory;
+
+ return swig_result;
+ }
+}
+};
- class Job_YACSFile : public Job_SALOME
+%exception
+{
+ try
+ {
+ $function
+ }
+ catch (LauncherException& e)
+ {
+ SWIG_exception_fail(SWIG_RuntimeError, e.msg.c_str());
+ }
+ catch(...)
{
- public:
- Job_YACSFile();
- virtual ~Job_YACSFile();
- virtual void setJobFile(const std::string & job_file);
- virtual void checkSpecificParameters();
- };
+ SWIG_exception_fail(SWIG_RuntimeError,"Unknown exception");
+ }
}
class Launcher_cpp
public:
Launcher_cpp();
virtual ~Launcher_cpp();
- void createJob(Launcher::Job * new_job);
+ int createJob(const JobParameters_cpp& job_parameters);
void launchJob(int job_id);
- const char * getJobState(int job_id);
- const char * getAssignedHostnames(int job_id); // Get names or ids of hosts assigned to the job
+ std::string getJobState(int job_id);
+ std::string getAssignedHostnames(int job_id); // Get names or ids of hosts assigned to the job
void getJobResults(int job_id, std::string directory);
void clearJobWorkingDir(int job_id);
bool getJobDumpState(int job_id, std::string directory);
void removeJob(int job_id);
std::string dumpJob(int job_id);
int restoreJob(const std::string& dumpedJob);
+ JobParameters_cpp getJobParameters(int job_id);
std::list<int> loadJobs(const char* jobs_file);
void saveJobs(const char* jobs_file);
long createJobWithFile(std::string xmlExecuteFile, std::string clusterName);
- std::map<int, Launcher::Job *> getJobs();
- void addJobDirectlyToMap(Launcher::Job * new_job);
- void SetResourcesManager( ResourcesManager_cpp *rm );
- Launcher::Job * findJob(int job_id);
+ void SetResourcesManager(std::shared_ptr<ResourcesManager_cpp>& rm );
};
-
-%pythoncode %{
-def sendJobToSession(self, job_id, sessionId=None):
- """Send job specified by its job_id in self to a remote SALOME session.
- Doing so, it's possible to follow the job created locally into the JobManager module of the target SALOME session.
- SALOME session is specified by the file pointed by the content of OMNIORB_CONFIG environement var. The content of this var is called sessionId.
- If sessionId is let untouched, the current OMNIORB_CONFIG environement var is used.
- If this method fails to connect to the target SALOME session a RuntimeError exception will be thrown.
- """
- def job_type_traducer(jyf):
- dico = {'Job_YACSFile' : 'yacs_file'}
- st = type(jyf).__name__
- if st not in dico:
- raise RuntimeError("Not recognized type %s !"%st)
- return dico[st]
- #
- def resource_required_func(jyf):
- import Engines
- rp =jyf.getResourceRequiredParams()
- l12 = [('name', None), ('hostname', None), ('can_launch_batch_jobs', None), ('can_run_containers', None), ('OS', None), ('componentList', None), ('nb_proc', None), ('mem_mb', None), ('cpu_clock', None), ('nb_node', None), ('nb_proc_per_node', None), ('policy', lambda x: 'cycl'), ('resList', lambda x: x.resourceList)]
- kw2={}
- for a,b in l12:
- if a and not b:
- kw2[a]=getattr(rp,a)
- else:
- if a and b:
- kw2[a]=b(rp)
- return Engines.ResourceParameters(**kw2)
- #
- filest = self.dumpJob(job_id);
- # Connect to SALOME session a retrieve its SalomeLauncher object
- import os
- if sessionId is not None:
- os.environ["OMNIORB_CONFIG"]=sessionId
- import Engines
- import orbmodule
- try:
- clt=orbmodule.client()
- sl = clt.Resolve("SalomeLauncher")
- except:
- raise RuntimeError("Fail to connect to the remote SALOME session.")
- # swig to CORBA translation
- # Job_YACSFile -> Engines.JobParameters and resourceParams -> Engines.ResourceParameters()
- l21= [('job_name', None), ('job_type', job_type_traducer), ('job_file', None), ('pre_command', None), ('env_file', None), ('in_files', lambda x: x.get_in_files()), ('out_files', lambda x: x.get_out_files()), ('work_directory', None), ('local_directory', None), ('result_directory', None), ('maximum_duration', None), ('resource_required',resource_required_func) , ('queue', None), ('partition', None), ('exclusive', None), ('mem_per_cpu', None), ('wckey', lambda x: x.getWCKey() ), ('extra_params', None), ('specific_parameters', lambda x: list(x.getSpecificParameters().items())), ('launcher_file', None), ('launcher_args', None)]
- kw={}
- jyf = self.findJob(job_id)
- for a,b in l21:
- if not b and a:
- kw[a]=eval("jyf.get%s()"%"".join(["%s%s"%(elt2[0].upper(),elt2[1:]) for elt2 in a.split("_")]),{"jyf" : jyf})
- else:
- if a and b:
- kw[a]=b(jyf)
- jyc = Engines.JobParameters(**kw)
- ########################
- bpc = sl.createJob(jyc)
- sl.restoreJob(filest)
-
-Launcher_cpp.sendJobToSession = sendJobToSession
-del sendJobToSession
-%}
--- /dev/null
+# Copyright (C) 2012-2019 CEA/DEN, EDF R&D, OPEN CASCADE
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+
+# --- rules ---
+
+IF(NOT WIN32)
+# ** Now in CTestTestfileInstall.cmake **
+# ** In this file only remain unit tests (no SALOME session is needed) **
+# ADD_TEST(NAME SalomeLauncher
+# COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/../../UnitTests/prepare_test.py
+# ${CMAKE_CURRENT_SOURCE_DIR}/test_launcher.py
+# -d KERNEL_ROOT_DIR=${CMAKE_INSTALL_PREFIX}
+# )
+ INSTALL(FILES ${CMAKE_CURRENT_SOURCE_DIR}/test_swig_launcher.py
+ DESTINATION ${KERNEL_TEST_DIR}/Launcher_SWIG)
+
+ INSTALL(FILES CTestTestfileInstall.cmake
+ DESTINATION ${KERNEL_TEST_DIR}/Launcher_SWIG
+ RENAME CTestTestfile.cmake)
+
+ INSTALL(PROGRAMS ${STRESS_TEST_FILES}
+ DESTINATION ${KERNEL_TEST_DIR}/Launcher_SWIG)
+ENDIF()
--- /dev/null
+# Copyright (C) 2015-2019 CEA/DEN, EDF R&D
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+
+IF(NOT WIN32)
+ SET(TEST_NAME ${COMPONENT_NAME}_Launcher_SWIG)
+ ADD_TEST(${TEST_NAME} python test_swig_launcher.py)
+ SET_TESTS_PROPERTIES(${TEST_NAME} PROPERTIES LABELS "${COMPONENT_NAME}"
+ # TIMEOUT 500
+ )
+
+ # /!\ DO NOT SET TIMEOUT PROPERTY IF USING ${SALOME_TEST_DRIVER}
+ # BUT PASS TIMEOUT VALUE TO THE DRIVER
+
+ENDIF()
--- /dev/null
+#! /usr/bin/env python3
+# -*- coding: utf-8 -*-
+# Copyright (C) 2014-2019 CEA/DEN, EDF R&D
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+
+import unittest
+import os
+import sys
+import time
+import tempfile
+import errno
+
+def mkdir_p(path):
+ try:
+ os.makedirs(path)
+ except OSError as exc: # Python >2.5
+ if exc.errno == errno.EEXIST and os.path.isdir(path):
+ pass
+ else:
+ raise
+
+import pylauncher
+def createLauncher():
+ launcher = pylauncher.Launcher_cpp()
+ launcher.SetResourcesManager(createResourcesManager())
+ return launcher
+
+def createResourcesManager():
+ # localhost is defined anyway, even if the catalog file does not exist.
+ catalog_path = os.environ.get("USER_CATALOG_RESOURCES_FILE", "")
+ return pylauncher.ResourcesManager_cpp(catalog_path)
+
+def createJobParameters():
+ jp = pylauncher.JobParameters_cpp()
+ jp.resource_required = createResourceParameters()
+ return jp
+
+def createResourceParameters():
+ return pylauncher.resourceParams()
+
+# Test of SalomeLauncher.
+# This test should be run in the salome environment, using "salome shell".
+# It does not need a salome application running.
+# The test will try to launch batch jobs on every available resources which
+# have the can_launch_batch_jobs parameter set to True.
+# You can use the environment variable USER_CATALOG_RESOURCES_FILE in order to
+# define a customised resource catalog.
+# If YACS_ROOT_DIR is not set, the test of submitting a YACS schema will be
+# skipped.
+class TestCompo(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls):
+ # Prepare the test directory
+ temp = tempfile.NamedTemporaryFile()
+ cls.test_dir = os.path.join(temp.name, "test_dir")
+ name = os.path.basename(temp.name)
+ temp.close()
+ cls.suffix = time.strftime("-%Y-%m-%d-%H-%M-%S")+"-%s"%(os.getpid())
+ mkdir_p(cls.test_dir)
+
+ # Get the list of possible resources
+ ressource_param = createResourceParameters()
+ ressource_param.can_launch_batch_jobs = True
+ rm = createResourcesManager()
+ cls.ressources = rm.GetFittingResources(ressource_param)
+
+ def verifyFile(self, path, content):
+ try:
+ f = open(path, 'r')
+ text = f.read()
+ f.close()
+ self.assertEqual(text, content)
+ except IOError as ex:
+ self.fail("IO exception:" + str(ex));
+
+ def create_JobParameters(self):
+ job_params = createJobParameters()
+ job_params.wckey="P11U5:CARBONES" #needed by edf clusters
+ job_params.resource_required.nb_proc = 1
+ return job_params
+
+ ##############################
+ # test of python_salome job
+ ##############################
+ def test_salome_py_job(self):
+ case_test_dir = os.path.join(TestCompo.test_dir, "salome_py")
+ mkdir_p(case_test_dir)
+
+ old_dir = os.getcwd()
+ os.chdir(case_test_dir)
+
+ # job script
+ script_file = "myScript.py"
+ job_script_file = os.path.join(case_test_dir, script_file)
+ script_text = """#! /usr/bin/env python3
+# -*- coding: utf-8 -*-
+
+# verify import salome
+import salome
+salome.salome_init()
+
+f = open('result.txt', 'w')
+f.write("Salut!")
+f.close()
+
+import os
+os.mkdir("subdir")
+f = open(os.path.join("subdir",'autre.txt'), 'w')
+f.write("Hello!")
+f.close()
+"""
+ f = open(job_script_file, "w")
+ f.write(script_text)
+ f.close()
+
+ local_result_dir = os.path.join(case_test_dir, "result_py_job-")
+ job_params = self.create_JobParameters()
+ job_params.job_type = "python_salome"
+ job_params.job_file = job_script_file
+ job_params.in_files = []
+ job_params.out_files = ["result.txt", "subdir"]
+
+ launcher = createLauncher()
+
+ for resource in self.ressources:
+ print("Testing python_salome job on ", resource)
+ job_params.result_directory = local_result_dir + resource
+ job_params.job_name = "PyJob" + resource
+ job_params.resource_required.name = resource
+ # use default working directory for this test
+
+ job_id = launcher.createJob(job_params)
+ launcher.launchJob(job_id)
+
+ jobState = launcher.getJobState(job_id)
+ print("Job %d state: %s" % (job_id,jobState))
+ while jobState != "FINISHED" and jobState != "FAILED" :
+ time.sleep(5)
+ jobState = launcher.getJobState(job_id)
+ print("Job %d state: %s" % (job_id,jobState))
+ pass
+
+ self.assertEqual(jobState, "FINISHED")
+
+ # getJobResults to default directory (result_directory)
+ launcher.getJobResults(job_id, "")
+ self.verifyFile(os.path.join(job_params.result_directory, "result.txt"),
+ "Salut!")
+ self.verifyFile(os.path.join(job_params.result_directory,
+ "subdir", "autre.txt"),
+ "Hello!")
+
+ # getJobResults to a specific directory
+ mydir = os.path.join(case_test_dir, "custom_result_dir" + resource)
+ launcher.getJobResults(job_id, mydir)
+ self.verifyFile(os.path.join(mydir, "result.txt"), "Salut!")
+ self.verifyFile(os.path.join(mydir, "subdir", "autre.txt"), "Hello!")
+ pass #for
+
+ os.chdir(old_dir)
+
+ ##############################
+ # test of command job type
+ ##############################
+ def test_command(self):
+ case_test_dir = os.path.join(TestCompo.test_dir, "command")
+ mkdir_p(case_test_dir)
+
+ # job script
+ data_file = "in.txt"
+ script_file = "myEnvScript.py"
+ script_text = """#! /usr/bin/env python3
+# -*- coding: utf-8 -*-
+
+import os,sys
+
+text_result = os.getenv("ENV_TEST_VAR","")
+
+f = open('result.txt', 'w')
+f.write(text_result)
+f.close()
+
+in_f = open("in.txt", "r")
+in_text = in_f.read()
+in_f.close()
+
+os.mkdir("copie")
+f = open(os.path.join("copie",'copie.txt'), 'w')
+f.write(in_text)
+f.close()
+"""
+ abs_script_file = os.path.join(case_test_dir, script_file)
+ f = open(abs_script_file, "w")
+ f.write(script_text)
+ f.close()
+ os.chmod(abs_script_file, 0o755)
+
+ #environment script
+ env_file = "myEnv.sh"
+ env_text = """export ENV_TEST_VAR="expected"
+"""
+ f = open(os.path.join(case_test_dir, env_file), "w")
+ f.write(env_text)
+ f.close()
+
+ # write data file
+ f = open(os.path.join(case_test_dir, data_file), "w")
+ f.write("to be copied")
+ f.close()
+
+ # job params
+ local_result_dir = os.path.join(case_test_dir, "result_com_job-")
+ job_params = self.create_JobParameters()
+ job_params.job_type = "command"
+ job_params.job_file = script_file
+ job_params.env_file = env_file
+ job_params.in_files = [data_file]
+ job_params.out_files = ["result.txt", "copie"]
+ job_params.local_directory = case_test_dir
+
+ # create and launch the job
+ launcher = createLauncher()
+ resManager= createResourcesManager()
+
+ for resource in self.ressources:
+ print("Testing command job on ", resource)
+ job_params.result_directory = local_result_dir + resource
+ job_params.job_name = "CommandJob_" + resource
+ job_params.resource_required.name = resource
+
+ # use the working directory of the resource
+ resParams = resManager.GetResourceDefinition(resource)
+ wd = os.path.join(resParams.working_directory,
+ "CommandJob" + self.suffix)
+ job_params.work_directory = wd
+
+ job_id = launcher.createJob(job_params)
+ launcher.launchJob(job_id)
+ # wait for the end of the job
+ jobState = launcher.getJobState(job_id)
+ print("Job %d state: %s" % (job_id,jobState))
+ while jobState != "FINISHED" and jobState != "FAILED" :
+ time.sleep(3)
+ jobState = launcher.getJobState(job_id)
+ print("Job %d state: %s" % (job_id,jobState))
+ pass
+
+ # verify the results
+ self.assertEqual(jobState, "FINISHED")
+ launcher.getJobResults(job_id, "")
+ self.verifyFile(os.path.join(job_params.result_directory, "result.txt"),
+ "expected")
+ self.verifyFile(os.path.join(job_params.result_directory,
+ "copie",'copie.txt'),
+ "to be copied")
+
+ # verify getJobWorkFile
+ mydir = os.path.join(case_test_dir, "work_dir" + resource)
+ success = launcher.getJobWorkFile(job_id, "result.txt", mydir)
+ self.assertEqual(success, True)
+ self.verifyFile(os.path.join(mydir, "result.txt"), "expected")
+
+ success = launcher.getJobWorkFile(job_id, "copie", mydir)
+ self.assertEqual(success, True)
+ self.verifyFile(os.path.join(mydir, "copie", "copie.txt"),
+ "to be copied")
+
+
+ ##############################
+ # test of yacs job type
+ ##############################
+ def test_yacs(self):
+ yacs_path = os.getenv("YACS_ROOT_DIR", "")
+ if not os.path.isdir(yacs_path):
+ self.skipTest("Needs YACS module to run. Please define YACS_ROOT_DIR.")
+
+ case_test_dir = os.path.join(TestCompo.test_dir, "yacs")
+ mkdir_p(case_test_dir)
+
+ #environment script
+ env_file = "myEnv.sh"
+ env_text = """export ENV_TEST_VAR="expected"
+"""
+ f = open(os.path.join(case_test_dir, env_file), "w")
+ f.write(env_text)
+ f.close()
+
+ # job script
+ script_text = """<?xml version='1.0' encoding='iso-8859-1' ?>
+<proc name="newSchema_1">
+ <container name="DefaultContainer">
+ <property name="container_kind" value="Salome"/>
+ <property name="attached_on_cloning" value="0"/>
+ <property name="container_name" value="FactoryServer"/>
+ <property name="name" value="localhost"/>
+ </container>
+ <inline name="PyScript0">
+ <script><code><![CDATA[import os
+text_result = os.getenv("ENV_TEST_VAR","")
+f = open('result.txt', 'w')
+f.write(text_result)
+f.close()
+]]></code></script>
+ <load container="DefaultContainer"/>
+ </inline>
+</proc>
+"""
+ yacs_file = "mySchema.xml"
+ job_script_file = os.path.join(case_test_dir, yacs_file)
+ f = open(job_script_file, "w")
+ f.write(script_text)
+ f.close()
+
+ local_result_dir = os.path.join(case_test_dir, "result_yacs_job-")
+ job_params = self.create_JobParameters()
+ job_params.job_type = "yacs_file"
+ job_params.job_file = job_script_file
+ job_params.env_file = os.path.join(case_test_dir,env_file)
+ job_params.out_files = ["result.txt"]
+
+ # define the interval between two YACS schema dumps (3 seconds)
+ #import Engines
+ #job_params.specific_parameters = [Engines.Parameter("EnableDumpYACS", "3")]
+ job_params.specific_parameters = {"EnableDumpYACS": "3"}
+
+ launcher = createLauncher()
+ resManager= createResourcesManager()
+
+ for resource in self.ressources:
+ print("Testing yacs job on ", resource)
+ job_params.result_directory = local_result_dir + resource
+ job_params.job_name = "YacsJob_" + resource
+ job_params.resource_required.name = resource
+
+ # use the working directory of the resource
+ resParams = resManager.GetResourceDefinition(resource)
+ wd = os.path.join(resParams.working_directory,
+ "YacsJob" + self.suffix)
+ job_params.work_directory = wd
+
+ job_id = launcher.createJob(job_params)
+ launcher.launchJob(job_id)
+ jobState = launcher.getJobState(job_id)
+
+ yacs_dump_success = False
+ print("Job %d state: %s" % (job_id,jobState))
+ while jobState != "FINISHED" and jobState != "FAILED" :
+ time.sleep(5)
+ jobState = launcher.getJobState(job_id)
+# yacs_dump_success = launcher.getJobWorkFile(job_id, "dumpState_mySchema.xml",
+ yacs_dump_success = launcher.getJobDumpState(job_id,
+ job_params.result_directory)
+ print("Job %d state: %s - dump: %s" % (job_id,jobState, yacs_dump_success))
+ pass
+
+ self.assertEqual(jobState, "FINISHED")
+
+ # Verify dumpState file is in the results
+ self.assertTrue(yacs_dump_success)
+ dump_file_path = os.path.join(job_params.result_directory,
+ "dumpState_mySchema.xml")
+ self.assertTrue(os.path.isfile(dump_file_path))
+
+ """
+ # Load the schema state from the dump file and verify the state of a node
+ import SALOMERuntime
+ SALOMERuntime.RuntimeSALOME_setRuntime(1)
+ import loader
+ schema = loader.YACSLoader().load(job_script_file)
+ stateParser = loader.stateParser()
+ sl = loader.stateLoader(stateParser, schema)
+ sl.parse(dump_file_path)
+ # 106 : "DONE" state code
+ self.assertEqual(106, schema.getChildByName("PyScript0").getEffectiveState())
+ """
+
+ # getJobResults to default directory (result_directory)
+ launcher.getJobResults(job_id, "")
+ self.verifyFile(os.path.join(job_params.result_directory, "result.txt"),
+ "expected")
+
+ ##############################
+ # test of yacs job type using "--init_port" driver option
+ ##############################
+ def test_yacsopt(self):
+ yacs_path = os.getenv("YACS_ROOT_DIR", "")
+ if not os.path.isdir(yacs_path):
+ self.skipTest("Needs YACS module to run. Please define YACS_ROOT_DIR.")
+
+ case_test_dir = os.path.join(TestCompo.test_dir, "yacs_opt")
+ mkdir_p(case_test_dir)
+
+ # job script
+ script_text = """<?xml version='1.0' encoding='iso-8859-1' ?>
+<proc name="myschema">
+ <type name="string" kind="string"/>
+ <type name="bool" kind="bool"/>
+ <type name="double" kind="double"/>
+ <type name="int" kind="int"/>
+ <container name="DefaultContainer">
+ <property name="container_kind" value="Salome"/>
+ <property name="attached_on_cloning" value="0"/>
+ <property name="container_name" value="FactoryServer"/>
+ <property name="name" value="localhost"/>
+ </container>
+ <inline name="mynode">
+ <script><code><![CDATA[
+text_result = "i=%s,d=%s,b=%s,s=%s" % (i,d,b,s)
+f = open('result.txt', 'w')
+f.write(text_result)
+f.close()
+]]></code></script>
+ <load container="DefaultContainer"/>
+ <inport name="i" type="int"/>
+ <inport name="d" type="double"/>
+ <inport name="b" type="bool"/>
+ <inport name="s" type="string"/>
+ </inline>
+</proc>
+"""
+ yacs_file = "simpleSchema.xml"
+ job_script_file = os.path.join(case_test_dir, yacs_file)
+ f = open(job_script_file, "w")
+ f.write(script_text)
+ f.close()
+
+ local_result_dir = os.path.join(case_test_dir, "result_yacsopt_job-")
+ job_params = self.create_JobParameters()
+ job_params.job_type = "yacs_file"
+ job_params.job_file = job_script_file
+ job_params.out_files = ["result.txt"]
+
+ # define the interval between two YACS schema dumps (3 seconds)
+ #import Engines
+ #job_params.specific_parameters = [Engines.Parameter("YACSDriverOptions",
+ # "-imynode.i=5 -imynode.d=3.7 -imynode.b=False -imynode.s=lili")]
+ job_params.specific_parameters = {"YACSDriverOptions":
+ "-imynode.i=5 -imynode.d=3.7 -imynode.b=False -imynode.s=lili"}
+ expected_result="i=5,d=3.7,b=False,s=lili"
+
+ launcher = createLauncher()
+ resManager= createResourcesManager()
+
+ for resource in self.ressources:
+ print("Testing yacs job with options on ", resource)
+ job_params.result_directory = local_result_dir + resource
+ job_params.job_name = "YacsJobOpt_" + resource
+ job_params.resource_required.name = resource
+
+ # use the working directory of the resource
+ resParams = resManager.GetResourceDefinition(resource)
+ wd = os.path.join(resParams.working_directory,
+ "YacsJobOpt" + self.suffix)
+ job_params.work_directory = wd
+
+ job_id = launcher.createJob(job_params)
+ launcher.launchJob(job_id)
+ jobState = launcher.getJobState(job_id)
+
+ yacs_dump_success = False
+ print("Job %d state: %s" % (job_id,jobState))
+ while jobState != "FINISHED" and jobState != "FAILED" :
+ time.sleep(5)
+ jobState = launcher.getJobState(job_id)
+ print("Job %d state: %s " % (job_id,jobState))
+ pass
+
+ self.assertEqual(jobState, "FINISHED")
+
+ # getJobResults to default directory (result_directory)
+ launcher.getJobResults(job_id, "")
+ self.verifyFile(os.path.join(job_params.result_directory, "result.txt"),
+ expected_result)
+
+ ############################################
+ # test of command job type with pre_command
+ ############################################
+ def test_command_pre(self):
+ case_test_dir = os.path.join(TestCompo.test_dir, "command_pre")
+ mkdir_p(case_test_dir)
+
+ # command to be run before the job
+ pre_command = "pre_command.sh"
+ pre_command_text = "echo 'it works!' > in.txt"
+ abs_pre_command_file = os.path.join(case_test_dir, pre_command)
+ f = open(abs_pre_command_file, "w")
+ f.write(pre_command_text)
+ f.close()
+ os.chmod(abs_pre_command_file, 0o755)
+
+ # job script
+ script_file = "myTestScript.py"
+ script_text = """#! /usr/bin/env python3
+# -*- coding: utf-8 -*-
+
+in_f = open("in.txt", "r")
+in_text = in_f.read()
+in_f.close()
+
+f = open('result.txt', 'w')
+f.write(in_text)
+f.close()
+"""
+ abs_script_file = os.path.join(case_test_dir, script_file)
+ f = open(abs_script_file, "w")
+ f.write(script_text)
+ f.close()
+ os.chmod(abs_script_file, 0o755)
+
+ # job params
+ local_result_dir = os.path.join(case_test_dir, "result_com_pre_job-")
+ job_params = self.create_JobParameters()
+ job_params.job_type = "command"
+ job_params.job_file = script_file
+ job_params.pre_command = pre_command
+ job_params.in_files = []
+ job_params.out_files = ["result.txt"]
+ job_params.local_directory = case_test_dir
+
+ # create and launch the job
+ launcher = createLauncher()
+ resManager= createResourcesManager()
+
+ for resource in self.ressources:
+ print("Testing command job on ", resource)
+ job_params.result_directory = local_result_dir + resource
+ job_params.job_name = "CommandPreJob_" + resource
+ job_params.resource_required.name = resource
+
+ # use the working directory of the resource
+ resParams = resManager.GetResourceDefinition(resource)
+ wd = os.path.join(resParams.working_directory,
+ "CommandPreJob" + self.suffix)
+ job_params.work_directory = wd
+
+ job_id = launcher.createJob(job_params)
+ launcher.launchJob(job_id)
+ # wait for the end of the job
+ jobState = launcher.getJobState(job_id)
+ print("Job %d state: %s" % (job_id,jobState))
+ while jobState != "FINISHED" and jobState != "FAILED" :
+ time.sleep(3)
+ jobState = launcher.getJobState(job_id)
+ print("Job %d state: %s" % (job_id,jobState))
+ pass
+
+ # verify the results
+ self.assertEqual(jobState, "FINISHED")
+ launcher.getJobResults(job_id, "")
+ self.verifyFile(os.path.join(job_params.result_directory, "result.txt"),
+ "it works!\n")
+
+ #################################
+ # test of command salome job type
+ #################################
+ def test_command_salome(self):
+ case_test_dir = os.path.join(TestCompo.test_dir, "command_salome")
+ mkdir_p(case_test_dir)
+
+ # job script
+ data_file = "in.txt"
+ script_file = "myEnvScript.py"
+ script_text = """#! /usr/bin/env python3
+# -*- coding: utf-8 -*-
+
+import os,sys
+# verify import salome
+import salome
+
+text_result = os.getenv("ENV_TEST_VAR","")
+
+f = open('result.txt', 'w')
+f.write(text_result)
+f.close()
+
+in_f = open("in.txt", "r")
+in_text = in_f.read()
+in_f.close()
+
+os.mkdir("copie")
+f = open(os.path.join("copie",'copie.txt'), 'w')
+f.write(in_text)
+f.close()
+"""
+ abs_script_file = os.path.join(case_test_dir, script_file)
+ f = open(abs_script_file, "w")
+ f.write(script_text)
+ f.close()
+ os.chmod(abs_script_file, 0o755)
+
+ #environment script
+ env_file = "myEnv.sh"
+ env_text = """export ENV_TEST_VAR="expected"
+"""
+ f = open(os.path.join(case_test_dir, env_file), "w")
+ f.write(env_text)
+ f.close()
+
+ # write data file
+ f = open(os.path.join(case_test_dir, data_file), "w")
+ f.write("to be copied")
+ f.close()
+
+ # job params
+ local_result_dir = os.path.join(case_test_dir, "result_comsalome_job-")
+ job_params = self.create_JobParameters()
+ job_params.job_type = "command_salome"
+ job_params.job_file = script_file
+ job_params.env_file = env_file
+ job_params.in_files = [data_file]
+ job_params.out_files = ["result.txt", "copie"]
+ job_params.local_directory = case_test_dir
+
+ # create and launch the job
+ launcher = createLauncher()
+ resManager= createResourcesManager()
+
+ for resource in self.ressources:
+ print("Testing command salome job on ", resource)
+ job_params.result_directory = local_result_dir + resource
+ job_params.job_name = "CommandSalomeJob_" + resource
+ job_params.resource_required.name = resource
+
+ # use the working directory of the resource
+ resParams = resManager.GetResourceDefinition(resource)
+ wd = os.path.join(resParams.working_directory,
+ "CommandSalomeJob" + self.suffix)
+ job_params.work_directory = wd
+
+ job_id = launcher.createJob(job_params)
+ launcher.launchJob(job_id)
+ # wait for the end of the job
+ jobState = launcher.getJobState(job_id)
+ print("Job %d state: %s" % (job_id,jobState))
+ while jobState != "FINISHED" and jobState != "FAILED" :
+ time.sleep(3)
+ jobState = launcher.getJobState(job_id)
+ print("Job %d state: %s" % (job_id,jobState))
+ pass
+
+ # verify the results
+ self.assertEqual(jobState, "FINISHED")
+ launcher.getJobResults(job_id, "")
+ self.verifyFile(os.path.join(job_params.result_directory, "result.txt"),
+ "expected")
+ self.verifyFile(os.path.join(job_params.result_directory,
+ "copie",'copie.txt'),
+ "to be copied")
+
+ # verify getJobWorkFile
+ mydir = os.path.join(case_test_dir, "work_dir" + resource)
+ success = launcher.getJobWorkFile(job_id, "result.txt", mydir)
+ self.assertEqual(success, True)
+ self.verifyFile(os.path.join(mydir, "result.txt"), "expected")
+
+ success = launcher.getJobWorkFile(job_id, "copie", mydir)
+ self.assertEqual(success, True)
+ self.verifyFile(os.path.join(mydir, "copie", "copie.txt"),
+ "to be copied")
+ pass
+ pass
+ pass
+
+if __name__ == '__main__':
+ # create study
+ unittest.main()
# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
#
-import pylauncher as pyla
-jyf=pyla.Job_YACSFile()
-jyf.setJobName("YDFX")
-jyf.setWorkDirectory("/scratch/geay/Example")
-jyf.setLocalDirectory("/home/geay/Example")
-jyf.setResultDirectory("/home/geay/Example")
-jyf.setLauncherFile("")
-jyf.setLauncherArgs("")
-jyf.setJobFile("/tmp/EvalYFX_geay_180119_093600.xml") # schema YACS
-jyf.setPreCommand("")
-jyf.setEnvFile("")
-for elt in []:
- jyf.add_in_file(elt)
-for elt in ["EvalYFX_geay_180119_093600"]:
- jyf.add_out_file(elt)
-jyf.setMaximumDuration("00:05")
-jyf.setQueue("")
-jyf.setPartition("")
-jyf.setExclusive(False)
-jyf.setMemPerCpu(0)
-jyf.setWCKey("P11U5:CARBONES")
-jyf.setExtraParams("")
-#
-rp=pyla.resourceParams()
-rp.name = "athos"
-rp.hostname = ""
-rp.OS = "Linux"
-rp.nb_proc = 5
-rp.nb_node = 0
-rp.nb_proc_per_node = 1
-rp.cpu_clock = -1
-rp.mem_mb = 0
-jyf.setResourceRequiredParams(rp)
-jyf.checkSpecificParameters()
-l = pyla.Launcher_cpp()
-res = pyla.ResourcesManager_cpp("/home/geay/salome/V9_DEV/appli_V9_2_0/CatalogResources.xml")
-l.SetResourcesManager(res)
-l.createJob(jyf)
-nb=jyf.getNumber()
-l.launchJob(nb)
-l.sendJobToSession(nb)
-#l.getJobState(nb)
-#l.getJobResults(nb,jyf.getLocalDirectory())
+import pylauncher
+jp = pylauncher.JobParameters_cpp()
+jp.job_name = "toto"
+jp.job_type = "command"
+jp.job_file = "/home/I35256/salome/scripts/job_sh/script.sh"
+jp.work_directory = "/tmp/wd"
+jp.result_directory = "/tmp/rd"
+rp = pylauncher.resourceParams()
+rp.name="localhost"
+rp.hostname="localhost"
+rp.nb_proc = 1
+jp.resource_required = rp
+launcher = pylauncher.Launcher_cpp()
+# no catalog file is given; the "localhost" resource is defined by default anyway
+res = pylauncher.ResourcesManager_cpp("")
+launcher.SetResourcesManager(res)
+jobid = launcher.createJob(jp)
+launcher.launchJob(jobid)
*/
//=============================================================================
-SALOME_ResourcesManager::SALOME_ResourcesManager(CORBA::ORB_ptr orb, PortableServer::POA_var poa, SALOME_NamingService *ns, const char *xmlFilePath) : _rm(xmlFilePath)
+SALOME_ResourcesManager::SALOME_ResourcesManager(CORBA::ORB_ptr orb,
+ PortableServer::POA_var poa,
+ SALOME_NamingService *ns,
+ const char *xmlFilePath)
+: _rm(new ResourcesManager_cpp(xmlFilePath))
{
MESSAGE("SALOME_ResourcesManager constructor");
_NS = ns;
SALOME_ResourcesManager::SALOME_ResourcesManager(CORBA::ORB_ptr orb,
PortableServer::POA_var poa,
- SALOME_NamingService *ns) : _rm()
+ SALOME_NamingService *ns) : _rm(new ResourcesManager_cpp())
{
MESSAGE("SALOME_ResourcesManager constructor");
_NS = ns;
*/
void SALOME_ResourcesManager::ListAllAvailableResources(Engines::ResourceList_out machines, Engines::IntegerList_out nbProcsOfMachines)
{
- const MapOfParserResourcesType& zeList(_rm.GetList());
+ const MapOfParserResourcesType& zeList(_rm->GetList());
std::size_t sz(zeList.size());
std::vector<std::string> ret0(sz);
std::vector<int> ret1(sz);
try
{
// Call C++ ResourceManager
- std::vector <std::string> vec = _rm.GetFittingResources(p);
+ std::vector <std::string> vec = _rm->GetFittingResources(p);
// C++ -> CORBA
ret = resourceList_CPPtoCORBA(vec);
// CORBA -> C++
std::vector<std::string> rl = resourceList_CORBAtoCPP(listOfResources);
- return CORBA::string_dup(_rm.Find("first", rl).c_str());
+ return CORBA::string_dup(_rm->Find("first", rl).c_str());
}
char *
// CORBA -> C++
std::vector<std::string> rl = resourceList_CORBAtoCPP(listOfResources);
- return CORBA::string_dup(_rm.Find(policy, rl).c_str());
+ return CORBA::string_dup(_rm->Find(policy, rl).c_str());
}
Engines::ResourceDefinition*
{
Engines::ResourceDefinition_var resDef;
try {
- ParserResourcesType resource = _rm.GetResourcesDescr(name);
+ ParserResourcesType resource = _rm->GetResourcesDescr(name);
resDef = resourceDefinition_CPPtoCORBA(resource);
} catch (const exception & ex) {
INFOS("Caught exception in GetResourceDefinition: " << ex.what());
try
{
ParserResourcesType resource = resourceDefinition_CORBAtoCPP(new_resource);
- _rm.AddResourceInCatalog(resource);
+ _rm->AddResourceInCatalog(resource);
if (write)
{
- _rm.WriteInXmlFile(std::string(xml_file));
- _rm.ParseXmlFiles();
+ _rm->WriteInXmlFile(std::string(xml_file));
+ _rm->ParseXmlFiles();
}
}
catch (const SALOME_Exception & e)
{
try
{
- _rm.DeleteResourceInCatalog(resource_name);
+ _rm->DeleteResourceInCatalog(resource_name);
}
catch (const SALOME_Exception & e)
{
if (write)
{
- _rm.WriteInXmlFile(std::string(xml_file));
- _rm.ParseXmlFiles();
+ _rm->WriteInXmlFile(std::string(xml_file));
+ _rm->ParseXmlFiles();
}
}
if (std::string(parallelLib) == "Dummy")
{
MESSAGE("[getMachineFile] parallelLib is Dummy");
- MapOfParserResourcesType resourcesList = _rm.GetList();
+ MapOfParserResourcesType resourcesList = _rm->GetList();
if (resourcesList.find(std::string(resource_name)) != resourcesList.end())
{
ParserResourcesType resource = resourcesList[std::string(resource_name)];
{
MESSAGE("[getMachineFile] parallelLib is Mpi");
- MapOfParserResourcesType resourcesList = _rm.GetList();
+ MapOfParserResourcesType resourcesList = _rm->GetList();
if (resourcesList.find(std::string(resource_name)) != resourcesList.end())
{
ParserResourcesType resource = resourcesList[std::string(resource_name)];
#include <string>
#include <fstream>
#include <vector>
+#include <memory>
#include "ResourcesManager.hxx"
#include "SALOME_ResourcesManager_Common.hxx"
void ListAllAvailableResources(Engines::ResourceList_out machines, Engines::IntegerList_out nbProcsOfMachines);
// Cpp Methods
void Shutdown();
- ResourcesManager_cpp *GetImpl() { return &_rm; }
+ std::shared_ptr<ResourcesManager_cpp>& GetImpl() { return _rm; }
static const char *_ResourcesManagerNameInNS;
SALOME_NamingService *_NS;
CORBA::ORB_var _orb;
PortableServer::POA_var _poa;
- ResourcesManager_cpp _rm;
+ std::shared_ptr<ResourcesManager_cpp> _rm;
};
#endif // RESSOURCESCATALOG_IMPL_H