#include "SALOME_Exception.idl"
#include "SALOME_ResourcesManager.idl"
-/*! \file SALOME_Launcher.idl \brief interfaces for %SALOME Launcher service
+/*! \file SALOME_Launcher.idl \brief Interfaces for %SALOME Launcher service
*/
module Engines
struct JobParameters
{
+ //! Name of the job.
string job_name;
- //! Job Type - Could be equal to "command" or "yacs_file" or "python_salome"
+
+ //! Type of the job.
+ /*! There are three supported types:
+ - "command" : execute #job_file script without %SALOME environment
+ - "python_salome" : execute #job_file python script by %SALOME
+ - "yacs_file" : execute #job_file by YACS module as a xml YACS schema
+ */
string job_type;
// Common values
+ //! Local path to the file to be executed by the job.
+ /*! The type of the file depends on #job_type.
+ If #job_type is "command", the #job_file must be a single filename
+ specifying a self-consistent script to be executed without any argument,
+ on the remote host.
+ */
string job_file;
+
+ //! Local path to a script to be sourced in the environment of the job.
+ /*! It may contain modifications of environment variables.
+ */
string env_file;
+
+ //! List of local data files to be copied to #work_directory.
+ /*! #job_file and #env_file are automatically copied, without adding them
+ to this list. If basenames are specified, then the files are supposed
+ to be located in #local_directory.
+ */
FilesList in_files;
+
+ //! List of results to get back at the end of the job.
+ /*! These results can be names of files or directories, produced by the job
+ in #work_directory. Directories will be copied recursively.
+ It is also possible to use an absolute path instead of the simple name,
+ (string beginning with '/') and this absolute path will be used instead
+ of #result_directory when SalomeLauncher::getJobResults is called.
+ \see SalomeLauncher::getJobResults
+ */
FilesList out_files;
+
+ //! Remote directory where the job will be executed.
+ /*! It must be used to specify the remote directory where to put all
+ the stuff to run the job. Note that the job will be executed from within
+ this directory. A change directory toward this working directory is done
+ by the batch system before running the job.
+ */
string work_directory;
+
+ //! Prefix to be applied to #in_files.
+ /*! It can be used to specify where to find the local input files.
+ It's optional if you specify the absolute path name of input files.
+ */
string local_directory;
+
+ //! Local directory where to get result files.
+ /*! It must be used to specify where to download the output files on the
+ local file system.
+ \see SalomeLauncher::getJobResults
+ */
string result_directory;
- /*! Time for the batch (has to be like this : hh:mm) - Could be empty, in
- this case, default value of the selected resource will be used.
+ //! Maximum time for the batch execution (expected format : "hh:mm").
+ /*! Could be empty, in this case, default value of the selected resource
+ will be used.
*/
string maximum_duration;
- // Memory is expressed in megabytes -> mem_mb
- // Number of Processors -> nb_proc
+ //! Specifies the rules to choose the resource where to execute the job.
+ /*! The two following additional parameters MUST be specified explicitly,
+ because they are not provided by the resource definition:
+ - mem_mb -> Memory expressed in megabytes.
+ - nb_proc -> Number of Processors.
+ */
ResourceParameters resource_required;
- /*!
- Name of the batch queue choosed - optional
- */
+ //! Name of the batch queue chosen - optional
string queue;
//! Specifies if the job must run in exclusive mode (without sharing nodes with other jobs)
//! String that is added to the job submission file - optional
string extra_params;
- /*!
- Specific parameters for each type of job - optional
+ //! Specific parameters for each type of job - optional
+ /*! This is a list of parameters (key - value pairs of strings) useful in
+ some specific situations.
+ Known parameters:
+ - EnableDumpYACS : value of the "dump" option of the "driver" command
+ when the job type is "yacs_file". It gives the number of seconds
+ between two updates of the state dump file. There will be no dump file
+ if this parameter is missing or if its value is less than 1.
*/
Engines::ParameterList specific_parameters;
- // Parameters for COORM
+ //! %Parameter for COORM
string launcher_file;
+ //! %Parameter for COORM
string launcher_args;
};
void notify(in string event_name, in string event_data);
};
-/*! \brief Interface of the %salomelauncher
- This interface is used for interaction with the unique instance
- of SalomeLauncher
+//! Interface of the %salome launcher.
+/*! This interface is used for interaction with the unique instance
+ of SalomeLauncher.
+ The use of this interface is explained in the YACS documentation,
+ article "Starting a SALOME application in a batch manager".
+ Other examples of use can be found in the modules JOBMANAGER, PARAMETRIC
+ and SMESH (PADDER tool).
*/
interface SalomeLauncher
{
// Main methods
+ //! Create a job and set its parameters, without launching it.
+ /*! Its state becomes "CREATED".
+ \return job id
+ */
long createJob (in Engines::JobParameters job_parameters) raises (SALOME::SALOME_Exception);
+
+ //! Launch an already created job (job's state should be "CREATED").
+ /*! Launching the job consists of:
+ - create the working directory on the remote file system
+ - copy the input files into the working directory
+ - source the environment file if defined
+ - run the job
+ */
void launchJob (in long job_id) raises (SALOME::SALOME_Exception);
+
+ //! Get the execution state of the job.
+ /*! \return "CREATED", "IN_PROCESS", "QUEUED", "RUNNING", "PAUSED",
+ "FINISHED" or "FAILED"
+ \see LIBBATCH/src/core/Constants.hxx
+ */
string getJobState (in long job_id) raises (SALOME::SALOME_Exception);
- string getAssignedHostnames (in long job_id) raises (SALOME::SALOME_Exception); // Get names or ids of hosts assigned to the job
+
+ //! Get names or ids of hosts assigned to the job
+ string getAssignedHostnames (in long job_id) raises (SALOME::SALOME_Exception);
+
+ //! Copy the result files from the work directory of the job
+ //! to a local directory.
+ /*! The list of result files is given by the JobParameters::out_files parameter.
+ If a result "file" is a directory, the copy is recursive.
+ The "logs" directory contains the standard and the error outputs of the job.
+ \param job_id Job id returned by createJob().
+ \param directory Local directory where to copy the results.
+ If this value is an empty string (""), files will be
+ copied to the directory given by
+ JobParameters::result_directory.
+ \see JobParameters
+ \see createJob
+ */
void getJobResults(in long job_id, in string directory) raises (SALOME::SALOME_Exception);
+
+ //! Try to copy the files named "dumpState*.xml" from the working directory.
+ /*! The file "dumpState_name.xml" can be produced by the execution of a YACS
+ schema and it contains the execution state of the schema.
+ You can activate the creation of this file by adding the parameter
+ "EnableDumpYACS" in JobParameters::specific_parameters when the job
+ is created.
+ \param job_id Job id returned by createJob().
+ \param directory Local directory where to copy the file.
+ If this value is an empty string (""), the file will be
+ copied to the directory given by
+ JobParameters::result_directory.
+ \return 1 if the copy succeeds.
+ \see JobParameters::specific_parameters
+ */
boolean getJobDumpState(in long job_id, in string directory) raises (SALOME::SALOME_Exception);
+
+ //! Kill the job and set its state to "FAILED"
void stopJob (in long job_id) raises (SALOME::SALOME_Exception);
+ //! Kill the job and remove it from the jobs list
void removeJob (in long job_id) raises (SALOME::SALOME_Exception);
// Useful methods
boolean testBatch (in ResourceParameters params) raises (SALOME::SALOME_Exception);
// SALOME kernel service methods
+ //! Shutdown the SalomeLauncher server.
void Shutdown();
+ //! Get the PID of the current process
long getPID();
// Observer and introspection methods
+ //! Add an observer to be notified of the jobs list modifications
void addObserver(in Engines::SalomeLauncherObserver observer);
void removeObserver(in Engines::SalomeLauncherObserver observer);
Engines::JobsList getJobsList();
Engines::JobParameters getJobParameters(in long job_id) raises (SALOME::SALOME_Exception);
// Save and load methods
+ //! Add to the current jobs list the jobs previously saved in an xml file.
void loadJobs(in string jobs_file) raises (SALOME::SALOME_Exception);
+ //! Save the current list of jobs in an xml file.
void saveJobs(in string jobs_file) raises (SALOME::SALOME_Exception);
};
struct ResourceParameters
{
//! resource name - manual selection
+ /*! If a name is provided, the resource will be imposed.
+ If the name is an empty string, the resource will be chosen to match
+ the other parameters.
+ */
string name;
+
//! host name
string hostname;
//! if true select only resources that can launch batch jobs
CompoList componentList;
// Permits to order resources
- //! required number of proc
+ //! required number of processors
+ /*! This parameter must be specified explicitly, because it is not provided
+ by the resource definition.
+ */
long nb_proc;
+
//! required memory size
+ /*! This parameter must be specified explicitly, because it is not provided
+ by the resource definition.
+
+ The parameter specifies the maximum memory value that could be allocated
+ for executing the job. This takes into account not only the data that
+ could be loaded by the batch process but also the linked dynamic library.
+ A possible problem, for example in the case where you use the ssh
+ emulation of a batch system, is to get an error message as below
+ when libBatch tries to run the ssh command:
+\verbatim
+/usr/bin/ssh: error while loading shared libraries: libcrypto.so.0.9.8: failed
+to map segment from shared object: Cannot allocate memory
+\endverbatim
+ In this example, the mem_mb was set to 1MB, a value that is not
+ sufficient to load the dynamic libraries linked to the ssh
+ executable (libcrypto.so in the error message).
+ So, even in the case of a simple test shell script, you should
+ set this value at least to a standard threshold as 500MB.
+ */
long mem_mb;
//! required frequency
long cpu_clock;
- //! required number of node
+ //! required number of nodes
long nb_node;
//! required number of proc per node
long nb_proc_per_node;
# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
#
+IF(SALOME_BUILD_TESTS)
+ ADD_SUBDIRECTORY(Test)
+ENDIF(SALOME_BUILD_TESTS)
+
INCLUDE_DIRECTORIES(
${LIBXML2_INCLUDE_DIR}
${OMNIORB_INCLUDE_DIR}
for(std::list<std::string>::iterator it = _out_files.begin(); it != _out_files.end(); it++)
{
std::string file = *it;
-
- // local file
- size_t found = file.find_last_of("/");
- std::string local_file = _result_directory + "/" + file.substr(found+1);
-
// remote file -> If file is not an absolute path, we apply _work_directory
std::string remote_file;
+ std::string local_file;
if (file.substr(0, 1) == std::string("/"))
+ {
remote_file = file;
+ size_t found = file.find_last_of("/");
+ local_file = file.substr(found+1);
+ }
else
+ {
remote_file = _work_directory + "/" + file;
+ local_file = file;
+ }
params[Batch::OUTFILE] += Batch::Couple(local_file, remote_file);
}
--- /dev/null
+# Copyright (C) 2012-2014 CEA/DEN, EDF R&D
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+#
+# See http://www.salome-platform.org/ or email : webmaster.salome@opencascade.com
+#
+
+# --- rules ---
+
+IF(NOT WIN32)
+ ADD_TEST(NAME SalomeLauncher
+ COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/../../UnitTests/prepare_test.py
+ ${CMAKE_CURRENT_SOURCE_DIR}/test_launcher.py
+ -d KERNEL_ROOT_DIR=${CMAKE_INSTALL_PREFIX}
+ )
+ENDIF()
--- /dev/null
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import unittest
+import os
+import sys
+import time
+
+# Test of SalomeLauncher.
+# This test should be run in the salome environment, using "salome shell"
+# and salome application should be running.
+# If YACS_ROOT_DIR is not set, the test of submitting a YACS schema will be
+# skipped.
+class TestCompo(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls):
+ # Prepare the test directory
+ import shutil
+ cls.test_dir = os.path.join(os.getcwd(), "test_dir")
+ cls.suffix = time.strftime("-%Y-%m-%d-%H-%M-%S")
+ shutil.rmtree(cls.test_dir, ignore_errors=True)
+ os.mkdir(cls.test_dir)
+
+ ##############################
+ # test of python_salome job
+ ##############################
+ def test_salome_py_job(self):
+ case_test_dir = os.path.join(TestCompo.test_dir, "salome_py")
+ os.mkdir(case_test_dir)
+
+ old_dir = os.getcwd()
+ os.chdir(case_test_dir)
+
+ # job script
+ script_file = "myScript.py"
+ job_script_file = os.path.join(case_test_dir, script_file)
+ script_text = """#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# verify import salome
+import salome
+salome.salome_init()
+
+f = open('result.txt', 'w')
+f.write("Salut!")
+f.close()
+
+import os
+os.mkdir("subdir")
+f = open(os.path.join("subdir",'autre.txt'), 'w')
+f.write("Hello!")
+f.close()
+"""
+ f = open(job_script_file, "w")
+ f.write(script_text)
+ f.close()
+
+ local_result_dir = os.path.join(case_test_dir, "result_simple_py_job")
+ job_params = salome.JobParameters()
+ job_params.job_name = "MyJob"
+ job_params.job_type = "python_salome"
+ job_params.job_file = job_script_file
+ job_params.in_files = []
+ job_params.out_files = ["result.txt", "subdir"]
+ job_params.result_directory = local_result_dir
+ job_params.resource_required = salome.ResourceParameters()
+ job_params.resource_required.nb_proc = 1
+
+ job_params.work_directory = "/tmp/job_salome_py" + self.suffix
+
+ launcher = salome.naming_service.Resolve('/SalomeLauncher')
+ job_id = launcher.createJob(job_params)
+
+ launcher.launchJob(job_id)
+
+ import time
+ jobState = launcher.getJobState(job_id)
+ print "Job %d state: %s" % (job_id,jobState)
+ while jobState != "FINISHED" and jobState != "FAILED" :
+ time.sleep(5)
+ jobState = launcher.getJobState(job_id)
+ print "Job %d state: %s" % (job_id,jobState)
+ pass
+
+ self.assertEqual(jobState, "FINISHED")
+
+ # getJobResults to default directory (result_directory)
+ launcher.getJobResults(job_id, "")
+ try:
+ f = open(os.path.join(local_result_dir, "result.txt"), 'r')
+ text = f.read()
+ f.close()
+ self.assertEqual(text, "Salut!")
+ except IOError,ex:
+ self.fail("IO exception:" + str(ex));
+
+ try:
+ f = open(os.path.join(local_result_dir, "subdir", "autre.txt"), 'r')
+ text = f.read()
+ f.close()
+ self.assertEqual(text, "Hello!")
+ except IOError,ex:
+ self.fail("IO exception:" + str(ex));
+
+ # getJobResults to a specific directory
+ mydir = os.path.join(case_test_dir, "custom_result_dir")
+ launcher.getJobResults(job_id, mydir)
+ try:
+ f = open(os.path.join(mydir, "result.txt"), 'r')
+ text = f.read()
+ f.close()
+ self.assertEqual(text, "Salut!")
+ except IOError,ex:
+ self.fail("IO exception:" + str(ex));
+
+ try:
+ f = open(os.path.join(mydir, "subdir", "autre.txt"), 'r')
+ text = f.read()
+ f.close()
+ self.assertEqual(text, "Hello!")
+ except IOError,ex:
+ self.fail("IO exception:" + str(ex));
+ pass
+
+ os.chdir(old_dir)
+
+ ##############################
+ # test of command job type
+ ##############################
+ def test_command(self):
+ case_test_dir = os.path.join(TestCompo.test_dir, "command")
+ os.mkdir(case_test_dir)
+
+ # job script
+ data_file = "in.txt"
+ script_file = "myEnvScript.py"
+ script_text = """#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import os,sys
+
+text_result = os.getenv("ENV_TEST_VAR","")
+
+f = open('result.txt', 'w')
+f.write(text_result)
+f.close()
+
+in_f = open("in.txt", "r")
+in_text = in_f.read()
+in_f.close()
+
+os.mkdir("subdir")
+f = open(os.path.join("subdir",'altul.txt'), 'w')
+f.write(in_text)
+f.close()
+"""
+ abs_script_file = os.path.join(case_test_dir, script_file)
+ f = open(abs_script_file, "w")
+ f.write(script_text)
+ f.close()
+ os.chmod(abs_script_file, 0o755)
+
+    # environment script
+ env_file = "myEnv.sh"
+ env_text = """export ENV_TEST_VAR="expected"
+"""
+ f = open(os.path.join(case_test_dir, env_file), "w")
+ f.write(env_text)
+ f.close()
+
+ # write data file
+ f = open(os.path.join(case_test_dir, data_file), "w")
+ f.write("expected data")
+ f.close()
+
+ # job params
+ local_result_dir = os.path.join(case_test_dir, "resultats_env")
+ job_params = salome.JobParameters()
+ job_params.job_name = "CommandJob"
+ job_params.job_type = "command"
+ job_params.job_file = script_file
+ job_params.env_file = env_file
+ job_params.in_files = [data_file]
+ job_params.out_files = ["result.txt", "subdir"]
+ job_params.local_directory = case_test_dir
+ job_params.result_directory = local_result_dir
+ job_params.resource_required = salome.ResourceParameters()
+ job_params.resource_required.nb_proc = 1
+ job_params.work_directory = "/tmp/command_job" + self.suffix
+
+ # create and launch the job
+ launcher = salome.naming_service.Resolve('/SalomeLauncher')
+ job_id = launcher.createJob(job_params)
+ launcher.launchJob(job_id)
+
+ # wait for the end of the job
+ import time
+ jobState = launcher.getJobState(job_id)
+ print "Job %d state: %s" % (job_id,jobState)
+ while jobState != "FINISHED" and jobState != "FAILED" :
+ time.sleep(3)
+ jobState = launcher.getJobState(job_id)
+ print "Job %d state: %s" % (job_id,jobState)
+ pass
+
+ # verify the results
+ self.assertEqual(jobState, "FINISHED")
+ launcher.getJobResults(job_id, "")
+ try:
+ f = open(os.path.join(local_result_dir, "result.txt"), 'r')
+ text = f.read()
+ f.close()
+ self.assertEqual(text, "expected")
+ except IOError,ex:
+ self.fail("IO exception:" + str(ex));
+
+ ##############################
+ # test of yacs job type
+ ##############################
+ def test_yacs(self):
+ yacs_path = os.getenv("YACS_ROOT_DIR", "")
+ if not os.path.isdir(yacs_path):
+ self.skipTest("Needs YACS module to run. Please define YACS_ROOT_DIR.")
+
+ case_test_dir = os.path.join(TestCompo.test_dir, "yacs")
+ os.mkdir(case_test_dir)
+
+    # environment script
+ env_file = "myEnv.sh"
+ env_text = """export ENV_TEST_VAR="expected"
+"""
+ f = open(os.path.join(case_test_dir, env_file), "w")
+ f.write(env_text)
+ f.close()
+
+ # job script
+ script_text = """<?xml version='1.0' encoding='iso-8859-1' ?>
+<proc name="newSchema_1">
+ <property name="DefaultStudyID" value="1"/>
+ <container name="DefaultContainer">
+ <property name="container_kind" value="Salome"/>
+ <property name="attached_on_cloning" value="0"/>
+ <property name="container_name" value="FactoryServer"/>
+ <property name="name" value="localhost"/>
+ </container>
+ <inline name="PyScript0">
+ <script><code><![CDATA[import os
+text_result = os.getenv("ENV_TEST_VAR","")
+f = open('result.txt', 'w')
+f.write(text_result)
+f.close()
+]]></code></script>
+ <load container="DefaultContainer"/>
+ </inline>
+</proc>
+"""
+ yacs_file = "mySchema.xml"
+ job_script_file = os.path.join(case_test_dir, yacs_file)
+ f = open(job_script_file, "w")
+ f.write(script_text)
+ f.close()
+
+ local_result_dir = os.path.join(case_test_dir, "result_yacs_job")
+ job_params = salome.JobParameters()
+ job_params.job_name = "MyYacsJob"
+ job_params.job_type = "yacs_file"
+ job_params.job_file = job_script_file
+ job_params.env_file = os.path.join(case_test_dir,env_file)
+ #job_params.in_files = [case_test_dir]
+ job_params.out_files = ["result.txt"]
+ job_params.result_directory = local_result_dir
+
+ # define the interval between two YACS schema dumps (3 seconds)
+ import Engines
+ job_params.specific_parameters = [Engines.Parameter("EnableDumpYACS", "3")]
+
+ job_params.resource_required = salome.ResourceParameters()
+ job_params.resource_required.nb_proc = 1
+
+ job_params.work_directory = "/tmp/job_yacs" + self.suffix
+
+ launcher = salome.naming_service.Resolve('/SalomeLauncher')
+ job_id = launcher.createJob(job_params)
+
+ launcher.launchJob(job_id)
+
+ import time
+ jobState = launcher.getJobState(job_id)
+ yacs_dump_success = False
+ print "Job %d state: %s" % (job_id,jobState)
+ while jobState != "FINISHED" and jobState != "FAILED" :
+ time.sleep(5)
+ jobState = launcher.getJobState(job_id)
+ yacs_dump_success = launcher.getJobDumpState(job_id, local_result_dir)
+ print "Job %d state: %s - dump: %s" % (job_id,jobState, yacs_dump_success)
+ pass
+
+ self.assertEqual(jobState, "FINISHED")
+
+ # Verify dumpState file is in the results
+ self.assertTrue(yacs_dump_success)
+ dump_file_path = os.path.join(local_result_dir, "dumpState_mySchema.xml")
+ self.assertTrue(os.path.isfile(dump_file_path))
+
+ # Load the schema state from the dump file and verify the state of a node
+ import SALOMERuntime
+ SALOMERuntime.RuntimeSALOME_setRuntime(1)
+ import loader
+ schema = loader.YACSLoader().load(job_script_file)
+ stateParser = loader.stateParser()
+ sl = loader.stateLoader(stateParser, schema)
+ sl.parse(dump_file_path)
+ # 106 : "DONE" state code
+ self.assertEqual(106, schema.getChildByName("PyScript0").getEffectiveState())
+
+ # getJobResults to default directory (result_directory)
+ launcher.getJobResults(job_id, "")
+ try:
+ f = open(os.path.join(local_result_dir, "result.txt"), 'r')
+ text = f.read()
+ f.close()
+ self.assertEqual(text, "expected")
+ except IOError,ex:
+ self.fail("IO exception:" + str(ex))
+
+if __name__ == '__main__':
+  # create study
+ import salome
+ salome.salome_init()
+ unittest.main()
+
+ # load catalogs
+# mc = salome.naming_service.Resolve('/Kernel/ModulCatalog')
+# ior = salome.orb.object_to_string(mc)
+# import SALOMERuntime
+# SALOMERuntime.RuntimeSALOME_setRuntime()
+# salome_runtime = SALOMERuntime.getSALOMERuntime()
+# session_catalog = salome_runtime.loadCatalog("session", ior)
+# salome_runtime.addCatalog(session_catalog)
+
--- /dev/null
+#! /usr/bin/env python
+# -*- coding: utf-8 -*-
+
+usage="""
+This script prepares the test environment and runs a test script:
+ - clean and create test directory
+ - create a SALOME application
+ - launch salome
+ - launch the test script within SALOME environment
+ - kill salome
+
+ This script uses the following environment variables:
+ - ROOT_SALOME : directory which contains salome_context.cfg.
+ This variable is usually defined in salome_prerequisites.sh
+ - KERNEL_ROOT_DIR and YACS_ROOT_DIR : directories of modules installation
+ Those variables are usually defined in salome_modules.sh
+ Environment variables can be passed to the script using the -d option.
+"""
+
+import os
+import sys
+
+class TestEnvironment:
+ def setUp(self):
+ import shutil
+ shutil.rmtree("appli", ignore_errors=True)
+
+ # create config_appli.xml in current directory
+ salome_path = os.getenv("ROOT_SALOME", "")
+ salome_context_file = os.path.join(salome_path, "salome_context.cfg")
+ if not os.path.isfile(salome_context_file):
+ print "File salome_context.cfg not found."
+ print "Search path:" + salome_path
+ print "This test needs ROOT_SALOME environment variable in order to run"
+ exit(1)
+
+ config_appli_text = '''<application>
+<context path="''' + salome_context_file + '''"/>
+<modules>
+ <module name="KERNEL" path="'''
+ kernel_path = os.getenv("KERNEL_ROOT_DIR", "")
+ if not os.path.isdir(kernel_path) :
+ print "KERNEL_ROOT_DIR not defined"
+ exit(1)
+ pass
+
+ config_appli_text += kernel_path + '"/>'
+
+ # some tests need YACS module.
+ yacs_path = os.getenv("YACS_ROOT_DIR", "")
+ if os.path.isdir(yacs_path):
+ config_appli_text += '''
+ <module name="YACS" path="'''
+ config_appli_text += yacs_path + '"/>'
+ pass
+ config_appli_text += '''
+</modules>
+</application>'''
+
+ f = open("config_appli.xml", 'w')
+ f.write(config_appli_text)
+ f.close()
+
+ # create a SALOME application
+ appli_gen_file = os.path.join(kernel_path,
+ "bin","salome","appli_gen.py")
+ appli_dir = "appli"
+ os.system(appli_gen_file + " --prefix="+appli_dir+
+ " --config=config_appli.xml")
+
+ # start salome
+ import imp
+ sys.path[:0] = [os.path.join(appli_dir, "bin", "salome", "appliskel")]
+ self.salome_module = imp.load_source("SALOME", os.path.join(appli_dir, "salome"))
+ try:
+ self.salome_module.main(["start", "-t"])
+ except SystemExit, e:
+ # There is an exit() call in salome.main. Just ignore it.
+ pass
+
+ def run(self, script):
+ ret = 0
+ try:
+ ret = self.salome_module.main(["shell", script])
+ except SystemExit, e:
+ # return exit value
+ ret = e.code
+ return ret
+
+ def tearDown(self):
+ try:
+ self.salome_module.main(["killall"])
+ except SystemExit, e:
+ pass
+ pass
+ pass #class TestEnvironment
+
+if __name__ == '__main__':
+ import argparse
+ parser = argparse.ArgumentParser(description=usage,
+ formatter_class=argparse.RawDescriptionHelpFormatter)
+ parser.add_argument('command', help="Test command to be run.")
+ parser.add_argument('-d', '--define', action='append', help="VARIABLE=VALUE")
+
+ args = parser.parse_args()
+ for opt in args.define:
+ opts = opt.split('=', 1)
+ os.environ[opts[0]] = opts[1]
+
+ envTest = TestEnvironment()
+ envTest.setUp()
+ ret = envTest.run(args.command)
+ envTest.tearDown()
+ exit(ret)
+ pass
+