import src.debug as DBG
import src.returnCode as RCO
import src.utilsSat as UTS
+import src.product as PROD
from src.salomeTools import _BaseCommand
+import src.environment as ENVI
########################################################################
# Command class
options = self.getOptions()
# check for APPLICATION
- rc = UTS.check_config_has_application(config)
- if not rc.isOk(): return rc
+ returnCode = UTS.check_config_has_application(config)
+ if not returnCode.isOk(): return returnCode
application = config.VARS.application
logger.info(_("Building application for <header>%s<reset>\n") % application)
return RCO.ReturnCode("OK")
-
-# Creates an alias for runAppli.
def make_alias(appli_path, alias_path, force=False):
+ """Creates an alias for runAppli"""
assert len(alias_path) > 0, "Bad name for alias"
if os.path.exists(alias_path) and not force:
raise Exception(_("Cannot create the alias '%s'\n") % alias_path)
(module, has_gui, module_path))
return flagline
-##
-# Creates the config file to create an application with the list of modules.
def create_config_file(config, modules, env_file, logger):
-
+ """Creates the config file to create an application with the list of modules."""
samples = ""
if 'SAMPLES' in config.APPLICATION.products:
- samples = src.product.get_product_config(config, 'SAMPLES').source_dir
+ samples = PROD.get_product_config(config, 'SAMPLES').source_dir
- config_file = src.get_tmp_filename(config, "appli_config.xml")
+ config_file = UTS.get_tmp_filename(config, "appli_config.xml")
f = open(config_file, "w")
f.write('<application>\n')
flagline = False
for m in modules:
- mm = src.product.get_product_config(config, m)
- if src.product.product_is_smesh_plugin(mm):
+ mm = PROD.get_product_config(config, m)
+ if PROD.product_is_smesh_plugin(mm):
continue
if 'install_dir' in mm and bool(mm.install_dir):
- if src.product.product_is_cpp(mm):
+ if PROD.product_is_cpp(mm):
# cpp module
- for aa in src.product.get_product_components(mm):
+ for aa in PROD.get_product_components(mm):
install_dir = os.path.join(config.APPLICATION.workdir,
"INSTALL")
mp = os.path.join(install_dir, aa)
f.write(etree.tostring(document, encoding='utf-8'))
f.close()
-##
-# Generates the application with the config_file.
def generate_application(config, appli_dir, config_file, logger):
+ """Generates the application with the config_file."""
target_dir = os.path.dirname(appli_dir)
- install_KERNEL_dir = src.product.get_product_config(config,
+ install_KERNEL_dir = PROD.get_product_config(config,
'KERNEL').install_dir
script = os.path.join(install_KERNEL_dir, "bin", "salome", "appli_gen.py")
if not os.path.exists(script):
# Add SALOME python in the environment in order to avoid python version
# problems at appli_gen.py call
if 'Python' in config.APPLICATION.products:
- envi = src.environment.SalomeEnviron(config,
- src.environment.Environ(
- dict(os.environ)),
- True)
+ envi = ENVI.SalomeEnviron(config, ENVI.Environ(dict(os.environ)), True)
envi.set_a_product('Python', logger)
command = "python %s --prefix=%s --config=%s" % (script,
"""
return "%s %s " % (message, '.' * (pad - len(message.decode("UTF-8"))))
-##
-# Creates a SALOME application.
def create_application(config, appli_dir, catalog, logger, display=True):
-
+    """Creates a SALOME application."""
SALOME_modules = get_SALOME_modules(config)
warn = ['KERNEL', 'GUI']
def get_SALOME_modules(config):
l_modules = []
for product in config.APPLICATION.products:
- product_info = src.product.get_product_config(config, product)
- if (src.product.product_is_SALOME(product_info) or
- src.product.product_is_generated(product_info)):
+ product_info = PROD.get_product_config(config, product)
+ if (PROD.product_is_SALOME(product_info) or
+ PROD.product_is_generated(product_info)):
l_modules.append(product)
return l_modules
logger.info(get_step(_("Creating environment files")))
status = "<KO>"
- VersionSalome = src.get_salome_version(config)
+ VersionSalome = UTS.get_salome_version(config)
if VersionSalome >= 820:
# for salome 8+ we use a salome context file for the virtual app
app_shell="cfg"
cmd = '"cat /proc/cpuinfo | grep MHz ; cat /proc/meminfo | grep MemTotal"'
user = getpass.getuser()
- catfile = src.get_tmp_filename(config, "CatalogResources.xml")
+ catfile = UTS.get_tmp_filename(config, "CatalogResources.xml")
catalog = file(catfile, "w")
catalog.write("""\
<!DOCTYPE ResourcesCatalog>
import src.debug as DBG
import src.returnCode as RCO
import src.utilsSat as UTS
+import src.product as PROD
+import src.compilation as COMP
from src.salomeTools import _BaseCommand
CHECK_PROPERTY = "has_unit_tests"
# Construct the list of tuple containing
# the products name and their definition
- products_infos = src.product.get_products_infos(products, cfg)
+ products_infos = PROD.get_products_infos(products, cfg)
- products_infos = [pi for pi in products_infos if not(
- src.product.product_is_native(pi[1]) or
- src.product.product_is_fixed(pi[1]))]
+ products_infos = [pi for pi in products_infos \
+ if not(PROD.product_is_native(pi[1]) or PROD.product_is_fixed(pi[1])) ]
return products_infos
# Verify if the command has to be launched or not
ignored = False
msg += ""
- if not src.get_property_in_product_cfg(p_info, CHECK_PROPERTY):
+ if not UTS.get_property_in_product_cfg(p_info, CHECK_PROPERTY):
msg += _("The product %s is defined as not having tests: product ignored.\n") % p_name
ignored = True
if "build_dir" not in p_info:
msg += _("The product %s have no 'build_dir' key: product ignored.\n") % p_name
ignored = True
- if not src.product.product_compiles(p_info):
+ if not PROD.product_compiles(p_info):
msg += _("The product %s is defined as not compiling: product ignored.\n") % p_name
ignored = True
# Get the command to execute for script products
cmd_found = True
command = ""
- if src.product.product_has_script(p_info) and not ignored:
+ if PROD.product_has_script(p_info) and not ignored:
command = UTS.get_config_key(p_info, "test_build", "Not found")
if command == "Not found":
cmd_found = False
# Instantiate the class that manages all the construction commands
# like cmake, check, make install, make test, environment management, etc...
- builder = src.compilation.Builder(config, logger, p_info)
+ builder = COMP.Builder(config, logger, p_info)
# Prepare the environment
UTS.log_step(logger, header, "PREPARE ENV")
import src.debug as DBG
import src.returnCode as RCO
import src.utilsSat as UTS
+import src.product as PROD
from src.salomeTools import _BaseCommand
# Compatibility python 2/3 for input function
l_dir_source = []
for __, product_info in products_infos:
if product_has_dir(product_info, without_dev):
- l_dir_source.append(src.Path(product_info.source_dir))
+ l_dir_source.append(UTS.Path(product_info.source_dir))
return l_dir_source
def get_build_directories(products_infos):
for __, product_info in products_infos:
if product_has_dir(product_info):
if "build_dir" in product_info:
- l_dir_build.append(src.Path(product_info.build_dir))
+ l_dir_build.append(UTS.Path(product_info.build_dir))
return l_dir_build
def get_install_directories(products_infos):
l_dir_install = []
for __, product_info in products_infos:
if product_has_dir(product_info):
- l_dir_install.append(src.Path(product_info.install_dir))
+ l_dir_install.append(UTS.Path(product_info.install_dir))
return l_dir_install
def product_has_dir(product_info, without_dev=False):
True if there is a source, build and install
directory corresponding to the product described by product_info.
"""
- if (src.product.product_is_native(product_info) or
- src.product.product_is_fixed(product_info)):
+ if (PROD.product_is_native(product_info) or \
+ PROD.product_is_fixed(product_info)):
return False
if without_dev:
- if src.product.product_is_dev(product_info):
+ if PROD.product_is_dev(product_info):
return False
return True
import src.returnCode as RCO
import src.utilsSat as UTS
import src.pyconf as PYCONF
+import src.product as PROD
from src.salomeTools import _BaseCommand
# Compatibility python 2/3 for input function
# Construct the list of tuple containing
# the products name and their definition
- products_infos = src.product.get_products_infos(products, cfg)
+ products_infos = PROD.get_products_infos(products, cfg)
- products_infos = [pi for pi in products_infos if not(
- src.product.product_is_fixed(pi[1]))]
+ products_infos = [pi for pi in products_infos if not(PROD.product_is_fixed(pi[1]))]
return products_infos
p_name, __ = p_name_p_info
# Get all products of the application
products = config.APPLICATION.products
- products_infos = src.product.get_products_infos(products, config)
+ products_infos = PROD.get_products_infos(products, config)
for p_name_potential_child, p_info_potential_child in products_infos:
- if ("depend" in p_info_potential_child and
- p_name in p_info_potential_child.depend):
+ if ("depend" in p_info_potential_child and \
+ p_name in p_info_potential_child.depend):
l_res.append(p_name_potential_child)
return l_res
"product_name" : p_name.name,
"appli_name" : config.VARS.application} )
raise Exception(msg)
- prod_info_child = src.product.get_product_config(config,
- child_name)
+ prod_info_child = PROD.get_product_config(config, child_name)
pname_pinfo_child = (prod_info_child.name, prod_info_child)
# Do not append the child if it is native or fixed and
# the corresponding parameter is called
if without_native_fixed:
- if not(src.product.product_is_native(prod_info_child) or
- src.product.product_is_fixed(prod_info_child)):
+ if not(PROD.product_is_native(prod_info_child) or \
+ PROD.product_is_fixed(prod_info_child)):
l_children.append(pname_pinfo_child)
else:
l_children.append(pname_pinfo_child)
"product_name" : p_name,
"appli_name" : config.VARS.application})
raise Exception(msg)
- prod_info_father = src.product.get_product_config(config,
- father_name)
+ prod_info_father = PROD.get_product_config(config, father_name)
pname_pinfo_father = (prod_info_father.name, prod_info_father)
# Do not append the father if it is native or fixed and
# the corresponding parameter is called
if without_native_fixed:
- if not(src.product.product_is_native(prod_info_father) or
- src.product.product_is_fixed(prod_info_father)):
+ if not(PROD.product_is_native(prod_info_father) or \
+ PROD.product_is_fixed(prod_info_father)):
l_fathers.append(pname_pinfo_father)
else:
l_fathers.append(pname_pinfo_father)
:param p_infos: (list)
List of (str, Config) => (product_name, product_info)
"""
- l_prod_sorted = src.deepcopy_list(p_infos)
+ l_prod_sorted = UTS.deepcopy_list(p_infos)
for prod in p_infos:
l_fathers = get_recursive_fathers(config,
prod,
return l_prod_sorted
def extend_with_fathers(config, p_infos):
- p_infos_res = src.deepcopy_list(p_infos)
+ p_infos_res = UTS.deepcopy_list(p_infos)
for p_name_p_info in p_infos:
fathers = get_recursive_fathers(config,
p_name_p_info,
return p_infos_res
def extend_with_children(config, p_infos):
- p_infos_res = src.deepcopy_list(p_infos)
+ p_infos_res = UTS.deepcopy_list(p_infos)
for p_name_p_info in p_infos:
children = get_recursive_children(config,
p_name_p_info,
l_depends_not_installed = []
fathers = get_recursive_fathers(config, p_name_p_info, without_native_fixed=True)
for p_name_father, p_info_father in fathers:
- if not(src.product.check_installation(p_info_father)):
+ if not(PROD.check_installation(p_info_father)):
l_depends_not_installed.append(p_name_father)
return l_depends_not_installed
continue
# Do nothing if the product is native
- if src.product.product_is_native(p_info):
+ if PROD.product_is_native(p_info):
UTS.log_step(logger, header, "native")
logger.info("\n")
continue
# Recompute the product information to get the right install_dir
# (it could change if there is a clean of the install directory)
- p_info = src.product.get_product_config(config, p_name)
+ p_info = PROD.get_product_config(config, p_name)
# Check if it was already successfully installed
- if src.product.check_installation(p_info):
+ if PROD.check_installation(p_info):
logger.info(_("Already installed\n"))
continue
# build_sources : cmake -> cmake, make, make install
# build_sources : script -> script executions
res = 0
- if (src.product.product_is_autotools(p_info) or
- src.product.product_is_cmake(p_info)):
+ if (PROD.product_is_autotools(p_info) or PROD.product_is_cmake(p_info)):
res, len_end_line, error_step = compile_product_cmake_autotools(sat,
p_name_info,
config,
logger,
header,
len_end)
- if src.product.product_has_script(p_info):
+ if PROD.product_has_script(p_info):
res, len_end_line, error_step = compile_product_script(sat,
p_name_info,
config,
# Logging and sat command call for make step
# Logging take account of the fact that the product has a compilation
# script or not
- if src.product.product_has_script(p_info):
+ if PROD.product_has_script(p_info):
# if the product has a compilation script,
# it is executed during make step
scrit_path_display = UTS.label(
compile_cfg.addMapping(prod_name,
PYCONF.Mapping(compile_cfg),
"")
- prod_dep_info = src.product.get_product_config(config, prod_name, False)
+ prod_dep_info = PROD.get_product_config(config, prod_name, False)
compile_cfg[prod_name] = prod_dep_info.version
# Write it in the install directory of the product
- compile_cfg_path = os.path.join(p_info.install_dir, src.CONFIG_FILENAME)
+ compile_cfg_path = os.path.join(p_info.install_dir, UTS.get_CONFIG_FILENAME())
f = open(compile_cfg_path, 'w')
compile_cfg.__save__(f)
f.close()
import src.debug as DBG
import src.returnCode as RCO
import src.utilsSat as UTS
+import src.product as PROD
+import src.compilation as COMP
from src.salomeTools import _BaseCommand
########################################################################
# Construct the list of tuple containing
# the products name and their definition
- products_infos = src.product.get_products_infos(products, cfg)
+ products_infos = PROD.get_products_infos(products, cfg)
- products_infos = [pi for pi in products_infos if not(src.product.product_is_native(pi[1]) or src.product.product_is_fixed(pi[1]))]
+ products_infos = [pi for pi in products_infos \
+ if not(PROD.product_is_native(pi[1]) or PROD.product_is_fixed(pi[1]))]
return products_infos
# Instantiate the class that manages all the construction commands
# like cmake, make, make install, make test, environment management, etc...
- builder = src.compilation.Builder(config, logger, p_info)
+ builder = COMP.Builder(config, logger, p_info)
# Prepare the environment
UTS.log_step(logger, header, "PREPARE ENV")
# Execute buildconfigure, configure if the product is autotools
# Execute cmake if the product is cmake
res = 0
- if src.product.product_is_autotools(p_info):
+ if PROD.product_is_autotools(p_info):
UTS.log_step(logger, header, "BUILDCONFIGURE")
res_bc = builder.build_configure()
UTS.log_res_step(logger, res_bc)
res_c = builder.configure(conf_option)
UTS.log_res_step(logger, res_c)
res += res_c
- if src.product.product_is_cmake(p_info):
+ if PROD.product_is_cmake(p_info):
UTS.log_step(logger, header, "CMAKE")
res_cm = builder.cmake(conf_option)
UTS.log_res_step(logger, res_cm)
import src.returnCode as RCO
import src.utilsSat as UTS
from src.salomeTools import _BaseCommand
+import src.environment as ENVI
+import src.architecture as ARCH
# list of available shells with extensions
C_SHELLS = { "bash": "sh", "bat": "bat", "cfg" : "cfg" }
if options.shell == []:
shell = ["bash"]
- if src.architecture.is_windows():
+ if ARCH.is_windows():
shell = ["bat"]
else:
shell = options.shell
if shell not in C_SHELLS:
logger.warning(_("Unknown shell: %s\n") % shell)
else:
- shells_list.append(src.environment.Shell(shell, C_SHELLS[shell]))
+ shells_list.append(ENVI.Shell(shell, C_SHELLS[shell]))
- writer = src.environment.FileEnvWriter(config,
- logger,
- out_dir,
- src_root,
- env_info)
+ writer = ENVI.FileEnvWriter(config, logger, out_dir, src_root, env_info)
writer.silent = silent
files = []
for_build = True
import src.debug as DBG
import src.returnCode as RCO
import src.utilsSat as UTS
+import src.product as PROD
from src.salomeTools import _BaseCommand
else:
# find all installation paths
all_products = cfg_APP.products.keys()
- l_product_cfg = src.product.get_products_infos(all_products, config)
+ l_product_cfg = PROD.get_products_infos(all_products, config)
l_dir_path = [pi.install_dir for tmp, pi in l_product_cfg]
# Get the files to ignore during the searching
directories_ignored = options.exclude_path
# Check the directories
- l_path = src.deepcopy_list(l_dir_path)
+ l_path = UTS.deepcopy_list(l_dir_path)
l_dir_path = []
for dir_path in l_path:
if not(os.path.isdir(dir_path)):
import src.debug as DBG
import src.returnCode as RCO
import src.utilsSat as UTS
+import src.product as PROD
+import src.compilation as COMP
from src.salomeTools import _BaseCommand
import src.pyconf as PYCONF
+import src.environment as ENVI
########################################################################
# Command class
logger.info( _('Generation of SALOME modules for application %s\n') % \
UTS.label(config.VARS.application) )
- status = src.KO_STATUS
+ status = RCO._KO_STATUS
# verify that YACSGEN is available
returnCode = check_yacsgen(config, options.yacsgen, logger)
logger.error(_("Unknown product %s") % lprod)
continue
- pi = src.product.get_product_config(config, product)
- if not src.product.product_is_generated(pi):
+ pi = PROD.get_product_config(config, product)
+ if not PROD.product_is_generated(pi):
logger.info(_("not a generated product %s") % lprod)
continue
except Exception as exc:
result = str(exc)
- if result != src.OK_STATUS:
+ if result != RCO._OK_STATUS:
result = _("ERROR: %s") % result
details.append([product, result])
def generate_component_list(config, product_info, context, logger):
res = "?"
logger.info("\n")
- for compo in src.product.get_product_components(product_info):
+ for compo in PROD.get_product_components(product_info):
header = " %s %s " % (UTS.label(compo), "." * (20 - len(compo)))
res = generate_component(config,
compo,
config.PRODUCTS.addMapping(compo, PYCONF.Mapping(config), "")
config.PRODUCTS[compo].default = compo_info
- builder = src.compilation.Builder(config, logger, compo_info, check_src=False)
+ builder = COMP.Builder(config, logger, compo_info, check_src=False)
builder.header = header
# generate the component
# determine salome version
- VersionSalome = src.get_salome_version(config)
+ VersionSalome = UTS.get_salome_version(config)
if VersionSalome >= 750 :
use_autotools=False
builder.log('USE CMAKE', 3)
sys.stdout = logger.logTxtFile
sys.stderr = logger.logTxtFile
- if src.product.product_is_mpi(product_info):
+ if PROD.product_is_mpi(product_info):
salome_compo = module_generator.HXX2SALOMEParaComponent(hxxfile,
cpplib,
cpp_path)
cpplib,
cpp_path)
- if src.product.product_has_salome_gui(product_info):
+ if PROD.product_has_salome_gui(product_info):
# get files to build a template GUI
gui_files = salome_compo.getGUIfilesTemplate(compo)
else:
builder.log('BUID_CONFIGURE (no bootstrap)', 3)
g.bootstrap(compo_info.source_dir, logger.logTxtFile)
- result = src.OK_STATUS
+ result = RCO._OK_STATUS
finally:
sys.stdout = prevstdout
sys.stderr = prevstderr
# copy specified logo in generated component install directory
# rem : logo is not copied in source dir because this would require
# to modify the generated makefile
- logo_path = src.product.product_has_logo(product_info)
+ logo_path = PROD.product_has_logo(product_info)
if logo_path:
destlogo = os.path.join(compo_info.install_dir, "share", "salome",
"resources", compo.lower(), compo + ".png")
- src.Path(logo_path).copyfile(destlogo)
+ UTS.Path(logo_path).copyfile(destlogo)
return result
def build_context(config, logger):
products_list = [ 'KERNEL', 'GUI' ]
- ctxenv = src.environment.SalomeEnviron(config,
- src.environment.Environ(dict(
- os.environ)),
- True)
+ ctxenv = ENVI.SalomeEnviron(config, ENVI.Environ(dict(os.environ)), True)
ctxenv.silent = True
ctxenv.set_full_environ(logger, config.APPLICATION.products.keys())
yacsgen_dir = directory
yacs_src = _("Using YACSGEN from command line")
elif 'YACSGEN' in config.APPLICATION.products:
- yacsgen_info = src.product.get_product_config(config, 'YACSGEN')
+ yacsgen_info = PROD.get_product_config(config, 'YACSGEN')
yacsgen_dir = yacsgen_info.install_dir
yacs_src = _("Using YACSGEN from application")
elif os.environ.has_key("YACSGEN_ROOT_DIR"):
pv = os.getenv("PYTHON_VERSION")
if pv is None:
- python_info = src.product.get_product_config(config, "Python")
+ python_info = PROD.get_product_config(config, "Python")
pv = '.'.join(python_info.version.split('.')[:2])
assert pv is not None, "$PYTHON_VERSION not defined"
yacsgen_dir = os.path.join(yacsgen_dir, "lib", "python%s" % pv, "site-packages")
return 0
# Get the path
- path = src.Path(path_to_check)
+ path = UTS.Path(path_to_check)
# If it is a file, do nothing and return error
if path.isfile():
UTS.logger_info_tuples(logger, info)
# Read the config that is in the file
- config_jobs = src.read_config_from_a_file(file_jobs_cfg)
+ config_jobs = UTS.read_config_from_a_file(file_jobs_cfg)
# Find the job and its commands
found = False
if cmd_exe == "sat":
# use the salomeTools parser to get the options of the command
sat_parser = salomeTools.parser
- input_parser = src.remove_item_from_list(command.split(' ')[1:], "")
+ input_parser = UTS.remove_item_from_list(command.split(' ')[1:], "")
(options, argus) = sat_parser.parse_args(input_parser)
# Verify if there is a changed option
for attr in dir(options):
return RCO.ReturnCode("KO", msg)
l_conf_files_path.append(file_jobs_cfg)
# Read the config that is in the file
- one_config_jobs = src.read_config_from_a_file(file_jobs_cfg)
+ one_config_jobs = UTS.read_config_from_a_file(file_jobs_cfg)
merger.merge(config_jobs, one_config_jobs)
info = [(_("Platform"), config.VARS.dist),
# on every machine
name_pyconf = "_".join([os.path.basename(path)[:-len('.pyconf')]
for path in l_conf_files_path]) + ".pyconf"
- path_pyconf = src.get_tmp_filename(config, name_pyconf)
+ path_pyconf = UTS.get_tmp_filename(config, name_pyconf)
#Save config
f = file( path_pyconf , 'w')
config_jobs.__save__(f)
username=self.user,
password = self.password)
except self.paramiko.AuthenticationException:
- message = src.KO_STATUS + _("Authentication failed")
+ message = RCO._KO_STATUS + _("Authentication failed")
except self.paramiko.BadHostKeyException:
- message = (src.KO_STATUS +
- _("The server's host key could not be verified"))
+ message = (RCO._KO_STATUS + _("The server's host key could not be verified"))
except self.paramiko.SSHException:
- message = ( _("SSHException error connecting or "
- "establishing an SSH session"))
+ message = ( _("SSHException error connecting or establishing an SSH session"))
except:
message = ( _("Error connecting or establishing an SSH session"))
else:
cmd_pid = 'ps aux | grep "' + self.command + '" | awk \'{print $2}\''
(_, out_pid, _) = self.machine.exec_command(cmd_pid, self.logger)
pids_cmd = out_pid.readlines()
- pids_cmd = [str(src.only_numbers(pid)) for pid in pids_cmd]
+ pids_cmd = [str(UTS.only_numbers(pid)) for pid in pids_cmd]
pids+=pids_cmd
return pids
return
# First get the file that contains the list of log files to get
- tmp_file_path = src.get_tmp_filename(self.config, "list_log_files.txt")
+ tmp_file_path = UTS.get_tmp_filename(self.config, "list_log_files.txt")
remote_path = os.path.join(self.machine.sat_path, "list_log_files.txt")
self.machine.sftp.get(remote_path, tmp_file_path)
self.logger.info(tiret_line + text_line + "|\n" + tiret_line)
# The infinite loop that runs the jobs
- l_jobs_not_started = src.deepcopy_list(self.ljobs)
+ l_jobs_not_started = UTS.deepcopy_list(self.ljobs)
while len(self._l_jobs_finished) != len(self.ljobs):
new_job_start = False
for host_port in self.lhosts:
import src.debug as DBG
import src.returnCode as RCO
import src.utilsSat as UTS
+import src.product as PROD
from src.salomeTools import _BaseCommand
+import src.environment as ENVI
+import src.fileEnviron as FENV
########################################################################
# Command class
if options.name:
launcher_name = options.name
else:
- launcher_name = src.get_launcher_name(config)
+ launcher_name = UTS.get_launcher_name(config)
# set the launcher path
launcher_path = config.APPLICATION.workdir
# get KERNEL bin installation path
# (in order for the launcher to get python salomeContext API)
- kernel_cfg = src.product.get_product_config(config, "KERNEL")
- if not src.product.check_installation(kernel_cfg):
+ kernel_cfg = PROD.get_product_config(config, "KERNEL")
+ if not PROD.check_installation(kernel_cfg):
raise Exception(_("KERNEL is not installed"))
kernel_root_dir = kernel_cfg.install_dir
# set kernel bin dir (considering fhs property)
- if src.get_property_in_product_cfg(kernel_cfg, "fhs"):
+ if UTS.get_property_in_product_cfg(kernel_cfg, "fhs"):
bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin")
else:
bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin","salome")
# Get the launcher template
- withProfile = src.fileEnviron.withProfile\
- .replace("BIN_KERNEL_INSTALL_DIR", bin_kernel_install_dir)\
- .replace("KERNEL_INSTALL_DIR", kernel_root_dir)
+ withProfile = FENV.withProfile\
+ .replace("BIN_KERNEL_INSTALL_DIR", bin_kernel_install_dir)\
+ .replace("KERNEL_INSTALL_DIR", kernel_root_dir)
before, after = withProfile.split(
"# here your local standalone environment\n")
# create an environment file writer
- writer = src.environment.FileEnvWriter(config,
- logger,
- pathlauncher,
- src_root=None,
- env_info=None)
+ writer = ENVI.FileEnvWriter(config, logger, pathlauncher, src_root=None, env_info=None)
# Display some information
if display:
user = getpass.getuser()
# Create the catalog path
- catfile = src.get_tmp_filename(config, "CatalogResources.xml")
+ catfile = UTS.get_tmp_filename(config, "CatalogResources.xml")
catalog = file(catfile, "w")
# Write into it
import src.debug as DBG
import src.returnCode as RCO
import src.utilsSat as UTS
+import src.product as PROD
+import src.compilation as COMP
+import src.architecture as ARCH
from src.salomeTools import _BaseCommand
########################################################################
# Construct the list of tuple containing
# the products name and their definition
- products_infos = src.product.get_products_infos(products, cfg)
+ products_infos = PROD.get_products_infos(products, cfg)
- products_infos = [pi for pi in products_infos if not(
- src.product.product_is_native(pi[1]) or
- src.product.product_is_fixed(pi[1]))]
+ products_infos = [pi for pi in products_infos \
+ if not(PROD.product_is_native(pi[1]) or PROD.product_is_fixed(pi[1]))]
return products_infos
# Instantiate the class that manages all the construction commands
# like cmake, make, make install, make test, environment management, etc...
- builder = src.compilation.Builder(config, logger, p_info)
+ builder = COMP.Builder(config, logger, p_info)
# Prepare the environment
UTS.log_step(logger, header, "PREPARE ENV")
nb_proc, make_opt_without_j = get_nb_proc(p_info, config, make_option)
UTS.log_step(logger, header, "MAKE -j" + str(nb_proc))
- if src.architecture.is_windows():
+ if ARCH.is_windows():
res = builder.wmake(nb_proc, make_opt_without_j)
else:
res = builder.make(nb_proc, make_opt_without_j)
import src.debug as DBG
import src.returnCode as RCO
import src.utilsSat as UTS
+import src.product as PROD
+import src.compilation as COMP
from src.salomeTools import _BaseCommand
########################################################################
# Construct the list of tuple containing
# the products name and their definition
- products_infos = src.product.get_products_infos(products, cfg)
+ products_infos = PROD.get_products_infos(products, cfg)
- products_infos = [pi for pi in products_infos if not(src.product.product_is_native(pi[1]) or src.product.product_is_fixed(pi[1]))]
+ products_infos = [pi for pi in products_infos \
+ if not(PROD.product_is_native(pi[1]) or PROD.product_is_fixed(pi[1]))]
return products_infos
# Instantiate the class that manages all the construction commands
# like cmake, make, make install, make test, environment management, etc...
- builder = src.compilation.Builder(config, logger, p_info)
+ builder = COMP.Builder(config, logger, p_info)
# Prepare the environment
UTS.log_step(logger, header, "PREPARE ENV")
# Execute buildconfigure, configure if the product is autotools
# Execute cmake if the product is cmake
res = 0
- if not src.product.product_has_script(p_info):
+ if not PROD.product_has_script(p_info):
UTS.log_step(logger, header, "MAKE INSTALL")
res_m = builder.install()
UTS.log_res_step(logger, res_m)
import src.debug as DBG
import src.returnCode as RCO
import src.utilsSat as UTS
+import src.product as PROD
from src.salomeTools import _BaseCommand
import src.pyconf as PYCONF
import src.utilsSat as UTS
+import src.environment as ENVI
+import src.architecture as ARCH
BINARY = "binary"
SOURCE = "Source"
kernel_root_dir = os.path.join(binaries_dir_name, "KERNEL")
# set kernel bin dir (considering fhs property)
- kernel_cfg = src.product.get_product_config(config, "KERNEL")
- if src.get_property_in_product_cfg(kernel_cfg, "fhs"):
+ kernel_cfg = PROD.get_product_config(config, "KERNEL")
+ if UTS.get_property_in_product_cfg(kernel_cfg, "fhs"):
bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin")
else:
bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin","salome")
# Get the launcher template and do substitutions
- withProfile = src.fileEnviron.withProfile
+ withProfile = FENV.withProfile
withProfile = withProfile.replace(
"ABSOLUTE_APPLI_PATH'] = 'KERNEL_INSTALL_DIR'",
" 'BIN_KERNEL_INSTALL_DIR'",
" out_dir_Path + '" + config.VARS.sep + bin_kernel_install_dir + "'")
- before, after = withProfile.split(
- "# here your local standalone environment\n")
+ before, after = withProfile.split("# here your local standalone environment\n")
# create an environment file writer
- writer = src.environment.FileEnvWriter(config,
- logger,
- file_dir,
- src_root=None)
+ writer = ENVI.FileEnvWriter(config, logger, file_dir, src_root=None)
filepath = os.path.join(file_dir, file_name)
# open the file and write into it
launch_file.close()
# Little hack to put out_dir_Path outside the strings
- src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' )
+ UTS.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' )
# A hack to put a call to a file for distene licence.
# It does nothing to an application that has no distene product
:return: (list) The list of path of the produced environment files
"""
# create an environment file writer
- writer = src.environment.FileEnvWriter(config,
- logger,
- file_dir,
- src_root=None)
+ writer = ENVI.FileEnvWriter(config, logger, file_dir, src_root=None)
# Write
filepath = writer.write_env_file("env_launch.sh",
for_package = binaries_dir_name)
# Little hack to put out_dir_Path as environment variable
- src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )
+ UTS.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )
# change the rights in order to make the file executable for everybody
os.chmod(filepath,
text_to_add = ""
for product_name in get_SALOME_modules(config):
- product_info = src.product.get_product_config(config, product_name)
+ product_info = PROD.get_product_config(config, product_name)
- if src.product.product_is_smesh_plugin(product_info):
+ if PROD.product_is_smesh_plugin(product_info):
continue
if 'install_dir' in product_info and bool(product_info.install_dir):
- if src.product.product_is_cpp(product_info):
+ if PROD.product_is_cpp(product_info):
# cpp module
- for cpp_name in src.product.get_product_components(product_info):
+ for cpp_name in PROD.get_product_components(product_info):
line_to_add = ("<module name=\"" +
cpp_name +
"\" gui=\"yes\" path=\"''' + "
# Get the list of product installation to add to the archive
l_products_name = config.APPLICATION.products.keys()
- l_product_info = src.product.get_products_infos(l_products_name,
+ l_product_info = PROD.get_products_infos(l_products_name,
config)
l_install_dir = []
l_source_dir = []
# Add the sources of the products that have the property
# sources_in_package : "yes"
- if src.get_property_in_product_cfg(prod_info,
+ if UTS.get_property_in_product_cfg(prod_info,
"sources_in_package") == "yes":
if os.path.exists(prod_info.source_dir):
l_source_dir.append((prod_name, prod_info.source_dir))
l_sources_not_present.append(prod_name)
# ignore the native and fixed products for install directories
- if (src.product.product_is_native(prod_info)
- or src.product.product_is_fixed(prod_info)
- or not src.product.product_compiles(prod_info)):
+ if (PROD.product_is_native(prod_info)
+ or PROD.product_is_fixed(prod_info)
+ or not PROD.product_compiles(prod_info)):
continue
- if src.product.check_installation(prod_info):
+ if PROD.check_installation(prod_info):
l_install_dir.append((prod_name, prod_info.install_dir))
else:
l_not_installed.append(prod_name)
# Add also the cpp generated modules (if any)
- if src.product.product_is_cpp(prod_info):
+ if PROD.product_is_cpp(prod_info):
# cpp module
- for name_cpp in src.product.get_product_components(prod_info):
+ for name_cpp in PROD.get_product_components(prod_info):
install_dir = os.path.join(config.APPLICATION.workdir,
"INSTALL", name_cpp)
if os.path.exists(install_dir):
# for packages of SALOME applications including KERNEL,
# we produce a salome launcher or a virtual application (depending on salome version)
if 'KERNEL' in config.APPLICATION.products:
- VersionSalome = src.get_salome_version(config)
+ VersionSalome = UTS.get_salome_version(config)
# Case where SALOME has the launcher that uses the SalomeContext API
if VersionSalome >= 730:
# create the relative launcher and add it to the files to add
- launcher_name = src.get_launcher_name(config)
+ launcher_name = UTS.get_launcher_name(config)
launcher_package = produce_relative_launcher(config,
logger,
tmp_working_dir,
d_sat = {"salomeTools" : (tmp_sat, "salomeTools")}
# Add a sat symbolic link if not win
- if not src.architecture.is_windows():
+ if not ARCH.is_windows():
tmp_satlink_path = os.path.join(tmp_working_dir, 'sat')
try:
t = os.getcwd()
d_sat["sat link"] = (tmp_satlink_path, "sat")
- d_source = src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
+ d_source = UTS.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
return d_source
def get_archives(config, logger):
"""
# Get the list of product informations
l_products_name = config.APPLICATION.products.keys()
- l_product_info = src.product.get_products_infos(l_products_name,
+ l_product_info = PROD.get_products_infos(l_products_name,
config)
d_archives = {}
l_pinfo_vcs = []
for p_name, p_info in l_product_info:
# ignore the native and fixed products
- if (src.product.product_is_native(p_info)
- or src.product.product_is_fixed(p_info)):
+ if (PROD.product_is_native(p_info)
+ or PROD.product_is_fixed(p_info)):
continue
if p_info.get_source == "archive":
archive_path = p_info.archive_info.archive_name
The path to the local salomeTools directory to add in the package
"""
# Copy sat in the temporary working directory
- sat_tmp_path = src.Path(os.path.join(tmp_working_dir, "salomeTools"))
- sat_running_path = src.Path(config.VARS.salometoolsway)
+ sat_tmp_path = UTS.Path(os.path.join(tmp_working_dir, "salomeTools"))
+ sat_running_path = UTS.Path(config.VARS.salometoolsway)
sat_running_path.copy(sat_tmp_path)
# Update the local.pyconf file that contains the path to the project
# (compilation, environment, patches)
# and create the pyconf file to add to the project
lproducts_name = config.APPLICATION.products.keys()
- l_products = src.product.get_products_infos(lproducts_name, config)
+ l_products = PROD.get_products_infos(lproducts_name, config)
for p_name, p_info in l_products:
find_product_scripts_and_pyconf(p_name,
p_info,
product_pyconf_cfg = PYCONF.Config(product_pyconf_path)
# find the compilation script if any
- if src.product.product_has_script(p_info):
- compil_script_path = src.Path(p_info.compil_script)
+ if PROD.product_has_script(p_info):
+ compil_script_path = UTS.Path(p_info.compil_script)
compil_script_path.copy(compil_scripts_tmp_dir)
product_pyconf_cfg[p_info.section].compil_script = os.path.basename(
p_info.compil_script)
# find the environment script if any
- if src.product.product_has_env_script(p_info):
- env_script_path = src.Path(p_info.environ.env_script)
+ if PROD.product_has_env_script(p_info):
+ env_script_path = UTS.Path(p_info.environ.env_script)
env_script_path.copy(env_scripts_tmp_dir)
product_pyconf_cfg[p_info.section].environ.env_script = os.path.basename(
p_info.environ.env_script)
# find the patches if any
- if src.product.product_has_patches(p_info):
+ if PROD.product_has_patches(p_info):
patches = PYCONF.Sequence()
for patch_path in p_info.patches:
- p_path = src.Path(patch_path)
+ p_path = UTS.Path(patch_path)
p_path.copy(patches_tmp_dir)
patches.append(os.path.basename(patch_path), "")
info][key]
else:
# if the product is not archive, then make it become archive.
- if src.product.product_is_vcs(p_info):
+ if PROD.product_is_vcs(p_info):
product_pyconf_cfg[p_info.section].get_source = "archive"
if not "archive_info" in product_pyconf_cfg[p_info.section]:
product_pyconf_cfg[p_info.section].addMapping("archive_info",
d['application'] = config.VARS.application
f.write("# Application: " + d['application'] + "\n")
if 'KERNEL' in config.APPLICATION.products:
- VersionSalome = src.get_salome_version(config)
+ VersionSalome = UTS.get_salome_version(config)
# Case where SALOME has the launcher that uses the SalomeContext API
if VersionSalome >= 730:
d['launcher'] = config.APPLICATION.profile.launcher_name
UTS.check_config_has_application(config).raiseIfKo()
l_product_to_remove = []
for product_name in config.APPLICATION.products.keys():
- prod_cfg = src.product.get_product_config(config, product_name)
- if src.get_property_in_product_cfg(prod_cfg, prop) == value:
+ prod_cfg = PROD.get_product_config(config, product_name)
+ if UTS.get_property_in_product_cfg(prod_cfg, prop) == value:
l_product_to_remove.append(product_name)
for product_name in l_product_to_remove:
config.APPLICATION.products.__delitem__(product_name)
import src.debug as DBG
import src.returnCode as RCO
import src.utilsSat as UTS
+import src.product as PROD
from src.salomeTools import _BaseCommand
import commands.prepare
"""
# if the product is native, do not apply patch
- if src.product.product_is_native(product_info):
+ if PROD.product_is_native(product_info):
# display and log
logger.info('%s: ' % UTS.label(product_info.name))
logger.info(' ' * (max_product_name_len - len(product_info.name)))
import src.debug as DBG
import src.returnCode as RCO
import src.utilsSat as UTS
+import src.product as PROD
from src.salomeTools import _BaseCommand
########################################################################
for p_name, __ in products_infos:
args_product_opt += ',' + p_name
- ldev_products = [p for p in products_infos if src.product.product_is_dev(p[1])]
+ ldev_products = [p for p in products_infos if PROD.product_is_dev(p[1])]
args_product_opt_clean = args_product_opt
if not options.force and len(ldev_products) > 0:
l_products_not_getted = find_products_already_getted(ldev_products)
import src.debug as DBG
import src.returnCode as RCO
import src.utilsSat as UTS
+import src.product as PROD
import src.pyconf as PYCONF
from src.salomeTools import _BaseCommand
Generates the sources of the profile
"""
#Check script app-quickstart.py exists
- kernel_cfg = src.product.get_product_config(config, "KERNEL")
+ kernel_cfg = PROD.get_product_config(config, "KERNEL")
kernel_root_dir = kernel_cfg.install_dir
- if not src.product.check_installation(kernel_cfg):
+ if not PROD.check_installation(kernel_cfg):
raise Exception(_("KERNEL is not installed"))
script = os.path.join(kernel_root_dir,"bin","salome","app-quickstart.py")
if not os.path.exists( script ):
raise Exception( _("KERNEL's install has not the script app-quickstart.py") )
# Check that GUI is installed
- gui_cfg = src.product.get_product_config(config, "GUI")
+ gui_cfg = PROD.get_product_config(config, "GUI")
gui_root_dir = gui_cfg.install_dir
- if not src.product.check_installation(gui_cfg):
+ if not PROD.check_installation(gui_cfg):
raise Exception(_("GUI is not installed"))
#Set prefix option passed to app-quickstart.py
UTS.check_config_has_application(config).raiseIfKo()
# Determine launcher path
- launcher_name = src.get_launcher_name(config)
+ launcher_name = UTS.get_launcher_name(config)
launcher_dir = config.APPLICATION.workdir
# Check the launcher existence
import src.debug as DBG
import src.returnCode as RCO
import src.utilsSat as UTS
+import src.product as PROD
+import src.compilation as COMP
from src.salomeTools import _BaseCommand
########################################################################
# Construct the list of tuple containing
# the products name and their definition
- products_infos = src.product.get_products_infos(products, cfg)
+ products_infos = PROD.get_products_infos(products, cfg)
- products_infos = [pi for pi in products_infos if not(
- src.product.product_is_native(pi[1]) or
- src.product.product_is_fixed(pi[1]))]
+ products_infos = [pi for pi in products_infos \
+ if not(PROD.product_is_native(pi[1]) or PROD.product_is_fixed(pi[1]))]
return products_infos
test1 = "properties" in p_info and \
"compilation" in p_info.properties and \
p_info.properties.compilation == "no"
- if ( test1 or (not src.product.product_has_script(p_info)) ):
+ if ( test1 or (not PROD.product_has_script(p_info)) ):
UTS.log_step(logger, header, "ignored")
logger.info("\n")
return 0
# Instantiate the class that manages all the construction commands
# like cmake, make, make install, make test, environment management, etc...
- builder = src.compilation.Builder(config, logger, p_info)
+ builder = COMP.Builder(config, logger, p_info)
# Prepare the environment
UTS.log_step(logger, header, "PREPARE ENV")
import src.debug as DBG
import src.returnCode as RCO
import src.utilsSat as UTS
+import src.product as PROD
from src.salomeTools import _BaseCommand
import src.system as SYSS
+import src.environment as ENVI
########################################################################
return False
logger.info('DIR: %s ... ' % UTS.info(product_info.dir_info.dir))
- retcode = src.Path(product_info.dir_info.dir).copy(source_dir)
+ retcode = UTS.Path(product_info.dir_info.dir).copy(source_dir)
return retcode
def get_source_from_cvs(user,
# Get the application environment
logger.info(_("Set the application environment\n"))
- env_appli = src.environment.SalomeEnviron(config,
- src.environment.Environ(dict(os.environ)))
+ env_appli = ENVI.SalomeEnviron(config, ENVI.Environ(dict(os.environ)))
env_appli.set_application_env(logger)
# Call the right function to get sources regarding the product settings
for product_name, product_info in products:
# get product name, product informations and the directory where to put
# the sources
- if (not (src.product.product_is_fixed(product_info) or
- src.product.product_is_native(product_info))):
- source_dir = src.Path(product_info.source_dir)
+ if (not (PROD.product_is_fixed(product_info) or
+ PROD.product_is_native(product_info))):
+ source_dir = UTS.Path(product_info.source_dir)
else:
- source_dir = src.Path('')
+ source_dir = UTS.Path('')
# display and log
logger.info('%s: ' % UTS.label(product_name))
# Remove the existing source directory if
# the product is not in development mode
- is_dev = src.product.product_is_dev(product_info)
+ is_dev = PROD.product_is_dev(product_info)
if source_dir.exists():
logger.info("<OK>\n")
msg = _("Nothing done because source directory existing yet.\n")
res = "<KO>"
# print the result
- if not(src.product.product_is_fixed(product_info) or
- src.product.product_is_native(product_info)):
+ if not(PROD.product_is_fixed(product_info) or
+ PROD.product_is_native(product_info)):
logger.info('%s\n' % res)
return good_result, results
from src.salomeTools import _BaseCommand
import src.ElementTree as etree
import src.xmlManager as XMLMGR
+import src.architecture as ARCH
try:
from hashlib import sha1
if not options.launcher:
options.launcher = ""
elif not os.path.isabs(options.launcher):
- if not src.config_has_application(config):
- raise Exception(
- _("An application is required to use a relative path with option --appli") )
+ returnCode = UTS.check_config_has_application(config)
+ if not returnCode.isOk():
+ msg = _("An application is required to use a relative path with option --appli")
+ raise Exception(msg)
options.launcher = os.path.join(config.APPLICATION.workdir, options.launcher)
-
if not os.path.exists(options.launcher):
- raise Exception(
- _("Launcher not found: %s") % options.launcher )
+ raise Exception(_("Launcher %s not found") % options.launcher )
return
def run(self, cmd_arguments):
# Get some information to put in the xml file
application_name = config.VARS.application
- withappli = src.config_has_application(config)
+ withappli = UTS.check_config_has_application(config).isOk()
first_time = False
if not os.path.exists(xml_history_path):
prod_node.remove(node)
ASNODE(prod_node, "version_to_download", config.APPLICATION.name)
-
ASNODE(prod_node, "out_dir", config.APPLICATION.workdir)
# add environment
if 'callback' in script:
try:
cnode = ASNODE(tn, "callback")
- if src.architecture.is_windows():
+ if ARCH.is_windows():
import string
cnode.text = filter(
lambda x: x in string.printable,
# calculate status
nb += 1
- if script.res == src.OK_STATUS: nb_pass += 1
- elif script.res == src.TIMEOUT_STATUS: nb_timeout += 1
- elif script.res == src.KO_STATUS: nb_failed += 1
+ if script.res == RCO._OK_STATUS: nb_pass += 1
+ elif script.res == RCO._TIMEOUT_STATUS: nb_timeout += 1
+ elif script.res == RCO._KO_STATUS: nb_failed += 1
else: nb_not_run += 1
if "known_error" in script:
if overdue:
kf_script.attrib['overdue'] = str(overdue)
- elif script.res == src.KO_STATUS:
+ elif script.res == RCO._KO_STATUS:
new_err = ASNODE(new_errors, "new_error")
script_path = os.path.join(test.grid,
test.session, script.name)
XMLMGR.write_report(os.path.join(dest_path, xmlname), root, "test.xsl")
XMLMGR.write_report(xml_history_path, root, "test_history.xsl")
- return src.OK_STATUS
+ return RCO._OK_STATUS
def generate_history_xml_path(config, test_base):
"""
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+"""
+utilities to build and compile
+
+Usage:
+>> import src.compilation as COMP
+"""
+
import os
import subprocess
import sys
from src.options import OptResult
import src.utilsSat as UTS
+import src.product as PROD
+import src.environment as ENVI
+import src.architecture as ARCH
C_COMPILE_ENV_LIST = "CC CXX F77 CFLAGS CXXFLAGS LIBS LDFLAGS".split()
self.logger = logger
self.options = options
self.product_info = product_info
- self.build_dir = src.Path(self.product_info.build_dir)
- self.source_dir = src.Path(self.product_info.source_dir)
- self.install_dir = src.Path(self.product_info.install_dir)
+ self.build_dir = UTS.Path(self.product_info.build_dir)
+ self.source_dir = UTS.Path(self.product_info.source_dir)
+ self.install_dir = UTS.Path(self.product_info.install_dir)
self.header = ""
self.debug_mode = False
if "debug" in self.product_info and self.product_info.debug == "yes":
self.log('\n', 4)
# add products in depend and opt_depend list recursively
- environ_info = src.product.get_product_dependencies(self.config,
- self.product_info)
+ environ_info = PROD.get_product_dependencies(self.config, self.product_info)
#environ_info.append(self.product_info.name)
# create build environment
- self.build_environ = src.environment.SalomeEnviron(self.config,
- src.environment.Environ(dict(os.environ)),
- True)
+ self.build_environ = ENVI.SalomeEnviron(
+ self.config, ENVI.Environ(dict(os.environ)), True)
self.build_environ.silent = (self.config.USER.output_verbose_level < 5)
self.build_environ.set_full_environ(self.logger, environ_info)
# create runtime environment
- self.launch_environ = src.environment.SalomeEnviron(self.config,
- src.environment.Environ(dict(os.environ)),
- False)
+ self.launch_environ = ENVI.SalomeEnviron(
+ self.config, ENVI.Environ(dict(os.environ)), False)
self.launch_environ.silent = True # no need to show here
self.launch_environ.set_full_environ(self.logger, environ_info)
##
# Runs 'make_check'.
def check(self, command=""):
- if src.architecture.is_windows():
+ if ARCH.is_windows():
cmd = 'msbuild RUN_TESTS.vcxproj'
else :
if self.product_info.build_source=="autotools" :
def do_batch_script_build(self, script, nb_proc):
- if src.architecture.is_windows():
+ if ARCH.is_windows():
make_options = "/maxcpucount:%s" % nb_proc
else :
make_options = "-j%s" % nb_proc
import src.debug as DBG
import src.loggingSat as LOG
import src.returnCode as RCO
-import src.architecture as ARCH
import src.utilsSat as UTS
import src.pyconf as PYCONF
import src.product as PROD
import src.environment as ENVI
import src.fileEnviron as FENV
+import src.architecture as ARCH
class ConfigOpener:
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+"""
+utilities to manage launch and build environment
+
+Usage:
+>> import src.environment as ENVI
+"""
+
import os
import subprocess
import string
# create then env object
env_file = open(os.path.join(self.out_dir, filename), "w")
- tmp = src.fileEnviron.get_file_environ(env_file, shell, {})
+ tmp = FENV.get_file_environ(env_file, shell, {})
env = SalomeEnviron(self.config, tmp, forBuild, for_package=for_package)
env.silent = self.silent
self.logger.info(_("Create configuration file %s\n") % UTS.label(filename.name))
# create then env object
- tmp = src.fileEnviron.get_file_environ(filename,
- "cfgForPy",
- {})
+ tmp = FENV.get_file_environ(filename, "cfgForPy", {})
# environment for launch
env = SalomeEnviron(self.config,
tmp,
"""
Contains the methods
relative to the product notion of salomeTools
+
+| usage:
+| >> import src.product as PROD
"""
import os
import src.pyconf as PYCONF
import src.utilsSat as UTS
+import src.architecture as ARCH
AVAILABLE_VCS = ['git', 'svn', 'cvs']
config_expression = "^config-\d+$"
raise Exception(
_("Compilation script not found: %s") % script_name)
prod_info.compil_script = script_path
- if src.architecture.is_windows():
+ if ARCH.is_windows():
prod_info.compil_script = prod_info.compil_script[:-len(".sh")] + ".bat"
# Check that the script is executable
if VERSION_DELIMITER in section_name]
for section_range in l_section_ranges:
minimum, maximum = section_range.split(VERSION_DELIMITER)
- if (src.only_numbers(version) >= src.only_numbers(minimum)
- and src.only_numbers(version) <= src.only_numbers(maximum)):
+ if UTS.only_numbers(version) >= UTS.only_numbers(minimum) and \
+ UTS.only_numbers(version) <= UTS.only_numbers(maximum):
# returns specific information for the versions
prod_info = config.PRODUCTS[product_name][section_range]
prod_info.section = section_range
continue
# check if there is the file sat-config.pyconf file in the installation
# directory
- config_file = os.path.join(prod_dir, dir_or_file, src.CONFIG_FILENAME)
+ config_file = os.path.join(prod_dir, dir_or_file, UTS.get_CONFIG_FILENAME())
if not os.path.exists(config_file):
continue
import pprint as PP
+_OK_STATUS = "OK"
+_KO_STATUS = "KO"
+_NA_STATUS = "NA" # not applicable
+_UNKNOWN_STATUS = "ND" # not defined
+_KNOWNFAILURE_STATUS = "KF"
+_TIMEOUT_STATUS = "TIMEOUT"
+
#####################################################
class ReturnCode(object):
"""
>> print("long returnCode string with value", repr(rcFinal)) # KO!
"""
- OK_STATUS = "OK"
- KO_STATUS = "KO"
- NA_STATUS = "NA" # not applicable
- UNKNOWN_STATUS = "ND" # not defined
- KNOWNFAILURE_STATUS = "KF"
- TIMEOUT_STATUS = "TIMEOUT"
+ OK_STATUS = _OK_STATUS
+    KO_STATUS = _KO_STATUS
+ NA_STATUS = _NA_STATUS # not applicable
+ UNKNOWN_STATUS = _UNKNOWN_STATUS # not defined
+ KNOWNFAILURE_STATUS = _KNOWNFAILURE_STATUS
+ TIMEOUT_STATUS = _TIMEOUT_STATUS
# integer for sys.exit(anInt)
# OKSYS and KOSYS seems equal on linux or windows
# sat <options> <args>
# (the list of possible options is at the beginning of this file)
-
- self.CONFIG_FILENAME = "sat-config.pyconf"
-
self.configManager = None # the config Manager that will be used to set self.config
self.config = None # the config that will be read using pyconf module
self.logger = logger # the logger that will be use
import subprocess
import src.pyconf as PYCONF
+import src.returnCode as RCO
import src.utilsSat as UTS
+import src.product as PROD
+import src.environment as ENVI
+import src.architecture as ARCH
# directories not considered as test grids
C_IGNORE_GRIDS = ['.git', '.svn', 'RESSOURCES']
# Get directory to be used for the temporary files.
#
def getTmpDirDEFAULT():
- if src.architecture.is_windows():
+ if ARCH.is_windows():
directory = os.getenv("TEMP")
else:
# for Linux: use /tmp/logs/{user} folder
'dir': testbase_name }
self.logger.debug("> %s" % cmd)
- if src.architecture.is_windows():
+ if ARCH.is_windows():
# preexec_fn not supported on windows platform
res = subprocess.call(cmd,
cwd=os.path.join(self.tmp_working_dir, 'BASES'),
# Get the application environment
self.logger.debug(_("Set the application environment"))
- env_appli = src.environment.SalomeEnviron(self.config,
- src.environment.Environ(dict(os.environ)))
+ env_appli = ENVI.SalomeEnviron(self.config, ENVI.Environ(dict(os.environ)))
env_appli.set_application_env(self.logger)
self.logger.debug("> %s" % cmd)
- if src.architecture.is_windows():
+ if ARCH.is_windows():
# preexec_fn not supported on windows platform
res = subprocess.call(cmd,
cwd=os.path.join(self.tmp_working_dir, 'BASES'),
self.currentTestBase = test_base_name
- ##
- # Searches if the script is declared in known errors pyconf.
- # Update the status if needed.
+
def search_known_errors(self, status, test_grid, test_session, test):
+ """
+ Searches if the script is declared in known errors pyconf.
+ Update the status if needed.
+ """
test_path = os.path.join(test_grid, test_session, test)
- if not src.config_has_application(self.config):
+ if not UTS.check_config_has_application(self.config).isOk():
return status, []
if self.known_errors is None:
if error is None:
return status, []
- if status == src.OK_STATUS:
+ if status == RCO._OK_STATUS:
if not error.fixed:
# the error is fixed
self.known_errors.fix_error(error)
delta = self.known_errors.get_expecting_days(error)
kfres = [ error.date, error.expected, error.comment, error.fixed ]
if delta < 0:
- return src.KO_STATUS, kfres
- return src.KNOWNFAILURE_STATUS, kfres
+ return RCO._KO_STATUS, kfres
+ return RCO._KNOWNFAILURE_STATUS, kfres
##
# Read the *.result.py files.
gdic, ldic = {}, {}
execfile(resfile, gdic, ldic)
- status = src.TIMEOUT_STATUS
+ status = RCO._TIMEOUT_STATUS
if not has_timed_out:
- status = src.KO_STATUS
+ status = RCO._KO_STATUS
if ldic.has_key('status'):
status = ldic['status']
expected = []
- if status == src.KO_STATUS or status == src.OK_STATUS:
+ if status == RCO._KO_STATUS or status == RCO._OK_STATUS:
status, expected = self.search_known_errors(status,
self.currentgrid,
self.currentsession,
callback = ""
if ldic.has_key('callback'):
callback = ldic['callback']
- elif status == src.KO_STATUS:
+ elif status == RCO._KO_STATUS:
callback = "CRASH"
exec_time = -1
def get_tmp_dir(self):
# Rare case where there is no KERNEL in grid list
# (for example MED_STANDALONE)
- if ('APPLICATION' in self.config
- and 'KERNEL' not in self.config.APPLICATION.products
- and 'KERNEL_ROOT_DIR' not in os.environ):
+ if ('APPLICATION' in self.config and \
+ 'KERNEL' not in self.config.APPLICATION.products and \
+ 'KERNEL_ROOT_DIR' not in os.environ):
return getTmpDirDEFAULT
# Case where "sat test" is launched in an existing SALOME environment
if 'KERNEL_ROOT_DIR' in os.environ:
root_dir = os.environ['KERNEL_ROOT_DIR']
- if ('APPLICATION' in self.config
- and 'KERNEL' in self.config.APPLICATION.products):
- root_dir = src.product.get_product_config(self.config,
- "KERNEL").install_dir
+ if ('APPLICATION' in self.config and \
+ 'KERNEL' in self.config.APPLICATION.products):
+ root_dir = PROD.get_product_config(self.config, "KERNEL").install_dir
# Case where there the appli option is called (with path to launcher)
if len(self.launcher) > 0:
stdout=subprocess.PIPE,
shell=True,
executable='/bin/bash').communicate()
- print "TRACES OP - test_module.py/Test.get_tmp_dir() subproc_res = "
+
for resLine in subproc_res:
print "- '#%s#'" % resLine
root_dir = subproc_res[0].split()[-1]
- # OP 14/11/2017 Ajout de traces pour essayer de decouvrir le pb
- # de remontee de log des tests
- print "TRACES OP - test_module.py/Test.get_tmp_dir() root_dir = '#%s#'" % root_dir
# import grid salome_utils from KERNEL that gives
# the right getTmpDir function
binSalome = "runSalome"
binPython = "python"
killSalome = "killSalome.py"
- src.environment.load_environment(self.config, False, self.logger)
+ ENVI.load_environment(self.config, False, self.logger)
return binSalome, binPython, killSalome
# Case where there the appli option is called (with path to launcher)
return binSalome, binPython, killSalome
# SALOME version detection and APPLI repository detection
- VersionSalome = src.get_salome_version(self.config)
+ VersionSalome = UTS.get_salome_version(self.config)
appdir = 'APPLI'
if "APPLI" in self.config and "application_name" in self.config.APPLI:
appdir = self.config.APPLI.application_name
"runAppli")
binPython = "python"
killSalome = "killSalome.py"
- src.environment.load_environment(self.config, False, self.logger)
+ ENVI.load_environment(self.config, False, self.logger)
return binSalome, binPython, killSalome
# Case where SALOME has the launcher that uses the SalomeContext API
else:
- launcher_name = src.get_launcher_name(self.config)
+ launcher_name = UTS.get_launcher_name(self.config)
binSalome = os.path.join(self.config.APPLICATION.workdir,
launcher_name)
script_info.name = sr
script_info.res = script_results[sr][0]
script_info.time = script_results[sr][1]
- if script_info.res == src.TIMEOUT_STATUS:
+ if script_info.res == RCO._TIMEOUT_STATUS:
script_info.time = time_out
if script_info.time < 1e-3: script_info.time = 0
callback = script_results[sr][2]
- if script_info.res != src.OK_STATUS and len(callback) > 0:
+ if script_info.res != RCO._OK_STATUS and len(callback) > 0:
script_info.callback = callback
kfres = script_results[sr][3]
self.logger.error("Exception in %s\n%s" % \
(script_info.name, UTS.red(callback)))
- if script_info.res == src.OK_STATUS:
+ if script_info.res == RCO._OK_STATUS:
self.nb_succeed += 1
- elif script_info.res == src.KNOWNFAILURE_STATUS:
+ elif script_info.res == RCO._KNOWNFAILURE_STATUS:
self.nb_acknoledge += 1
- elif script_info.res == src.TIMEOUT_STATUS:
+ elif script_info.res == RCO._TIMEOUT_STATUS:
self.nb_timeout += 1
- elif script_info.res == src.NA_STATUS:
+ elif script_info.res == RCO._NA_STATUS:
self.nb_run -= 1
elif script_info.res == "?":
self.nb_not_run += 1
##############################################################################
# file system utilities
##############################################################################
+def get_CONFIG_FILENAME():
+ """get initial config.pyconf"""
+ return "sat-config.pyconf"
+
def ensure_path_exists(path):
"""Create a path if not existing
:param inConfig: (Config or Mapping etc) The in-Config node.
:param key: (str) the name of the parameter to get the value
:param default: (str) The value to return if key is not in-Config
- :return: if supposedly leaf (str),else (in-Config Node)
+    :return: if supposedly leaf (str), else (in-Config Node)
"""
if check_has_key(inConfig, key).isOk():
return inConfig[key]
:return: (str) The path of the logs.
"""
if "log_dir" not in config.LOCAL:
- local_file_path = os.path.join(config.VARS.salometoolsway,
- "data",
- "local.pyconf")
+ local_file_path = os.path.join(
+ config.VARS.salometoolsway, "data", "local.pyconf" )
msg = _("Please define a log_dir in the file %s") % local_file_path
raise Exception(msg)
log_dir_path = os.path.abspath(config.LOCAL.log_dir)
-
return log_dir_path
def get_salome_version(config):
else:
KERNEL_info = product.get_product_config(config, "KERNEL")
VERSION = os.path.join(
- KERNEL_info.install_dir,
- "bin",
- "salome",
- "VERSION")
+ KERNEL_info.install_dir, "bin", "salome", "VERSION" )
if not os.path.isfile(VERSION):
return None