IGNORED_DIRS = [".git", ".svn"]
IGNORED_EXTENSIONS = []
+PACKAGE_EXT=".tar.gz" # the extension we use for the packages
+
PROJECT_TEMPLATE = """#!/usr/bin/env python
#-*- coding:utf-8 -*-
'''
# get KERNEL installation path
- kernel_root_dir = os.path.join(binaries_dir_name, "KERNEL")
+ kernel_info = src.product.get_product_config(config, "KERNEL")
+ kernel_base_name=os.path.basename(kernel_info.install_dir)
+ if kernel_base_name.startswith("config"):
+        # case of a kernel installed in base. We remove "config-i"
+ kernel_base_name=os.path.basename(os.path.dirname(kernel_info.install_dir))
+
+ kernel_root_dir = os.path.join(binaries_dir_name, kernel_base_name)
# set kernel bin dir (considering fhs property)
kernel_cfg = src.product.get_product_config(config, "KERNEL")
# Little hack to put out_dir_Path outside the strings
src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' )
- src.replace_in_file(filepath, "'out_dir_Path + ", "out_dir_Path + '" )
+ src.replace_in_file(filepath, "r'out_dir_Path + ", "out_dir_Path + r'" )
# A hack to put a call to a file for distene licence.
# It does nothing to an application that has no distene product
del text[num_line +1]
del text[num_line +1]
text_to_insert =""" try:
- distene_licence_file="%s"
+ distene_licence_file=r"%s"
if sys.version_info[0] >= 3 and sys.version_info[1] >= 5:
import importlib.util
spec_dist = importlib.util.spec_from_file_location("distene_licence", distene_licence_file)
for_package = binaries_dir_name)
# Little hack to put out_dir_Path as environment variable
- src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )
+ if src.architecture.is_windows() :
+ src.replace_in_file(filepath, '"out_dir_Path', '"%out_dir_Path%' )
+ src.replace_in_file(filepath, '=out_dir_Path', '=%out_dir_Path%' )
+ else:
+ src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )
# change the rights in order to make the file executable for everybody
os.chmod(filepath,
if len(l_not_installed) > 0:
text_missing_prods = ""
for p_name in l_not_installed:
- text_missing_prods += "-" + p_name + "\n"
+ text_missing_prods += " - " + p_name + "\n"
if not options.force_creation:
- msg = _("ERROR: there are missing products installations:")
+ msg = _("ERROR: there are missing product installations:")
logger.write("%s\n%s" % (src.printcolors.printcError(msg),
text_missing_prods),
1)
- return None
+ raise src.SatException(msg)
else:
msg = _("WARNING: there are missing products installations:")
logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
for p_name in l_sources_not_present:
text_missing_prods += "-" + p_name + "\n"
if not options.force_creation:
- msg = _("ERROR: there are missing products sources:")
+ msg = _("ERROR: there are missing product sources:")
logger.write("%s\n%s" % (src.printcolors.printcError(msg),
text_missing_prods),
1)
- return None
+ raise src.SatException(msg)
else:
msg = _("WARNING: there are missing products sources:")
logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
1)
# construct the name of the directory that will contain the binaries
- binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
-
+ if src.architecture.is_windows():
+ binaries_dir_name = config.INTERNAL.config.binary_dir
+ else:
+ binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
# construct the correlation table between the product names, there
# actual install directories and there install directory in archive
d_products = {}
for prod_name, install_dir in l_install_dir:
- path_in_archive = os.path.join(binaries_dir_name, os.path.basename(install_dir))
+ prod_base_name=os.path.basename(install_dir)
+ if prod_base_name.startswith("config"):
+            # case of a product installed in base. We remove "config-i"
+ prod_base_name=os.path.basename(os.path.dirname(install_dir))
+ path_in_archive = os.path.join(binaries_dir_name, prod_base_name)
d_products[prod_name + " (bin)"] = (install_dir, path_in_archive)
for prod_name, source_dir in l_source_dir:
else:
filename = "env_launch.sh"
d_products["environment file"] = (env_file, filename)
-
return d_products
def source_package(sat, config, logger, options, tmp_working_dir):
:return: The path of the resulting archive
:rtype: str
'''
- path_targz_prod = os.path.join(where, prod_name + ".tgz")
+ path_targz_prod = os.path.join(where, prod_name + PACKAGE_EXT)
tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
local_path = prod_info.source_dir
tar_prod.add(local_path,
patches_tmp_dir,
products_pyconf_tmp_dir)
- find_application_pyconf(config, application_tmp_dir)
+    # for the application pyconf, write the config directly:
+    # do not search for the original pyconf file, to avoid problems
+    # with overwritten sections and the rm_products key
+ write_application_pyconf(config, application_tmp_dir)
d_project = {"project" : (project_tmp_dir, PROJECT_DIR )}
return d_project
'''
# read the pyconf of the product
- product_pyconf_path = src.find_file_in_lpath(p_name + ".pyconf",
- config.PATHS.PRODUCTPATH)
- product_pyconf_cfg = src.pyconf.Config(product_pyconf_path)
+ product_pyconf_cfg = src.pyconf.Config(p_info.from_file)
# find the compilation script if any
if src.product.product_has_script(p_info):
compil_script_path = src.Path(p_info.compil_script)
compil_script_path.copy(compil_scripts_tmp_dir)
- product_pyconf_cfg[p_info.section].compil_script = os.path.basename(
- p_info.compil_script)
+
# find the environment script if any
if src.product.product_has_env_script(p_info):
env_script_path = src.Path(p_info.environ.env_script)
env_script_path.copy(env_scripts_tmp_dir)
- product_pyconf_cfg[p_info.section].environ.env_script = os.path.basename(
- p_info.environ.env_script)
+
# find the patches if any
if src.product.product_has_patches(p_info):
patches = src.pyconf.Sequence()
p_path.copy(patches_tmp_dir)
patches.append(os.path.basename(patch_path), "")
- product_pyconf_cfg[p_info.section].patches = patches
-
- if with_vcs:
- # put in the pyconf file the resolved values
- for info in ["git_info", "cvs_info", "svn_info"]:
- if info in p_info:
- for key in p_info[info]:
- product_pyconf_cfg[p_info.section][info][key] = p_info[
- info][key]
- else:
- # if the product is not archive, then make it become archive.
- if src.product.product_is_vcs(p_info):
- product_pyconf_cfg[p_info.section].get_source = "archive"
- if not "archive_info" in product_pyconf_cfg[p_info.section]:
- product_pyconf_cfg[p_info.section].addMapping("archive_info",
+ if (not with_vcs) and src.product.product_is_vcs(p_info):
+    # in non-vcs mode, if the product is not an archive, make it become an archive.
+
+ # depending upon the incremental mode, select impacted sections
+ if "properties" in p_info and "incremental" in p_info.properties and\
+ p_info.properties.incremental == "yes":
+ sections = ["default", "default_win", p_info.section, p_info.section+"_win"]
+ else:
+ sections = [p_info.section]
+ for section in sections:
+ if section in product_pyconf_cfg and "get_source" in product_pyconf_cfg[section]:
+ DBG.write("sat package set archive mode to archive for product %s and section %s" %\
+ (p_name,section))
+ product_pyconf_cfg[section].get_source = "archive"
+ if not "archive_info" in product_pyconf_cfg[section]:
+ product_pyconf_cfg[section].addMapping("archive_info",
src.pyconf.Mapping(product_pyconf_cfg),
"")
- product_pyconf_cfg[p_info.section
- ].archive_info.archive_name = p_info.name + ".tgz"
+ product_pyconf_cfg[section].archive_info.archive_name =\
+ p_info.name + ".tgz"
# write the pyconf file to the temporary project location
product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir,
product_pyconf_cfg.__save__(ff, 1)
ff.close()
-def find_application_pyconf(config, application_tmp_dir):
- '''Find the application pyconf file and put it in the specific temporary
+
+def write_application_pyconf(config, application_tmp_dir):
+ '''Write the application pyconf file in the specific temporary
directory containing the specific project of a source package.
:param config Config: The global configuration.
:param application_tmp_dir str: The path to the temporary application
- scripts directory of the project.
+ scripts directory of the project.
'''
- # read the pyconf of the application
application_name = config.VARS.application
- application_pyconf_path = src.find_file_in_lpath(
- application_name + ".pyconf",
- config.PATHS.APPLICATIONPATH)
- application_pyconf_cfg = src.pyconf.Config(application_pyconf_path)
-
- # Change the workdir
- application_pyconf_cfg.APPLICATION.workdir = src.pyconf.Reference(
- application_pyconf_cfg,
- src.pyconf.DOLLAR,
- 'VARS.salometoolsway + $VARS.sep + ".."')
-
- # Prevent from compilation in base
- application_pyconf_cfg.APPLICATION.no_base = "yes"
-
- #remove products that are not in config (which were filtered by --without_properties)
- for product_name in application_pyconf_cfg.APPLICATION.products.keys():
- if product_name not in config.APPLICATION.products.keys():
- application_pyconf_cfg.APPLICATION.products.__delitem__(product_name)
-
# write the pyconf file to the temporary application location
application_tmp_pyconf_path = os.path.join(application_tmp_dir,
application_name + ".pyconf")
-
- ff = open(application_tmp_pyconf_path, 'w')
- ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
- application_pyconf_cfg.__save__(ff, 1)
- ff.close()
+ with open(application_tmp_pyconf_path, 'w') as f:
+ f.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
+ res = src.pyconf.Config()
+ app = src.pyconf.deepCopyMapping(config.APPLICATION)
+ # no base in packages
+ if "base" in app:
+ app.base = "no"
+ # Change the workdir
+ app.workdir = src.pyconf.Reference(
+ app,
+ src.pyconf.DOLLAR,
+ 'VARS.salometoolsway + $VARS.sep + ".."')
+ res.addMapping("APPLICATION", app, "")
+ res.__save__(f, evaluated=False)
+
def sat_package(config, tmp_working_dir, options, logger):
'''Prepare a dictionary that stores all the needed directories and files to
SALOME (the directory where this file is located).
"""
+ if src.architecture.is_windows():
+ readme_header = readme_header.replace('$$ROOT','%ROOT%')
readme_compilation_with_binaries="""
compilation based on the binaries used as prerequisites
if options.binaries or options.sources:
d['application'] = config.VARS.application
+ d['BINARIES'] = config.INTERNAL.config.install_dir
+ d['SEPARATOR'] = config.VARS.sep
+ if src.architecture.is_windows():
+ d['operatingSystem'] = 'Windows'
+ d['PYTHON3'] = 'python3'
+ d['ROOT'] = '%ROOT%'
+ else:
+ d['operatingSystem'] = 'Linux'
+ d['PYTHON3'] = ''
+ d['ROOT'] = '$ROOT'
f.write("# Application: " + d['application'] + "\n")
if 'KERNEL' in config.APPLICATION.products:
VersionSalome = src.get_salome_version(config)
if options.sources:
f.write(src.template.substitute(readme_template_path_src, d))
- if options.binaries and options.sources:
+ if options.binaries and options.sources and not src.architecture.is_windows():
f.write(readme_compilation_with_binaries)
if options.project:
if options.project:
if options.sat:
archive_name += "_"
- project_name = options.project
- archive_name += ("satproject_" + project_name)
+ archive_name += ("satproject_" + options.project)
if len(archive_name)==0: # no option worked
msg = _("Error: Cannot name the archive\n"
logger.write("\n", 1)
return 1
- path_targz = os.path.join(dir_name, archive_name + ".tgz")
+ path_targz = os.path.join(dir_name, archive_name + PACKAGE_EXT)
src.printcolors.print_value(logger, "Package path", path_targz, 2)
d_paths_to_substitute[source_dir]=path_in_archive
d_files_to_add.update(d_bin_files_to_add)
-
if options.sources:
d_files_to_add.update(source_package(runner,
runner.cfg,
options, logger))
if options.project:
- DBG.write("config for package %s" % project_name, runner.cfg)
- d_files_to_add.update(project_package(runner.cfg, project_name, options.project_file_path, options.ftp, tmp_working_dir, options.sat, logger))
+ DBG.write("config for package %s" % options.project, runner.cfg)
+ d_files_to_add.update(project_package(runner.cfg, options.project, options.project_file_path, options.ftp, tmp_working_dir, options.sat, logger))
if not(d_files_to_add):
msg = _("Error: Empty dictionnary to build the archive!\n")