import tarfile
import codecs
import string
+import glob
import pprint as PP
-
+import sys
import src
from application import get_SALOME_modules
import src.debug as DBG
+old_python = sys.version_info[0] == 2 and sys.version_info[1] <= 6
+
BINARY = "binary"
SOURCE = "Source"
PROJECT = "Project"
IGNORED_DIRS = [".git", ".svn"]
IGNORED_EXTENSIONS = []
-PROJECT_TEMPLATE = """#!/usr/bin/env python
+PACKAGE_EXT=".tar.gz" # the extension we use for the packages
+
+if src.architecture.is_windows():
+ PROJECT_TEMPLATE = """#!/usr/bin/env python
#-*- coding:utf-8 -*-
# The path to the archive root directory
# Where to search the pyconf of the machines of the project
MACHINEPATH : $project_path + "machines/"
"""
+else:
+ PROJECT_TEMPLATE = """#!/usr/bin/env python
+#-*- coding:utf-8 -*-
+
+# path to the PROJECT
+project_path : $PWD + "/"
+
+# Where to search the archives of the products
+ARCHIVEPATH : $project_path + "ARCHIVES"
+# Where to search the pyconf of the applications
+APPLICATIONPATH : $project_path + "applications/"
+# Where to search the pyconf of the products
+PRODUCTPATH : $project_path + "products/"
+# Where to search the pyconf of the jobs of the project
+JOBPATH : $project_path + "jobs/"
+# Where to search the pyconf of the machines of the project
+MACHINEPATH : $project_path + "machines/"
+"""
+
LOCAL_TEMPLATE = ("""#!/usr/bin/env python
#-*- coding:utf-8 -*-
workdir : 'default'
log_dir : 'default'
archive_dir : 'default'
- VCS : None
- tag : None
+ VCS : 'unknown'
+ tag : 'unknown'
}
PROJECTS :
{
-project_file_paths : [$VARS.salometoolsway + $VARS.sep + \"..\" + $VARS.sep"""
-""" + \"""" + PROJECT_DIR + """\" + $VARS.sep + "project.pyconf"]
+ project_file_paths :
+ [
+$LOCAL.workdir + $VARS.sep + \"""" + PROJECT_DIR + """\" + $VARS.sep + "project.pyconf"
+ ]
}
""")
parser.add_option('s', 'sources', 'boolean', 'sources',
_('Optional: Produce a compilable archive of the sources of the '
'application.'), False)
+parser.add_option('', 'bin_products', 'boolean', 'bin_products',
+ _('Optional: Create binary archives for all products.'), False)
parser.add_option('', 'with_vcs', 'boolean', 'with_vcs',
_('Optional: Do not make archive for products in VCS mode (git, cvs, svn). '
- 'Sat prepare will use VCS mode instead to retrieve them'),
+ 'Sat prepare will use VCS mode instead to retrieve them.'
+ '\n Also, when combined with "--bin_products" option, restrict the building of product archives to VCS products.'),
False)
parser.add_option('', 'ftp', 'boolean', 'ftp',
_('Optional: Do not embed archives for products in archive mode.'
'Sat prepare will use ftp instead to retrieve them'),
False)
+parser.add_option('e', 'exe', 'string', 'exe',
+ _('Optional: Produce an extra launcher based upon the exe given as argument.'), "")
parser.add_option('p', 'project', 'string', 'project',
_('Optional: Produce an archive that contains a project.'), "")
parser.add_option('t', 'salometools', 'boolean', 'sat',
_('Optional: The name or full path of the archive.'), None)
parser.add_option('', 'add_files', 'list2', 'add_files',
_('Optional: The list of additional files to add to the archive.'), [])
-parser.add_option('', 'without_commercial', 'boolean', 'without_commercial',
- _('Optional: do not add commercial licence.'), False)
parser.add_option('', 'without_properties', 'properties', 'without_properties',
_('Optional: Filter the products by their properties.\n\tSyntax: '
'--without_properties <property>:<value>'))
names = sorted(d_content.keys())
DBG.write("add tar names", names)
+ # used to avoid duplications (for pip install in python, or single_install_dir cases)
+ already_added=set()
for name in names:
# display information
len_points = max_len - len(name) + 3
# of the directory or file to add
# Add it in the archive
try:
- tar.add(local_path, arcname=in_archive, exclude=f_exclude)
+ key=local_path+"->"+in_archive
+ if key not in already_added:
+ if old_python:
+ tar.add(local_path,
+ arcname=in_archive,
+ exclude=exclude_VCS_and_extensions_26)
+ else:
+ tar.add(local_path,
+ arcname=in_archive,
+ filter=exclude_VCS_and_extensions)
+ already_added.add(key)
logger.write(src.printcolors.printcSuccess(_("OK")), 3)
except Exception as e:
logger.write(src.printcolors.printcError(_("KO ")), 3)
logger.write("\n", 3)
return success
-def exclude_VCS_and_extensions(filename):
+
+def exclude_VCS_and_extensions_26(filename):
''' The function that is used to exclude from package the link to the
- VCS repositories (like .git)
+ VCS repositories (like .git) (only for python 2.6)
:param filename Str: The filname to exclude (or not).
:return: True if the file has to be exclude
return True
return False
+def exclude_VCS_and_extensions(tarinfo):
+ ''' The function that is used to exclude from package the link to the
+ VCS repositories (like .git)
+
+    :param tarinfo TarInfo: The tar member to test (its name is checked).
+    :return: None if the file has to be excluded
+ :rtype: tarinfo or None
+ '''
+ filename = tarinfo.name
+ for dir_name in IGNORED_DIRS:
+ if dir_name in filename:
+ return None
+ for extension in IGNORED_EXTENSIONS:
+ if filename.endswith(extension):
+ return None
+ return tarinfo
+
def produce_relative_launcher(config,
logger,
file_dir,
file_name,
- binaries_dir_name,
- with_commercial=True):
+ binaries_dir_name):
'''Create a specific SALOME launcher for the binary package. This launcher
uses relative paths.
:rtype: str
'''
+ # set base mode to "no" for the archive - save current mode to restore it at the end
+ if "base" in config.APPLICATION:
+ base_setting=config.APPLICATION.base
+ else:
+ base_setting="maybe"
+ config.APPLICATION.base="no"
+
# get KERNEL installation path
- kernel_root_dir = os.path.join(binaries_dir_name, "KERNEL")
+ kernel_info = src.product.get_product_config(config, "KERNEL")
+ kernel_base_name=os.path.basename(kernel_info.install_dir)
+ if kernel_info.install_mode == "base":
+ # case of kernel installed in base. the kernel install dir name is different in the archive
+ kernel_base_name=os.path.basename(os.path.dirname(kernel_info.install_dir))
+
+ kernel_root_dir = os.path.join(binaries_dir_name, kernel_base_name)
# set kernel bin dir (considering fhs property)
kernel_cfg = src.product.get_product_config(config, "KERNEL")
else:
app_root_dir=os.path.join(binaries_dir_name, salome_application_name)
- # Get the launcher template and do substitutions
+ additional_env={}
+ additional_env['sat_bin_kernel_install_dir'] = "out_dir_Path + " +\
+ config.VARS.sep + bin_kernel_install_dir
if "python3" in config.APPLICATION and config.APPLICATION.python3 == "yes":
- withProfile = src.fileEnviron.withProfile3
+ additional_env['sat_python_version'] = 3
else:
- withProfile = src.fileEnviron.withProfile
-
- withProfile = withProfile.replace(
- "ABSOLUTE_APPLI_PATH'] = 'KERNEL_INSTALL_DIR'",
- "ABSOLUTE_APPLI_PATH'] = out_dir_Path + '" + config.VARS.sep + app_root_dir + "'")
- withProfile = withProfile.replace(
- " 'BIN_KERNEL_INSTALL_DIR'",
- " out_dir_Path + '" + config.VARS.sep + bin_kernel_install_dir + "'")
+ additional_env['sat_python_version'] = 2
- before, after = withProfile.split("# here your local standalone environment\n")
+ additional_env['ABSOLUTE_APPLI_PATH'] = "out_dir_Path" + config.VARS.sep + app_root_dir
+ launcher_name = src.get_launcher_name(config)
+ additional_env['APPLI'] = "out_dir_Path" + config.VARS.sep + file_name
# create an environment file writer
writer = src.environment.FileEnvWriter(config,
logger,
file_dir,
- src_root=None)
+ src_root=None,
+ env_info=None)
filepath = os.path.join(file_dir, file_name)
- # open the file and write into it
- launch_file = open(filepath, "w")
- launch_file.write(before)
# Write
- writer.write_cfgForPy_file(launch_file,
- for_package = binaries_dir_name,
- with_commercial=with_commercial)
- launch_file.write(after)
- launch_file.close()
+ writer.write_env_file(filepath,
+ False, # for launch
+ "cfgForPy",
+ additional_env=additional_env,
+ no_path_init=False,
+ for_package = binaries_dir_name)
# Little hack to put out_dir_Path outside the strings
src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' )
+ src.replace_in_file(filepath, "r'out_dir_Path + ", "out_dir_Path + r'" )
# A hack to put a call to a file for distene licence.
# It does nothing to an application that has no distene product
stat.S_IXGRP |
stat.S_IXOTH)
+    # restore the modified setting to its initial value
+ config.APPLICATION.base=base_setting
+
return filepath
def hack_for_distene_licence(filepath, licence_file):
return
del text[num_line +1]
del text[num_line +1]
- text_to_insert =""" import imp
- try:
- distene = imp.load_source('distene_licence', '%s')
+ text_to_insert =""" try:
+ distene_licence_file=r"%s"
+ if sys.version_info[0] >= 3 and sys.version_info[1] >= 5:
+ import importlib.util
+ spec_dist = importlib.util.spec_from_file_location("distene_licence", distene_licence_file)
+ distene=importlib.util.module_from_spec(spec_dist)
+ spec_dist.loader.exec_module(distene)
+ else:
+ import imp
+ distene = imp.load_source('distene_licence', distene_licence_file)
distene.set_distene_variables(context)
except:
pass\n""" % licence_file
def produce_relative_env_files(config,
logger,
file_dir,
- binaries_dir_name):
+ binaries_dir_name,
+ exe_name=None):
'''Create some specific environment files for the binary package. These
files use relative paths.
:param file_dir str: the directory where to put the files
:param binaries_dir_name str: the name of the repository where the binaries
are, in the archive.
+ :param exe_name str: if given generate a launcher executing exe_name
:return: the list of path of the produced environment files
:rtype: List
'''
+
+ # set base mode to "no" for the archive - save current mode to restore it at the end
+ if "base" in config.APPLICATION:
+ base_setting=config.APPLICATION.base
+ else:
+ base_setting="maybe"
+ config.APPLICATION.base="no"
+
# create an environment file writer
writer = src.environment.FileEnvWriter(config,
logger,
file_dir,
src_root=None)
+ if src.architecture.is_windows():
+ shell = "bat"
+ filename = "env_launch.bat"
+ else:
+ shell = "bash"
+ filename = "env_launch.sh"
+
+ if exe_name:
+ filename=os.path.basename(exe_name)
+
# Write
- filepath = writer.write_env_file("env_launch.sh",
+ filepath = writer.write_env_file(filename,
False, # for launch
- "bash",
+ shell,
for_package = binaries_dir_name)
# Little hack to put out_dir_Path as environment variable
- src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )
+ if src.architecture.is_windows() :
+ src.replace_in_file(filepath, '"out_dir_Path', '"%out_dir_Path%' )
+ src.replace_in_file(filepath, '=out_dir_Path', '=%out_dir_Path%' )
+ src.replace_in_file(filepath, ';out_dir_Path', ';%out_dir_Path%' )
+ else:
+ src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )
+ src.replace_in_file(filepath, ':out_dir_Path', ':${out_dir_Path}' )
+ src.replace_in_file(filepath, ';out_dir_Path', ';${out_dir_Path}' )
+
+ if exe_name:
+ if src.architecture.is_windows():
+ cmd="\n\nrem Launch exe with user arguments\n%s " % exe_name + "%*"
+ else:
+ cmd='\n\n# Launch exe with user arguments\n%s "$*"' % exe_name
+ with open(filepath, "a") as exe_launcher:
+ exe_launcher.write(cmd)
# change the rights in order to make the file executable for everybody
os.chmod(filepath,
stat.S_IXGRP |
stat.S_IXOTH)
+    # restore the modified setting to its initial value
+ config.APPLICATION.base=base_setting
+
return filepath
def produce_install_bin_file(config,
"INSTALL_BIN.template")
# build the name of the directory that will contain the binaries
- binaries_dir_name = "BINARIES-" + config.VARS.dist
+ binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
# build the substitution loop
loop_cmd = "for f in $(grep -RIl"
for key in d_sub:
loop_cmd += " -e "+ key
- loop_cmd += ' INSTALL); do\n sed -i "\n'
+ loop_cmd += ' ' + config.INTERNAL.config.install_dir +\
+ '); do\n sed -i "\n'
for key in d_sub:
loop_cmd += " s?" + key + "?$(pwd)/" + d_sub[key] + "?g\n"
loop_cmd += ' " $f\ndone'
d={}
d["BINARIES_DIR"] = binaries_dir_name
d["SUBSTITUTION_LOOP"]=loop_cmd
+ d["INSTALL_DIR"]=config.INTERNAL.config.install_dir
# substitute the template and write it in file
content=src.template.substitute(installbin_template_path, d)
return tmp_file_path
+def bin_products_archives(config, logger, only_vcs):
+ '''Prepare binary packages for all products
+ :param config Config: The global configuration.
+ :return: the error status
+ :rtype: bool
+ '''
+
+ logger.write("Make %s binary archives\n" % config.VARS.dist)
+ # Get the default directory where to put the packages
+ binpackage_path = os.path.join(config.APPLICATION.workdir, "PACKAGE", "products")
+ src.ensure_path_exists(binpackage_path)
+ # Get the list of product installation to add to the archive
+ l_products_name = sorted(config.APPLICATION.products.keys())
+ l_product_info = src.product.get_products_infos(l_products_name,
+ config)
+ # first loop on products : filter products, analyse properties,
+ # and store the information that will be used to create the archive in the second loop
+ l_not_installed=[] # store not installed products for warning at the end
+ for prod_name, prod_info in l_product_info:
+ # ignore the native and fixed products for install directories
+ if (src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes"
+ or src.product.product_is_native(prod_info)
+ or src.product.product_is_fixed(prod_info)
+ or not src.product.product_compiles(prod_info)):
+ continue
+ if only_vcs and not src.product.product_is_vcs(prod_info):
+ continue
+ if not src.product.check_installation(config, prod_info):
+ l_not_installed.append(prod_name)
+ continue # product is not installed, we skip it
+ # prepare call to make_bin_archive
+ path_targz_prod = os.path.join(binpackage_path, prod_name + '-' + prod_info.version.replace("/", "_") + "-" + config.VARS.dist + PACKAGE_EXT)
+ targz_prod = tarfile.open(path_targz_prod, mode='w:gz')
+ bin_path = prod_info.install_dir
+ targz_prod.add(bin_path)
+ targz_prod.close()
+        # compute the MD5 checksum of the produced archive
+ import hashlib
+ with open(path_targz_prod,"rb") as f:
+ bytes = f.read() # read file as bytes
+ readable_hash = hashlib.md5(bytes).hexdigest();
+ with open(path_targz_prod+".md5", "w") as md5sum:
+ md5sum.write("%s %s" % (readable_hash, os.path.basename(path_targz_prod)))
+ logger.write(" archive : %s (md5sum = %s)\n" % (path_targz_prod, readable_hash))
+
+ return 0
+
def binary_package(config, logger, options, tmp_working_dir):
'''Prepare a dictionary that stores all the needed directories and files to
add in a binary package.
l_products_name = sorted(config.APPLICATION.products.keys())
l_product_info = src.product.get_products_infos(l_products_name,
config)
+
+ # suppress compile time products for binaries-only archives
+ if not options.sources:
+ update_config(config, logger, "compile_time", "yes")
+
l_install_dir = []
l_source_dir = []
l_not_installed = []
config.APPLICATION.properties.mesa_launcher_in_package == "yes") :
generate_mesa_launcher=True
+ # first loop on products : filter products, analyse properties,
+ # and store the information that will be used to create the archive in the second loop
for prod_name, prod_info in l_product_info:
# skip product with property not_in_package set to yes
if src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes":
or src.product.product_is_fixed(prod_info)
or not src.product.product_compiles(prod_info)):
continue
- if src.product.check_installation(prod_info):
- l_install_dir.append((prod_name, prod_info.install_dir))
+ #
+ # products with single_dir property will be installed in the PRODUCTS directory of the archive
+ is_single_dir=(src.appli_test_property(config,"single_install_dir", "yes") and \
+ src.product.product_test_property(prod_info,"single_install_dir", "yes"))
+ if src.product.check_installation(config, prod_info):
+ l_install_dir.append((prod_name, prod_info.name, prod_info.install_dir,
+ is_single_dir, prod_info.install_mode))
else:
l_not_installed.append(prod_name)
# cpp module
for name_cpp in src.product.get_product_components(prod_info):
install_dir = os.path.join(config.APPLICATION.workdir,
- "INSTALL", name_cpp)
+ config.INTERNAL.config.install_dir,
+ name_cpp)
if os.path.exists(install_dir):
- l_install_dir.append((name_cpp, install_dir))
+ l_install_dir.append((name_cpp, name_cpp, install_dir, False, "value"))
else:
l_not_installed.append(name_cpp)
# check the name of the directory that (could) contains the binaries
# from previous detar
- binaries_from_detar = os.path.join(config.APPLICATION.workdir, "BINARIES-" + config.VARS.dist)
+ binaries_from_detar = os.path.join(
+ config.APPLICATION.workdir,
+ config.INTERNAL.config.binary_dir + config.VARS.dist)
if os.path.exists(binaries_from_detar):
logger.write("""
WARNING: existing binaries directory from previous detar installation:
if len(l_not_installed) > 0:
text_missing_prods = ""
for p_name in l_not_installed:
- text_missing_prods += "-" + p_name + "\n"
+ text_missing_prods += " - " + p_name + "\n"
if not options.force_creation:
- msg = _("ERROR: there are missing products installations:")
+ msg = _("ERROR: there are missing product installations:")
logger.write("%s\n%s" % (src.printcolors.printcError(msg),
text_missing_prods),
1)
- return None
+ raise src.SatException(msg)
else:
msg = _("WARNING: there are missing products installations:")
logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
for p_name in l_sources_not_present:
text_missing_prods += "-" + p_name + "\n"
if not options.force_creation:
- msg = _("ERROR: there are missing products sources:")
+ msg = _("ERROR: there are missing product sources:")
logger.write("%s\n%s" % (src.printcolors.printcError(msg),
text_missing_prods),
1)
- return None
+ raise src.SatException(msg)
else:
msg = _("WARNING: there are missing products sources:")
logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
1)
# construct the name of the directory that will contain the binaries
- binaries_dir_name = "BINARIES-" + config.VARS.dist
-
+ if src.architecture.is_windows():
+ binaries_dir_name = config.INTERNAL.config.binary_dir
+ else:
+ binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
# construct the correlation table between the product names, there
# actual install directories and there install directory in archive
d_products = {}
- for prod_name, install_dir in l_install_dir:
- path_in_archive = os.path.join(binaries_dir_name, prod_name)
+ for prod_name, prod_info_name, install_dir, is_single_dir, install_mode in l_install_dir:
+ prod_base_name=os.path.basename(install_dir)
+ if install_mode == "base":
+            # case of a product installed in base.
+            # because the archive is in base:no mode, the name of the install dir is different inside the archive
+            # we set it to the product name, or to PRODUCTS if single-dir
+ if is_single_dir:
+ prod_base_name=config.INTERNAL.config.single_install_dir
+ else:
+ prod_base_name=prod_info_name
+ path_in_archive = os.path.join(binaries_dir_name, prod_base_name)
d_products[prod_name + " (bin)"] = (install_dir, path_in_archive)
for prod_name, source_dir in l_source_dir:
path_in_archive = os.path.join("SOURCES", prod_name)
d_products[prod_name + " (sources)"] = (source_dir, path_in_archive)
+    # create an archive of the compilation logs and insert it into the tarball
+ logpath=os.path.join(config.APPLICATION.workdir, "LOGS")
+ path_targz_logs = os.path.join(tmp_working_dir, "logs.tgz")
+ tar_log = tarfile.open(path_targz_logs, mode='w:gz')
+ tar_log.add(logpath, arcname="LOGS")
+ tar_log.close()
+ d_products["LOGS"] = (path_targz_logs, "logs.tgz")
+
# for packages of SALOME applications including KERNEL,
# we produce a salome launcher or a virtual application (depending on salome version)
if 'KERNEL' in config.APPLICATION.products:
logger,
tmp_working_dir,
launcher_name,
- binaries_dir_name,
- not(options.without_commercial))
+ binaries_dir_name)
d_products["launcher"] = (launcher_package, launcher_name)
# if the application contains mesa products, we generate in addition to the
logger,
tmp_working_dir,
launcher_mesa_name,
- binaries_dir_name,
- not(options.without_commercial))
+ binaries_dir_name)
d_products["launcher (mesa)"] = (launcher_package_mesa, launcher_mesa_name)
# if there was a use_mesa value, we restore it
if options.sources:
# if we mix binaries and sources, we add a copy of the launcher,
# prefixed with "bin",in order to avoid clashes
- d_products["launcher (copy)"] = (launcher_package, "bin"+launcher_name)
+ launcher_copy_name="bin"+launcher_name
+ launcher_package_copy = produce_relative_launcher(config,
+ logger,
+ tmp_working_dir,
+ launcher_copy_name,
+ binaries_dir_name)
+ d_products["launcher (copy)"] = (launcher_package_copy, launcher_copy_name)
else:
# Provide a script for the creation of an application EDF style
appli_script = product_appli_creation_script(config,
tmp_working_dir,
binaries_dir_name)
- d_products["environment file"] = (env_file, "env_launch.sh")
-
+ if src.architecture.is_windows():
+ filename = "env_launch.bat"
+ else:
+ filename = "env_launch.sh"
+ d_products["environment file"] = (env_file, filename)
+
+ # If option exe, produce an extra launcher based on specified exe
+ if options.exe:
+ exe_file = produce_relative_env_files(config,
+ logger,
+ tmp_working_dir,
+ binaries_dir_name,
+ options.exe)
+
+ if src.architecture.is_windows():
+ filename = os.path.basename(options.exe) + ".bat"
+ else:
+ filename = os.path.basename(options.exe) + ".sh"
+ d_products["exe file"] = (exe_file, filename)
+
+
return d_products
def source_package(sat, config, logger, options, tmp_working_dir):
# Add salomeTools
tmp_sat = add_salomeTools(config, tmp_working_dir)
- d_sat = {"salomeTools" : (tmp_sat, "salomeTools")}
+ d_sat = {"salomeTools" : (tmp_sat, "sat")}
# Add a sat symbolic link if not win
if not src.architecture.is_windows():
- tmp_satlink_path = os.path.join(tmp_working_dir, 'sat')
try:
t = os.getcwd()
except:
# In the jobs, os.getcwd() can fail
t = config.LOCAL.workdir
os.chdir(tmp_working_dir)
- if os.path.lexists(tmp_satlink_path):
- os.remove(tmp_satlink_path)
- os.symlink(os.path.join('salomeTools', 'sat'), 'sat')
+
+        # create a symlink, to avoid references through "salomeTools/.."
+ os.chdir("PROJECT")
+ if os.path.lexists("ARCHIVES"):
+ os.remove("ARCHIVES")
+ os.symlink("../ARCHIVES", "ARCHIVES")
os.chdir(t)
- d_sat["sat link"] = (tmp_satlink_path, "sat")
+ d_sat["sat archive link"] = (os.path.join(tmp_working_dir,"PROJECT", "ARCHIVES"),
+ os.path.join("PROJECT", "ARCHIVES"))
d_source = src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
return d_source
if p_info.get_source == "archive":
archive_path = p_info.archive_info.archive_name
archive_name = os.path.basename(archive_path)
+ d_archives[p_name] = (archive_path,
+ os.path.join(ARCHIVE_DIR, archive_name))
+ if (src.appli_test_property(config,"pip", "yes") and
+ src.product.product_test_property(p_info,"pip", "yes")):
+ # if pip mode is activated, and product is managed by pip
+ pip_wheels_dir=os.path.join(config.LOCAL.archive_dir,"wheels")
+ pip_wheel_pattern=os.path.join(pip_wheels_dir,
+ "%s-%s*" % (p_info.name, p_info.version))
+ pip_wheel_path=glob.glob(pip_wheel_pattern)
+ msg_pip_not_found="Error in get_archive, pip wheel for "\
+ "product %s-%s was not found in %s directory"
+ msg_pip_two_or_more="Error in get_archive, several pip wheels for "\
+ "product %s-%s were found in %s directory"
+ if len(pip_wheel_path)==0:
+ raise src.SatException(msg_pip_not_found %\
+ (p_info.name, p_info.version, pip_wheels_dir))
+ if len(pip_wheel_path)>1:
+ raise src.SatException(msg_pip_two_or_more %\
+ (p_info.name, p_info.version, pip_wheels_dir))
+
+ pip_wheel_name=os.path.basename(pip_wheel_path[0])
+ d_archives[p_name+" (pip wheel)"]=(pip_wheel_path[0],
+ os.path.join(ARCHIVE_DIR, "wheels", pip_wheel_name))
else:
- l_pinfo_vcs.append((p_name, p_info))
+ # this product is not managed by archive,
+ # an archive of the vcs directory will be created by get_archive_vcs
+ l_pinfo_vcs.append((p_name, p_info))
- d_archives[p_name] = (archive_path,
- os.path.join(ARCHIVE_DIR, archive_name))
return d_archives, l_pinfo_vcs
def add_salomeTools(config, tmp_working_dir):
# DBG.write("END sat config", sat.cfg.APPLICATION, True)
return d_archives_vcs
+def make_bin_archive(prod_name, prod_info, where):
+ '''Create an archive of a product by searching its source directory.
+
+ :param prod_name str: The name of the product.
+ :param prod_info Config: The specific configuration corresponding to the
+ product
+ :param where str: The path of the repository where to put the resulting
+ archive
+ :return: The path of the resulting archive
+ :rtype: str
+ '''
+ path_targz_prod = os.path.join(where, prod_name + PACKAGE_EXT)
+ tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
+ bin_path = prod_info.install_dir
+ tar_prod.add(bin_path, arcname=path_targz_prod)
+ tar_prod.close()
+ return path_targz_prod
+
def make_archive(prod_name, prod_info, where):
'''Create an archive of a product by searching its source directory.
:return: The path of the resulting archive
:rtype: str
'''
- path_targz_prod = os.path.join(where, prod_name + ".tgz")
+ path_targz_prod = os.path.join(where, prod_name + PACKAGE_EXT)
tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
local_path = prod_info.source_dir
- tar_prod.add(local_path,
- arcname=prod_name,
- exclude=exclude_VCS_and_extensions)
+ if old_python:
+ tar_prod.add(local_path,
+ arcname=prod_name,
+ exclude=exclude_VCS_and_extensions_26)
+ else:
+ tar_prod.add(local_path,
+ arcname=prod_name,
+ filter=exclude_VCS_and_extensions)
tar_prod.close()
return path_targz_prod
patches_tmp_dir,
products_pyconf_tmp_dir)
- find_application_pyconf(config, application_tmp_dir)
+ # for the application pyconf, we write directly the config
+ # don't search for the original pyconf file
+ # to avoid problems with overwrite sections and rm_products key
+ write_application_pyconf(config, application_tmp_dir)
d_project = {"project" : (project_tmp_dir, PROJECT_DIR )}
return d_project
'''
# read the pyconf of the product
- product_pyconf_path = src.find_file_in_lpath(p_name + ".pyconf",
- config.PATHS.PRODUCTPATH)
- product_pyconf_cfg = src.pyconf.Config(product_pyconf_path)
+ product_pyconf_cfg = src.pyconf.Config(p_info.from_file)
# find the compilation script if any
if src.product.product_has_script(p_info):
compil_script_path = src.Path(p_info.compil_script)
compil_script_path.copy(compil_scripts_tmp_dir)
- product_pyconf_cfg[p_info.section].compil_script = os.path.basename(
- p_info.compil_script)
+
# find the environment script if any
if src.product.product_has_env_script(p_info):
env_script_path = src.Path(p_info.environ.env_script)
env_script_path.copy(env_scripts_tmp_dir)
- product_pyconf_cfg[p_info.section].environ.env_script = os.path.basename(
- p_info.environ.env_script)
+
# find the patches if any
if src.product.product_has_patches(p_info):
patches = src.pyconf.Sequence()
p_path.copy(patches_tmp_dir)
patches.append(os.path.basename(patch_path), "")
- product_pyconf_cfg[p_info.section].patches = patches
-
- if with_vcs:
- # put in the pyconf file the resolved values
- for info in ["git_info", "cvs_info", "svn_info"]:
- if info in p_info:
- for key in p_info[info]:
- product_pyconf_cfg[p_info.section][info][key] = p_info[
- info][key]
- else:
- # if the product is not archive, then make it become archive.
- if src.product.product_is_vcs(p_info):
- product_pyconf_cfg[p_info.section].get_source = "archive"
- if not "archive_info" in product_pyconf_cfg[p_info.section]:
- product_pyconf_cfg[p_info.section].addMapping("archive_info",
+ if (not with_vcs) and src.product.product_is_vcs(p_info):
+ # in non vcs mode, if the product is not archive, then make it become archive.
+
+ # depending upon the incremental mode, select impacted sections
+ if "properties" in p_info and "incremental" in p_info.properties and\
+ p_info.properties.incremental == "yes":
+ sections = ["default", "default_win", p_info.section, p_info.section+"_win"]
+ else:
+ sections = [p_info.section]
+ for section in sections:
+ if section in product_pyconf_cfg and "get_source" in product_pyconf_cfg[section]:
+ DBG.write("sat package set archive mode to archive for product %s and section %s" %\
+ (p_name,section))
+ product_pyconf_cfg[section].get_source = "archive"
+ if not "archive_info" in product_pyconf_cfg[section]:
+ product_pyconf_cfg[section].addMapping("archive_info",
src.pyconf.Mapping(product_pyconf_cfg),
"")
- product_pyconf_cfg[p_info.section
- ].archive_info.archive_name = p_info.name + ".tgz"
+ product_pyconf_cfg[section].archive_info.archive_name =\
+ p_info.name + ".tgz"
+ # save git repositories for vcs products, even if archive is not in VCS mode
+ # in this case the user will be able to change get_source flag and work with git
+ if src.product.product_is_vcs(p_info):
+ # in vcs mode we must replace explicitely the git server url
+ # (or it will not be found later because project files are not exported in archives)
+ for section in product_pyconf_cfg:
+ # replace in all sections of the product pyconf the git repo definition by its substitued value (found in p_info)
+ if "git_info" in product_pyconf_cfg[section]:
+ for repo in product_pyconf_cfg[section].git_info:
+ if repo in p_info.git_info:
+ product_pyconf_cfg[section].git_info[repo] = p_info.git_info[repo]
+
# write the pyconf file to the temporary project location
product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir,
p_name + ".pyconf")
product_pyconf_cfg.__save__(ff, 1)
ff.close()
-def find_application_pyconf(config, application_tmp_dir):
- '''Find the application pyconf file and put it in the specific temporary
+
+def write_application_pyconf(config, application_tmp_dir):
+ '''Write the application pyconf file in the specific temporary
directory containing the specific project of a source package.
:param config Config: The global configuration.
:param application_tmp_dir str: The path to the temporary application
- scripts directory of the project.
+ scripts directory of the project.
'''
- # read the pyconf of the application
application_name = config.VARS.application
- application_pyconf_path = src.find_file_in_lpath(
- application_name + ".pyconf",
- config.PATHS.APPLICATIONPATH)
- application_pyconf_cfg = src.pyconf.Config(application_pyconf_path)
-
- # Change the workdir
- application_pyconf_cfg.APPLICATION.workdir = src.pyconf.Reference(
- application_pyconf_cfg,
- src.pyconf.DOLLAR,
- 'VARS.salometoolsway + $VARS.sep + ".."')
-
- # Prevent from compilation in base
- application_pyconf_cfg.APPLICATION.no_base = "yes"
-
- #remove products that are not in config (which were filtered by --without_properties)
- for product_name in application_pyconf_cfg.APPLICATION.products.keys():
- if product_name not in config.APPLICATION.products.keys():
- application_pyconf_cfg.APPLICATION.products.__delitem__(product_name)
-
# write the pyconf file to the temporary application location
application_tmp_pyconf_path = os.path.join(application_tmp_dir,
application_name + ".pyconf")
-
- ff = open(application_tmp_pyconf_path, 'w')
- ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
- application_pyconf_cfg.__save__(ff, 1)
- ff.close()
+ with open(application_tmp_pyconf_path, 'w') as f:
+ f.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
+ res = src.pyconf.Config()
+ app = src.pyconf.deepCopyMapping(config.APPLICATION)
+
+ # set base mode to "no" for the archive
+ app.base = "no"
+
+ # Change the workdir
+ app.workdir = src.pyconf.Reference(
+ app,
+ src.pyconf.DOLLAR,
+ 'LOCAL.workdir')
+ res.addMapping("APPLICATION", app, "")
+ res.__save__(f, evaluated=False)
+
def sat_package(config, tmp_working_dir, options, logger):
'''Prepare a dictionary that stores all the needed directories and files to
SALOME (the directory where this file is located).
"""
+ if src.architecture.is_windows():
+ readme_header = readme_header.replace('$$ROOT','%ROOT%')
readme_compilation_with_binaries="""
compilation based on the binaries used as prerequisites
if options.binaries or options.sources:
d['application'] = config.VARS.application
+ d['BINARIES'] = config.INTERNAL.config.binary_dir
+ d['SEPARATOR'] = config.VARS.sep
+ if src.architecture.is_windows():
+ d['operatingSystem'] = 'Windows'
+ d['PYTHON3'] = 'python3'
+ d['ROOT'] = '%ROOT%'
+ else:
+ d['operatingSystem'] = 'Linux'
+ d['PYTHON3'] = ''
+ d['ROOT'] = '$ROOT'
f.write("# Application: " + d['application'] + "\n")
if 'KERNEL' in config.APPLICATION.products:
VersionSalome = src.get_salome_version(config)
if options.sources:
f.write(src.template.substitute(readme_template_path_src, d))
- if options.binaries and options.sources:
+ if options.binaries and options.sources and not src.architecture.is_windows():
f.write(readme_compilation_with_binaries)
if options.project:
return readme_path
-def update_config(config, prop, value):
+def update_config(config, logger, prop, value):
'''Remove from config.APPLICATION.products the products that have the property given as input.
:param config Config: The global config.
l_product_to_remove.append(product_name)
for product_name in l_product_to_remove:
config.APPLICATION.products.__delitem__(product_name)
+ logger.write("Remove product %s with property %s\n" % (product_name, prop), 5)
def description():
'''method that is called when salomeTools is called with --help option.
# Parse the options
(options, args) = parser.parse_args(args)
+
# Check that a type of package is called, and only one
all_option_types = (options.binaries,
options.sources,
options.project not in ["", None],
- options.sat)
+ options.sat,
+ options.bin_products)
# Check if no option for package type
if all_option_types.count(True) == 0:
msg = _("Error: Precise a type for the package\nUse one of the "
"following options: --binaries, --sources, --project or"
- " --salometools")
+ " --salometools, --bin_products")
logger.write(src.printcolors.printcError(msg), 1)
logger.write("\n", 1)
return 1
-
+ do_create_package = options.binaries or options.sources or options.project or options.sat
+
+ if options.bin_products:
+ ret = bin_products_archives(runner.cfg, logger, options.with_vcs)
+ if ret!=0:
+ return ret
+ if not do_create_package:
+ return 0
+
+ # continue to create a tar.gz package
+
# The repository where to put the package if not Binary or Source
package_default_path = runner.cfg.LOCAL.workdir
-
# if the package contains binaries or sources:
- if options.binaries or options.sources:
+ if options.binaries or options.sources or options.bin_products:
# Check that the command has been called with an application
src.check_config_has_application(runner.cfg)
# Remove the products that are filtered by the --without_properties option
if options.without_properties:
- app = runner.cfg.APPLICATION
- logger.trace("without_properties all products:\n %s\n" % PP.pformat(sorted(app.products.keys())))
prop, value = options.without_properties
- update_config(runner.cfg, prop, value)
- logger.warning("without_properties selected products:\n %s\n" % PP.pformat(sorted(app.products.keys())))
+ update_config(runner.cfg, logger, prop, value)
# Remove from config the products that have the not_in_package property
- update_config(runner.cfg, "not_in_package", "yes")
-
+ update_config(runner.cfg, logger, "not_in_package", "yes")
+
# get the name of the archive or build it
if options.name:
if os.path.basename(options.name) == options.name:
if options.project:
if options.sat:
archive_name += "_"
- project_name = options.project
- archive_name += ("satproject_" + project_name)
+ archive_name += ("satproject_" + options.project)
if len(archive_name)==0: # no option worked
msg = _("Error: Cannot name the archive\n"
logger.write("\n", 1)
return 1
- path_targz = os.path.join(dir_name, archive_name + ".tgz")
+ path_targz = os.path.join(dir_name, archive_name + PACKAGE_EXT)
src.printcolors.print_value(logger, "Package path", path_targz, 2)
for key in d_bin_files_to_add:
if key.endswith("(bin)"):
source_dir = d_bin_files_to_add[key][0]
- path_in_archive = d_bin_files_to_add[key][1].replace("BINARIES-" + runner.cfg.VARS.dist,"INSTALL")
+ path_in_archive = d_bin_files_to_add[key][1].replace(
+ runner.cfg.INTERNAL.config.binary_dir + runner.cfg.VARS.dist,
+ runner.cfg.INTERNAL.config.install_dir)
if os.path.basename(source_dir)==os.path.basename(path_in_archive):
# if basename is the same we will just substitute the dirname
d_paths_to_substitute[os.path.dirname(source_dir)]=\
d_paths_to_substitute[source_dir]=path_in_archive
d_files_to_add.update(d_bin_files_to_add)
-
if options.sources:
d_files_to_add.update(source_package(runner,
runner.cfg,
options, logger))
if options.project:
- DBG.write("config for package %s" % project_name, runner.cfg)
- d_files_to_add.update(project_package(runner.cfg, project_name, options.project_file_path, options.ftp, tmp_working_dir, options.sat, logger))
+ DBG.write("config for package %s" % options.project, runner.cfg)
+ d_files_to_add.update(project_package(runner.cfg, options.project, options.project_file_path, options.ftp, tmp_working_dir, options.sat, logger))
if not(d_files_to_add):
msg = _("Error: Empty dictionnary to build the archive!\n")
tar = tarfile.open(path_targz, mode='w:gz')
# get the filtering function if needed
- filter_function = exclude_VCS_and_extensions
+ if old_python:
+ filter_function = exclude_VCS_and_extensions_26
+ else:
+ filter_function = exclude_VCS_and_extensions
# Add the files to the tarfile object
res = add_files(tar, archive_name, d_files_to_add, logger, f_exclude=filter_function)
if os.path.isdir(tmp_local_working_dir):
shutil.rmtree(tmp_local_working_dir)
- # have to decide some time
- DBG.tofix("make shutil.rmtree('%s') effective" % tmp_working_dir, "", DBG.isDeveloper())
+ # remove the tmp directory, unless the user is registered as a developer
+ if os.path.isdir(tmp_working_dir) and (not DBG.isDeveloper()):
+ shutil.rmtree(tmp_working_dir)
# Print again the path of the package
logger.write("\n", 2)