import string
import glob
import pprint as PP
-
+import sys
import src
from application import get_SALOME_modules
import src.debug as DBG
+old_python = sys.version_info[0] == 2 and sys.version_info[1] <= 6
+
BINARY = "binary"
SOURCE = "Source"
PROJECT = "Project"
PACKAGE_EXT=".tar.gz" # the extension we use for the packages
-PROJECT_TEMPLATE = """#!/usr/bin/env python
+if src.architecture.is_windows():
+ PROJECT_TEMPLATE = """#!/usr/bin/env python
#-*- coding:utf-8 -*-
# The path to the archive root directory
# Where to search the pyconf of the machines of the project
MACHINEPATH : $project_path + "machines/"
"""
+else:
+ PROJECT_TEMPLATE = """#!/usr/bin/env python
+#-*- coding:utf-8 -*-
+
+# path to the PROJECT
+project_path : $PWD + "/"
+
+# Where to search the archives of the products
+ARCHIVEPATH : $project_path + "ARCHIVES"
+# Where to search the pyconf of the applications
+APPLICATIONPATH : $project_path + "applications/"
+# Where to search the pyconf of the products
+PRODUCTPATH : $project_path + "products/"
+# Where to search the pyconf of the jobs of the project
+JOBPATH : $project_path + "jobs/"
+# Where to search the pyconf of the machines of the project
+MACHINEPATH : $project_path + "machines/"
+"""
+
LOCAL_TEMPLATE = ("""#!/usr/bin/env python
#-*- coding:utf-8 -*-
PROJECTS :
{
-project_file_paths : [$VARS.salometoolsway + $VARS.sep + \"..\" + $VARS.sep"""
-""" + \"""" + PROJECT_DIR + """\" + $VARS.sep + "project.pyconf"]
+ project_file_paths :
+ [
+ ]
}
""")
parser.add_option('s', 'sources', 'boolean', 'sources',
_('Optional: Produce a compilable archive of the sources of the '
'application.'), False)
+parser.add_option('', 'bin_products', 'boolean', 'bin_products',
+ _('Optional: Create binary archives for all products.'), False)
parser.add_option('', 'with_vcs', 'boolean', 'with_vcs',
_('Optional: Do not make archive for products in VCS mode (git, cvs, svn). '
- 'Sat prepare will use VCS mode instead to retrieve them'),
+ 'Sat prepare will use VCS mode instead to retrieve them.'
+ '\n Also, when combined with "--bin_products" option, restrict the building of product archives to VCS products.'),
False)
parser.add_option('', 'ftp', 'boolean', 'ftp',
_('Optional: Do not embed archives for products in archive mode.'
'Sat prepare will use ftp instead to retrieve them'),
False)
+parser.add_option('e', 'exe', 'string', 'exe',
+ _('Optional: Produce an extra launcher based upon the exe given as argument.'), "")
parser.add_option('p', 'project', 'string', 'project',
_('Optional: Produce an archive that contains a project.'), "")
parser.add_option('t', 'salometools', 'boolean', 'sat',
try:
key=local_path+"->"+in_archive
if key not in already_added:
- tar.add(local_path, arcname=in_archive, filter=f_exclude)
+ if old_python:
+ tar.add(local_path,
+ arcname=in_archive,
+ exclude=exclude_VCS_and_extensions_26)
+ else:
+ tar.add(local_path,
+ arcname=in_archive,
+ filter=exclude_VCS_and_extensions)
already_added.add(key)
logger.write(src.printcolors.printcSuccess(_("OK")), 3)
except Exception as e:
logger.write("\n", 3)
return success
+
+def exclude_VCS_and_extensions_26(filename):
+    ''' The function that is used to exclude from package the link to the
+        VCS repositories (like .git) (only for python 2.6, whose tarfile.add
+        takes an "exclude" name-predicate instead of a "filter" callable)
+
+    :param filename Str: The filename to exclude (or not).
+    :return: True if the file has to be excluded
+    :rtype: Boolean
+    '''
+    # NOTE(review): substring match, so any path containing an ignored dir
+    # name anywhere is excluded — presumably intentional; confirm IGNORED_DIRS
+    for dir_name in IGNORED_DIRS:
+        if dir_name in filename:
+            return True
+    for extension in IGNORED_EXTENSIONS:
+        if filename.endswith(extension):
+            return True
+    return False
+
def exclude_VCS_and_extensions(tarinfo):
''' The function that is used to exclude from package the link to the
VCS repositories (like .git)
:rtype: str
'''
+ # set base mode to "no" for the archive - save current mode to restore it at the end
+ if "base" in config.APPLICATION:
+ base_setting=config.APPLICATION.base
+ else:
+ base_setting="maybe"
+ config.APPLICATION.base="no"
+
# get KERNEL installation path
kernel_info = src.product.get_product_config(config, "KERNEL")
kernel_base_name=os.path.basename(kernel_info.install_dir)
- if kernel_base_name.startswith("config"):
- # case of kernel installed in base. We remove "config-i"
+ if kernel_info.install_mode == "base":
+ # case of kernel installed in base. The kernel install dir name is different in the archive
kernel_base_name=os.path.basename(os.path.dirname(kernel_info.install_dir))
kernel_root_dir = os.path.join(binaries_dir_name, kernel_base_name)
additional_env['sat_python_version'] = 2
additional_env['ABSOLUTE_APPLI_PATH'] = "out_dir_Path" + config.VARS.sep + app_root_dir
+ launcher_name = src.get_launcher_name(config)
+ additional_env['APPLI'] = "out_dir_Path" + config.VARS.sep + file_name
# create an environment file writer
writer = src.environment.FileEnvWriter(config,
stat.S_IXGRP |
stat.S_IXOTH)
+ # restore modified setting by its initial value
+ config.APPLICATION.base=base_setting
+
return filepath
def hack_for_distene_licence(filepath, licence_file):
def produce_relative_env_files(config,
logger,
file_dir,
- binaries_dir_name):
+ binaries_dir_name,
+ exe_name=None):
'''Create some specific environment files for the binary package. These
files use relative paths.
:param file_dir str: the directory where to put the files
:param binaries_dir_name str: the name of the repository where the binaries
are, in the archive.
+ :param exe_name str: if given generate a launcher executing exe_name
:return: the list of path of the produced environment files
:rtype: List
'''
+
+ # set base mode to "no" for the archive - save current mode to restore it at the end
+ if "base" in config.APPLICATION:
+ base_setting=config.APPLICATION.base
+ else:
+ base_setting="maybe"
+ config.APPLICATION.base="no"
+
# create an environment file writer
writer = src.environment.FileEnvWriter(config,
logger,
shell = "bash"
filename = "env_launch.sh"
+ if exe_name:
+ filename=os.path.basename(exe_name)
+
# Write
filepath = writer.write_env_file(filename,
False, # for launch
src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )
src.replace_in_file(filepath, ':out_dir_Path', ':${out_dir_Path}' )
+ if exe_name:
+ if src.architecture.is_windows():
+ cmd="\n\nrem Launch exe with user arguments\n%s " % exe_name + "%*"
+ else:
+ cmd='\n\n# Launch exe with user arguments\n%s "$*"' % exe_name
+ with open(filepath, "a") as exe_launcher:
+ exe_launcher.write(cmd)
+
# change the rights in order to make the file executable for everybody
os.chmod(filepath,
stat.S_IRUSR |
stat.S_IXGRP |
stat.S_IXOTH)
+ # restore modified setting by its initial value
+ config.APPLICATION.base=base_setting
+
return filepath
def produce_install_bin_file(config,
return tmp_file_path
+def bin_products_archives(config, logger, only_vcs):
+    '''Prepare a binary tar.gz archive (plus a .md5 checksum file) for every
+       installed product of the application.
+
+    :param config Config: The global configuration.
+    :param logger Logger: The logger instance to use for the display.
+    :param only_vcs boolean: If True, restrict archive creation to VCS
+                             (git/svn/cvs) products.
+    :return: the error status
+    :rtype: int
+    '''
+    import hashlib  # hoisted out of the product loop
+
+    logger.write("Make %s binary archives\n" % config.VARS.dist)
+    # Get the default directory where to put the packages
+    binpackage_path = os.path.join(config.APPLICATION.workdir, "PACKAGE", "products")
+    src.ensure_path_exists(binpackage_path)
+    # Get the list of product installation to add to the archive
+    l_products_name = sorted(config.APPLICATION.products.keys())
+    l_product_info = src.product.get_products_infos(l_products_name,
+                                                    config)
+    # loop on products : filter them (properties, VCS mode, installation
+    # status), then archive each remaining installation directory
+    l_not_installed=[] # store not installed products for warning at the end
+    for prod_name, prod_info in l_product_info:
+        # ignore the native and fixed products for install directories
+        if (src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes"
+            or src.product.product_is_native(prod_info)
+            or src.product.product_is_fixed(prod_info)
+            or not src.product.product_compiles(prod_info)):
+            continue
+        if only_vcs and not src.product.product_is_vcs(prod_info):
+            continue
+        if not src.product.check_installation(config, prod_info):
+            l_not_installed.append(prod_name)
+            continue # product is not installed, we skip it
+        # create the binary archive of the product installation
+        path_targz_prod = os.path.join(binpackage_path, prod_name + '-' + prod_info.version + "-" + config.VARS.dist + PACKAGE_EXT)
+        targz_prod = tarfile.open(path_targz_prod, mode='w:gz')
+        bin_path = prod_info.install_dir
+        targz_prod.add(bin_path)
+        targz_prod.close()
+        # compute the MD5 hash of the archive, reading it in chunks to avoid
+        # loading a potentially huge file in memory at once
+        md5 = hashlib.md5()
+        with open(path_targz_prod, "rb") as f:
+            for chunk in iter(lambda: f.read(1 << 20), b""):
+                md5.update(chunk)
+        readable_hash = md5.hexdigest()
+        with open(path_targz_prod+".md5", "w") as md5sum:
+            md5sum.write("%s %s" % (readable_hash, os.path.basename(path_targz_prod)))
+        logger.write(" archive : %s (md5sum = %s)\n" % (path_targz_prod, readable_hash))
+
+    # emit the promised warning for products that could not be archived
+    if l_not_installed:
+        logger.write(src.printcolors.printcWarning(
+            _("Not installed products, skipped: %s\n") % ", ".join(l_not_installed)))
+
+    return 0
+
+
def binary_package(config, logger, options, tmp_working_dir):
'''Prepare a dictionary that stores all the needed directories and files to
add in a binary package.
l_products_name = sorted(config.APPLICATION.products.keys())
l_product_info = src.product.get_products_infos(l_products_name,
config)
+
+ # suppress compile time products for binaries-only archives
+ if not options.sources:
+ update_config(config, logger, "compile_time", "yes")
+
l_install_dir = []
l_source_dir = []
l_not_installed = []
config.APPLICATION.properties.mesa_launcher_in_package == "yes") :
generate_mesa_launcher=True
+ # first loop on products : filter products, analyse properties,
+ # and store the information that will be used to create the archive in the second loop
for prod_name, prod_info in l_product_info:
# skip product with property not_in_package set to yes
if src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes":
or src.product.product_is_fixed(prod_info)
or not src.product.product_compiles(prod_info)):
continue
+ #
+ # products with single_dir property will be installed in the PRODUCTS directory of the archive
+ is_single_dir=(src.appli_test_property(config,"single_install_dir", "yes") and \
+ src.product.product_test_property(prod_info,"single_install_dir", "yes"))
if src.product.check_installation(config, prod_info):
- l_install_dir.append((prod_name, prod_info.install_dir))
+ l_install_dir.append((prod_name, prod_info.name, prod_info.install_dir,
+ is_single_dir, prod_info.install_mode))
else:
l_not_installed.append(prod_name)
config.INTERNAL.config.install_dir,
name_cpp)
if os.path.exists(install_dir):
- l_install_dir.append((name_cpp, install_dir))
+ l_install_dir.append((name_cpp, name_cpp, install_dir, False, "value"))
else:
l_not_installed.append(name_cpp)
# construct the correlation table between the product names, there
# actual install directories and there install directory in archive
d_products = {}
- for prod_name, install_dir in l_install_dir:
+ for prod_name, prod_info_name, install_dir, is_single_dir, install_mode in l_install_dir:
prod_base_name=os.path.basename(install_dir)
- if prod_base_name.startswith("config"):
- # case of a products installed in base. We remove "config-i"
- prod_base_name=os.path.basename(os.path.dirname(install_dir))
+ if install_mode == "base":
+ # case of products installed in base.
+ # because the archive is in base:no mode, the name of the install dir is different inside archive
+ # we set it to the product name or by PRODUCTS if single-dir
+ if is_single_dir:
+ prod_base_name=config.INTERNAL.config.single_install_dir
+ else:
+ prod_base_name=prod_info_name
path_in_archive = os.path.join(binaries_dir_name, prod_base_name)
d_products[prod_name + " (bin)"] = (install_dir, path_in_archive)
path_in_archive = os.path.join("SOURCES", prod_name)
d_products[prod_name + " (sources)"] = (source_dir, path_in_archive)
+ # create an archives of compilation logs, and insert it into the tarball
+ logpath=os.path.join(config.APPLICATION.workdir, "LOGS")
+ path_targz_logs = os.path.join(tmp_working_dir, "logs.tgz")
+ tar_log = tarfile.open(path_targz_logs, mode='w:gz')
+ tar_log.add(logpath, arcname="LOGS")
+ tar_log.close()
+ d_products["LOGS"] = (path_targz_logs, "logs.tgz")
+
# for packages of SALOME applications including KERNEL,
# we produce a salome launcher or a virtual application (depending on salome version)
if 'KERNEL' in config.APPLICATION.products:
if options.sources:
# if we mix binaries and sources, we add a copy of the launcher,
# prefixed with "bin",in order to avoid clashes
- d_products["launcher (copy)"] = (launcher_package, "bin"+launcher_name)
+ launcher_copy_name="bin"+launcher_name
+ launcher_package_copy = produce_relative_launcher(config,
+ logger,
+ tmp_working_dir,
+ launcher_copy_name,
+ binaries_dir_name)
+ d_products["launcher (copy)"] = (launcher_package_copy, launcher_copy_name)
else:
# Provide a script for the creation of an application EDF style
appli_script = product_appli_creation_script(config,
else:
filename = "env_launch.sh"
d_products["environment file"] = (env_file, filename)
+
+ # If option exe, produce an extra launcher based on specified exe
+ if options.exe:
+ exe_file = produce_relative_env_files(config,
+ logger,
+ tmp_working_dir,
+ binaries_dir_name,
+ options.exe)
+
+ if src.architecture.is_windows():
+ filename = os.path.basename(options.exe) + ".bat"
+ else:
+ filename = os.path.basename(options.exe) + ".sh"
+ d_products["exe file"] = (exe_file, filename)
+
+
return d_products
def source_package(sat, config, logger, options, tmp_working_dir):
# Add salomeTools
tmp_sat = add_salomeTools(config, tmp_working_dir)
- d_sat = {"salomeTools" : (tmp_sat, "salomeTools")}
+ d_sat = {"salomeTools" : (tmp_sat, "sat")}
# Add a sat symbolic link if not win
if not src.architecture.is_windows():
- tmp_satlink_path = os.path.join(tmp_working_dir, 'sat')
try:
t = os.getcwd()
except:
# In the jobs, os.getcwd() can fail
t = config.LOCAL.workdir
os.chdir(tmp_working_dir)
- if os.path.lexists(tmp_satlink_path):
- os.remove(tmp_satlink_path)
- os.symlink(os.path.join('salomeTools', 'sat'), 'sat')
+
+ # create a symlink, to avoid reference with "salomeTool/.."
+ os.chdir("PROJECT")
+ if os.path.lexists("ARCHIVES"):
+ os.remove("ARCHIVES")
+ os.symlink("../ARCHIVES", "ARCHIVES")
os.chdir(t)
- d_sat["sat link"] = (tmp_satlink_path, "sat")
+ d_sat["sat archive link"] = (os.path.join(tmp_working_dir,"PROJECT", "ARCHIVES"),
+ os.path.join("PROJECT", "ARCHIVES"))
d_source = src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
return d_source
# DBG.write("END sat config", sat.cfg.APPLICATION, True)
return d_archives_vcs
+def make_bin_archive(prod_name, prod_info, where):
+    '''Create a binary archive of a product from its install directory.
+
+    :param prod_name str: The name of the product.
+    :param prod_info Config: The specific configuration corresponding to the
+                             product
+    :param where str: The path of the repository where to put the resulting
+                      archive
+    :return: The path of the resulting archive
+    :rtype: str
+    '''
+    path_targz_prod = os.path.join(where, prod_name + PACKAGE_EXT)
+    tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
+    bin_path = prod_info.install_dir
+    # store members under the product name (as make_archive does), not under
+    # the full output path of the archive itself
+    tar_prod.add(bin_path, arcname=prod_name)
+    tar_prod.close()
+    return path_targz_prod
+
def make_archive(prod_name, prod_info, where):
'''Create an archive of a product by searching its source directory.
path_targz_prod = os.path.join(where, prod_name + PACKAGE_EXT)
tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
local_path = prod_info.source_dir
- tar_prod.add(local_path,
- arcname=prod_name,
- filter=exclude_VCS_and_extensions)
+ if old_python:
+ tar_prod.add(local_path,
+ arcname=prod_name,
+ exclude=exclude_VCS_and_extensions_26)
+ else:
+ tar_prod.add(local_path,
+ arcname=prod_name,
+ filter=exclude_VCS_and_extensions)
tar_prod.close()
return path_targz_prod
product_pyconf_cfg[section].archive_info.archive_name =\
p_info.name + ".tgz"
+ if (with_vcs) and src.product.product_is_vcs(p_info):
+ # in vcs mode we must replace explicitly the git server url
+ # (or it will not be found later because project files are not exported in archives)
+ for section in product_pyconf_cfg:
+ # replace in all sections of the product pyconf the git repo definition by its substituted value (found in p_info)
+ if "git_info" in product_pyconf_cfg[section]:
+ for repo in product_pyconf_cfg[section].git_info:
+ if repo in p_info.git_info:
+ product_pyconf_cfg[section].git_info[repo] = p_info.git_info[repo]
+
# write the pyconf file to the temporary project location
product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir,
p_name + ".pyconf")
f.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
res = src.pyconf.Config()
app = src.pyconf.deepCopyMapping(config.APPLICATION)
- # no base in packages
- if "base" in app:
- app.base = "no"
+
+ # set base mode to "no" for the archive
+ app.base = "no"
+
# Change the workdir
app.workdir = src.pyconf.Reference(
app,
# Parse the options
(options, args) = parser.parse_args(args)
+
# Check that a type of package is called, and only one
all_option_types = (options.binaries,
options.sources,
options.project not in ["", None],
- options.sat)
+ options.sat,
+ options.bin_products)
# Check if no option for package type
if all_option_types.count(True) == 0:
msg = _("Error: Precise a type for the package\nUse one of the "
"following options: --binaries, --sources, --project or"
- " --salometools")
+ " --salometools, --bin_products")
logger.write(src.printcolors.printcError(msg), 1)
logger.write("\n", 1)
return 1
-
+ do_create_package = options.binaries or options.sources or options.project or options.sat
+
+ if options.bin_products:
+ ret = bin_products_archives(runner.cfg, logger, options.with_vcs)
+ if ret!=0:
+ return ret
+ if not do_create_package:
+ return 0
+
+ # continue to create a tar.gz package
+
# The repository where to put the package if not Binary or Source
package_default_path = runner.cfg.LOCAL.workdir
-
# if the package contains binaries or sources:
- if options.binaries or options.sources:
+ if options.binaries or options.sources or options.bin_products:
# Check that the command has been called with an application
src.check_config_has_application(runner.cfg)
# Remove from config the products that have the not_in_package property
update_config(runner.cfg, logger, "not_in_package", "yes")
- # for binary packages without sources, remove compile time products
- if options.binaries and (not options.sources):
- update_config(runner.cfg, logger, "compile_time", "yes")
-
# get the name of the archive or build it
if options.name:
if os.path.basename(options.name) == options.name:
tar = tarfile.open(path_targz, mode='w:gz')
# get the filtering function if needed
- filter_function = exclude_VCS_and_extensions
+ if old_python:
+ filter_function = exclude_VCS_and_extensions_26
+ else:
+ filter_function = exclude_VCS_and_extensions
# Add the files to the tarfile object
res = add_files(tar, archive_name, d_files_to_add, logger, f_exclude=filter_function)