import shutil
import datetime
import tarfile
+import codecs
+import string
+import pprint as PP
import src
from application import get_SALOME_modules
+import src.debug as DBG
BINARY = "binary"
SOURCE = "Source"
ARCHIVE_DIR = "ARCHIVES"
PROJECT_DIR = "PROJECT"
+IGNORED_DIRS = [".git", ".svn"]
+IGNORED_EXTENSIONS = []
+
PROJECT_TEMPLATE = """#!/usr/bin/env python
#-*- coding:utf-8 -*-
MACHINEPATH : $project_path + "machines/"
"""
-SITE_TEMPLATE = ("""#!/usr/bin/env python
+LOCAL_TEMPLATE = ("""#!/usr/bin/env python
#-*- coding:utf-8 -*-
-SITE :
-{
- log :
- {
- log_dir : $USER.workdir + "/LOGS"
- }
- test :{
- tmp_dir_with_application : '/tmp' + $VARS.sep + $VARS.user + """
-"""$VARS.sep + $APPLICATION.name + $VARS.sep + 'test'
- tmp_dir : '/tmp' + $VARS.sep + $VARS.user + $VARS.sep + 'test'
- timeout : 150
- }
-}
+ LOCAL :
+ {
+ base : 'default'
+ workdir : 'default'
+ log_dir : 'default'
+ archive_dir : 'default'
+ VCS : None
+ tag : None
+ }
PROJECTS :
{
_('Optional: Produce a compilable archive of the sources of the '
'application.'), False)
parser.add_option('', 'with_vcs', 'boolean', 'with_vcs',
- _('Optional: Only source package: do not make archive of vcs products.'),
+ _('Optional: Do not make archive for products in VCS mode (git, cvs, svn). '
+ 'Sat prepare will use VCS mode instead to retrieve them'),
+ False)
+parser.add_option('', 'ftp', 'boolean', 'ftp',
+ _('Optional: Do not embed archives for products in archive mode.'
+ 'Sat prepare will use ftp instead to retrieve them'),
False)
parser.add_option('p', 'project', 'string', 'project',
_('Optional: Produce an archive that contains a project.'), "")
_('Optional: The list of additional files to add to the archive.'), [])
parser.add_option('', 'without_commercial', 'boolean', 'without_commercial',
_('Optional: do not add commercial licence.'), False)
+parser.add_option('', 'without_properties', 'properties', 'without_properties',
+ _('Optional: Filter the products by their properties.\n\tSyntax: '
+ '--without_properties <property>:<value>'))
-def add_files(tar, name_archive, d_content, logger):
+
+def add_files(tar, name_archive, d_content, logger, f_exclude=None):
'''Create an archive containing all directories and files that are given in
the d_content argument.
d_content[label] =
(path_on_local_machine, path_in_archive)
:param logger Logger: the logging instance
+ :param f_exclude Function: the function that filters
:return: 0 if success, 1 if not.
:rtype: int
'''
success = 0
# loop over each directory or file stored in the d_content dictionary
- for name in d_content.keys():
+ names = sorted(d_content.keys())
+ DBG.write("add tar names", names)
+
+ for name in names:
# display information
- len_points = max_len - len(name)
- logger.write(name + " " + len_points * "." + " ", 3)
- # Get the local path and the path in archive
- # of the directory or file to add
+ len_points = max_len - len(name) + 3
local_path, archive_path = d_content[name]
in_archive = os.path.join(name_archive, archive_path)
+ logger.write(name + " " + len_points * "." + " "+ in_archive + " ", 3)
+ # Get the local path and the path in archive
+ # of the directory or file to add
# Add it in the archive
try:
- tar.add(local_path, arcname=in_archive)
+ tar.add(local_path, arcname=in_archive, exclude=f_exclude)
logger.write(src.printcolors.printcSuccess(_("OK")), 3)
except Exception as e:
logger.write(src.printcolors.printcError(_("KO ")), 3)
logger.write("\n", 3)
return success
def exclude_VCS_and_extensions(filename, ignored_dirs=None, ignored_extensions=None):
    ''' The function used to exclude from a package the links to the
    VCS repositories (like .git) and unwanted file extensions.

    Generalized: the ignore lists can be passed explicitly; by default the
    module level IGNORED_DIRS / IGNORED_EXTENSIONS constants are used, so
    existing one-argument callers (tarfile exclude hooks) are unchanged.

    :param filename Str: The filename to exclude (or not).
    :param ignored_dirs List: substrings identifying VCS directories
                              (default: module constant IGNORED_DIRS).
    :param ignored_extensions List: file extensions to exclude
                                    (default: module constant IGNORED_EXTENSIONS).
    :return: True if the file has to be excluded
    :rtype: Boolean
    '''
    if ignored_dirs is None:
        ignored_dirs = IGNORED_DIRS
    if ignored_extensions is None:
        ignored_extensions = IGNORED_EXTENSIONS
    # NOTE: substring match on the whole path, so names such as ".gitignore"
    # are excluded as well — historical behaviour, kept for compatibility.
    for dir_name in ignored_dirs:
        if dir_name in filename:
            return True
    return any(filename.endswith(extension) for extension in ignored_extensions)
+
def produce_relative_launcher(config,
logger,
file_dir,
:rtype: str
'''
- # Get the launcher template
- profile_install_dir = os.path.join(binaries_dir_name,
- config.APPLICATION.profile.product)
- withProfile = src.fileEnviron.withProfile
+ # get KERNEL installation path
+ kernel_root_dir = os.path.join(binaries_dir_name, "KERNEL")
+
+ # set kernel bin dir (considering fhs property)
+ kernel_cfg = src.product.get_product_config(config, "KERNEL")
+ if src.get_property_in_product_cfg(kernel_cfg, "fhs"):
+ bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin")
+ else:
+ bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin","salome")
+
+ # check if the application contains an application module
+ # check also if the application has a distene product,
+ # in this case get its licence file name
+ l_product_info = src.product.get_products_infos(config.APPLICATION.products.keys(), config)
+ salome_application_name="Not defined"
+ distene_licence_file_name=False
+ for prod_name, prod_info in l_product_info:
+ # look for a "salome application" and a distene product
+ if src.get_property_in_product_cfg(prod_info, "is_distene") == "yes":
+ distene_licence_file_name = src.product.product_has_licence(prod_info,
+ config.PATHS.LICENCEPATH)
+ if src.get_property_in_product_cfg(prod_info, "is_salome_application") == "yes":
+ salome_application_name=prod_info.name
+
+ # if the application contains an application module, we set ABSOLUTE_APPLI_PATH to it
+ # if not we set it to KERNEL_INSTALL_DIR, which is sufficient, except for salome test
+ if salome_application_name == "Not defined":
+ app_root_dir=kernel_root_dir
+ else:
+ app_root_dir=os.path.join(binaries_dir_name, salome_application_name)
+
+ # Get the launcher template and do substitutions
+ if "python3" in config.APPLICATION and config.APPLICATION.python3 == "yes":
+ withProfile = src.fileEnviron.withProfile3
+ else:
+ withProfile = src.fileEnviron.withProfile
+
withProfile = withProfile.replace(
- "ABSOLUTE_APPLI_PATH'] = 'PROFILE_INSTALL_DIR'",
- "ABSOLUTE_APPLI_PATH'] = out_dir_Path + '" + config.VARS.sep + profile_install_dir + "'")
+ "ABSOLUTE_APPLI_PATH'] = 'KERNEL_INSTALL_DIR'",
+ "ABSOLUTE_APPLI_PATH'] = out_dir_Path + '" + config.VARS.sep + app_root_dir + "'")
withProfile = withProfile.replace(
- "os.path.join( 'PROFILE_INSTALL_DIR'",
- "os.path.join( out_dir_Path, '" + profile_install_dir + "'")
+ " 'BIN_KERNEL_INSTALL_DIR'",
+ " out_dir_Path + '" + config.VARS.sep + bin_kernel_install_dir + "'")
- before, after = withProfile.split(
- "# here your local standalone environment\n")
+ before, after = withProfile.split("# here your local standalone environment\n")
# create an environment file writer
writer = src.environment.FileEnvWriter(config,
# Little hack to put out_dir_Path outside the strings
src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' )
+ # A hack to put a call to a file for distene licence.
+ # It does nothing to an application that has no distene product
+ if distene_licence_file_name:
+ logger.write("Application has a distene licence file! We use it in package launcher", 5)
+ hack_for_distene_licence(filepath, distene_licence_file_name)
+
# change the rights in order to make the file executable for everybody
os.chmod(filepath,
stat.S_IRUSR |
return filepath
def hack_for_distene_licence(filepath, licence_file):
    '''Replace the distene licence env variables in a launcher by a call
    to a licence file.

    The original launcher is kept next to the new one with an "_old" suffix.
    If no "# Set DISTENE License" marker is found, the launcher is rewritten
    unchanged.

    :param filepath Str: The path to the launcher to modify.
    :param licence_file Str: The path to the distene licence file to load.
    '''
    # keep a backup of the original launcher, then rewrite it in place
    shutil.move(filepath, filepath + "_old")
    filein = filepath + "_old"
    # use context managers so the handles are closed even on error
    # (the original code leaked both handles on exceptions)
    with open(filein, "r") as fin:
        text = fin.readlines()
    # Find the Distene section marker
    num_line = -1
    for i, line in enumerate(text):
        if "# Set DISTENE License" in line:
            num_line = i
            break
    if num_line == -1:
        # No distene product: write the file back unchanged
        with open(filepath, "w") as fout:
            fout.writelines(text)
        return
    # drop the two lines that used to set the licence env variables
    del text[num_line + 1]
    del text[num_line + 1]
    # replace them by a guarded load of the licence file
    text_to_insert = """    import imp
    try:
        distene = imp.load_source('distene_licence', '%s')
        distene.set_distene_variables(context)
    except:
        pass\n""" % licence_file
    text.insert(num_line + 1, text_to_insert)
    with open(filepath, "w") as fout:
        fout.writelines(text)
    return
+
def produce_relative_env_files(config,
logger,
file_dir,
return filepath
def produce_install_bin_file(config,
                             logger,
                             file_dir,
                             d_sub,
                             file_name):
    '''Create a bash shell script that does substitutions in the BINARIES
    directory so that the binaries can be reused for extra compilations.

    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param file_dir str: the directory where to put the file
    :param d_sub dict: the dictionary that contains the substitutions to be done
    :param file_name str: the name of the install script file
    :return: the produced file path
    :rtype: str
    '''
    filepath = os.path.join(file_dir, file_name)
    # codec utf-8 because sat variables are in unicode
    with codecs.open(filepath, "w", 'utf-8') as installbin_file:
        installbin_template_path = os.path.join(config.VARS.internal_dir,
                                                "INSTALL_BIN.template")

        # name of the directory that will contain the binaries
        binaries_dir_name = "BINARIES-" + config.VARS.dist

        # assemble the grep/sed substitution loop
        # (both iterations use the same dict order as the original)
        grep_opts = "".join(" -e " + key for key in d_sub)
        sed_rules = "".join(" s?" + key + "?$(pwd)/" + d_sub[key] + "?g\n"
                            for key in d_sub)
        loop_cmd = ("for f in $(grep -RIl"
                    + grep_opts
                    + ' INSTALL); do\n sed -i "\n'
                    + sed_rules
                    + ' " $f\ndone')

        substitutions = {
            "BINARIES_DIR": binaries_dir_name,
            "SUBSTITUTION_LOOP": loop_cmd,
        }

        # substitute the template and write it to the file
        content = src.template.substitute(installbin_template_path, substitutions)
        installbin_file.write(content)
    # make the file executable for everybody
    os.chmod(filepath,
             stat.S_IRUSR |
             stat.S_IRGRP |
             stat.S_IROTH |
             stat.S_IWUSR |
             stat.S_IXUSR |
             stat.S_IXGRP |
             stat.S_IXOTH)

    return filepath
+
def product_appli_creation_script(config,
logger,
file_dir,
'''
# Get the list of product installation to add to the archive
- l_products_name = config.APPLICATION.products.keys()
+ l_products_name = sorted(config.APPLICATION.products.keys())
l_product_info = src.product.get_products_infos(l_products_name,
config)
l_install_dir = []
l_source_dir = []
l_not_installed = []
l_sources_not_present = []
+ generate_mesa_launcher = False # a flag to know if we generate a mesa launcher
+ if ("APPLICATION" in config and
+ "properties" in config.APPLICATION and
+ "mesa_launcher_in_package" in config.APPLICATION.properties and
+ config.APPLICATION.properties.mesa_launcher_in_package == "yes") :
+ generate_mesa_launcher=True
+
for prod_name, prod_info in l_product_info:
- # ignore the native and fixed products
+ # skip product with property not_in_package set to yes
+ if src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes":
+ continue
+
+ # Add the sources of the products that have the property
+ # sources_in_package : "yes"
+ if src.get_property_in_product_cfg(prod_info,
+ "sources_in_package") == "yes":
+ if os.path.exists(prod_info.source_dir):
+ l_source_dir.append((prod_name, prod_info.source_dir))
+ else:
+ l_sources_not_present.append(prod_name)
+
+ # ignore the native and fixed products for install directories
if (src.product.product_is_native(prod_info)
or src.product.product_is_fixed(prod_info)
or not src.product.product_compiles(prod_info)):
else:
l_not_installed.append(name_cpp)
- # Add the sources of the products that have the property
- # sources_in_package : "yes"
- if src.get_property_in_product_cfg(prod_info,
- "sources_in_package") == "yes":
- if os.path.exists(prod_info.source_dir):
- l_source_dir.append((prod_name, prod_info.source_dir))
- else:
- l_sources_not_present.append(prod_name)
-
+ # check the name of the directory that (could) contains the binaries
+ # from previous detar
+ binaries_from_detar = os.path.join(config.APPLICATION.workdir, "BINARIES-" + config.VARS.dist)
+ if os.path.exists(binaries_from_detar):
+ logger.write("""
+WARNING: existing binaries directory from previous detar installation:
+ %s
+ To make new package from this, you have to:
+ 1) install binaries in INSTALL directory with the script "install_bin.sh"
+ see README file for more details
+ 2) or recompile everything in INSTALL with "sat compile" command
+ this step is long, and requires some linux packages to be installed
+ on your system\n
+""" % binaries_from_detar)
+
# Print warning or error if there are some missing products
if len(l_not_installed) > 0:
text_missing_prods = ""
d_products = {}
for prod_name, install_dir in l_install_dir:
path_in_archive = os.path.join(binaries_dir_name, prod_name)
- d_products[prod_name] = (install_dir, path_in_archive)
+ d_products[prod_name + " (bin)"] = (install_dir, path_in_archive)
for prod_name, source_dir in l_source_dir:
path_in_archive = os.path.join("SOURCES", prod_name)
- d_products[prod_name] = (source_dir, path_in_archive)
-
- # create the relative launcher and add it to the files to add
- if ("profile" in config.APPLICATION and
- "product" in config.APPLICATION.profile):
- launcher_name = config.APPLICATION.profile.launcher_name
- launcher_package = produce_relative_launcher(config,
- logger,
- tmp_working_dir,
- launcher_name,
- binaries_dir_name,
- not(options.without_commercial))
-
- d_products["launcher"] = (launcher_package, launcher_name)
- else:
- # No profile, it means that there has to be some environment files
- env_file = produce_relative_env_files(config,
- logger,
- tmp_working_dir,
- binaries_dir_name)
+ d_products[prod_name + " (sources)"] = (source_dir, path_in_archive)
+
+ # for packages of SALOME applications including KERNEL,
+ # we produce a salome launcher or a virtual application (depending on salome version)
+ if 'KERNEL' in config.APPLICATION.products:
+ VersionSalome = src.get_salome_version(config)
+ # Case where SALOME has the launcher that uses the SalomeContext API
+ if VersionSalome >= 730:
+ # create the relative launcher and add it to the files to add
+ launcher_name = src.get_launcher_name(config)
+ launcher_package = produce_relative_launcher(config,
+ logger,
+ tmp_working_dir,
+ launcher_name,
+ binaries_dir_name,
+ not(options.without_commercial))
+ d_products["launcher"] = (launcher_package, launcher_name)
+
+ # if the application contains mesa products, we generate in addition to the
+ # classical salome launcher a launcher using mesa and called mesa_salome
+ # (the mesa launcher will be used for remote usage through ssh).
+ if generate_mesa_launcher:
+ #if there is one : store the use_mesa property
+ restore_use_mesa_option=None
+ if ('properties' in config.APPLICATION and
+ 'use_mesa' in config.APPLICATION.properties):
+ restore_use_mesa_option = config.APPLICATION.properties.use_mesa
+
+ # activate mesa property, and generate a mesa launcher
+ src.activate_mesa_property(config) #activate use_mesa property
+ launcher_mesa_name="mesa_"+launcher_name
+ launcher_package_mesa = produce_relative_launcher(config,
+ logger,
+ tmp_working_dir,
+ launcher_mesa_name,
+ binaries_dir_name,
+ not(options.without_commercial))
+ d_products["launcher (mesa)"] = (launcher_package_mesa, launcher_mesa_name)
+
+ # if there was a use_mesa value, we restore it
+ # else we set it to the default value "no"
+ if restore_use_mesa_option != None:
+ config.APPLICATION.properties.use_mesa=restore_use_mesa_option
+ else:
+ config.APPLICATION.properties.use_mesa="no"
- d_products["environment file"] = (env_file, "env_launch.sh")
-
- # And provide a script for the creation of an application EDF style
- appli_script = product_appli_creation_script(config,
- logger,
- tmp_working_dir,
- binaries_dir_name)
-
- d_products["appli script"] = (appli_script, "create_appli.py")
-
+ if options.sources:
+ # if we mix binaries and sources, we add a copy of the launcher,
+ # prefixed with "bin",in order to avoid clashes
+ d_products["launcher (copy)"] = (launcher_package, "bin"+launcher_name)
+ else:
+ # Provide a script for the creation of an application EDF style
+ appli_script = product_appli_creation_script(config,
+ logger,
+ tmp_working_dir,
+ binaries_dir_name)
+
+ d_products["appli script"] = (appli_script, "create_appli.py")
+
+ # Put also the environment file
+ env_file = produce_relative_env_files(config,
+ logger,
+ tmp_working_dir,
+ binaries_dir_name)
+
+ d_products["environment file"] = (env_file, "env_launch.sh")
+
return d_products
def source_package(sat, config, logger, options, tmp_working_dir):
:rtype: dict
'''
+ d_archives={}
# Get all the products that are prepared using an archive
- logger.write("Find archive products ... ")
- d_archives, l_pinfo_vcs = get_archives(config, logger)
- logger.write("Done\n")
+ # unless ftp mode is specified (in this case the user of the
+ # archive will get the sources through the ftp mode of sat prepare
+ if not options.ftp:
+ logger.write("Find archive products ... ")
+ d_archives, l_pinfo_vcs = get_archives(config, logger)
+ logger.write("Done\n")
+
d_archives_vcs = {}
if not options.with_vcs and len(l_pinfo_vcs) > 0:
# Make archives with the products that are not prepared using an archive
# Create a project
logger.write("Create the project ... ")
d_project = create_project_for_src_package(config,
- tmp_working_dir,
- options.with_vcs)
+ tmp_working_dir,
+ options.with_vcs,
+ options.ftp)
logger.write("Done\n")
# Add salomeTools
t = os.getcwd()
except:
# In the jobs, os.getcwd() can fail
- t = config.USER.workdir
+ t = config.LOCAL.workdir
os.chdir(tmp_working_dir)
if os.path.lexists(tmp_satlink_path):
os.remove(tmp_satlink_path)
d_sat["sat link"] = (tmp_satlink_path, "sat")
- return src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
+ d_source = src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
+ return d_source
def get_archives(config, logger):
'''Find all the products that are get using an archive and all the products
d_archives = {}
l_pinfo_vcs = []
for p_name, p_info in l_product_info:
+ # skip product with property not_in_package set to yes
+ if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
+ continue
# ignore the native and fixed products
if (src.product.product_is_native(p_info)
or src.product.product_is_fixed(p_info)):
return d_archives, l_pinfo_vcs
def add_salomeTools(config, tmp_working_dir):
- '''Prepare a version of salomeTools that has a specific site.pyconf file
+ '''Prepare a version of salomeTools that has a specific local.pyconf file
configured for a source package.
:param config Config: The global configuration.
sat_running_path = src.Path(config.VARS.salometoolsway)
sat_running_path.copy(sat_tmp_path)
- # Update the site.pyconf file that contains the path to the project
- site_pyconf_name = "site.pyconf"
- site_pyconf_dir = os.path.join(tmp_working_dir, "salomeTools", "data")
- site_pyconf_file = os.path.join(site_pyconf_dir, site_pyconf_name)
- ff = open(site_pyconf_file, "w")
- ff.write(SITE_TEMPLATE)
+ # Update the local.pyconf file that contains the path to the project
+ local_pyconf_name = "local.pyconf"
+ local_pyconf_dir = os.path.join(tmp_working_dir, "salomeTools", "data")
+ local_pyconf_file = os.path.join(local_pyconf_dir, local_pyconf_name)
+ # Remove the .pyconf file in the root directory of salomeTools if there is
+ # any. (For example when launching jobs, a pyconf file describing the jobs
+ # can be here and is not useful)
+ files_or_dir_SAT = os.listdir(os.path.join(tmp_working_dir, "salomeTools"))
+ for file_or_dir in files_or_dir_SAT:
+ if file_or_dir.endswith(".pyconf") or file_or_dir.endswith(".txt"):
+ file_path = os.path.join(tmp_working_dir,
+ "salomeTools",
+ file_or_dir)
+ os.remove(file_path)
+
+ ff = open(local_pyconf_file, "w")
+ ff.write(LOCAL_TEMPLATE)
ff.close()
return sat_tmp_path.path
# clean the source directory of all the vcs products, then use the source
# command and thus construct an archive that will not contain the patches
l_prod_names = [pn for pn, __ in l_pinfo_vcs]
- # clean
- logger.write(_("clean sources\n"))
- args_clean = config.VARS.application
- args_clean += " --sources --products "
- args_clean += ",".join(l_prod_names)
- sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)
- # source
- logger.write(_("get sources"))
- args_source = config.VARS.application
- args_source += " --products "
- args_source += ",".join(l_prod_names)
- sat.source(args_source, batch=True, verbose=0, logger_add_link = logger)
-
- # make the new archives
- d_archives_vcs = {}
- for pn, pinfo in l_pinfo_vcs:
- path_archive = make_archive(pn, pinfo, tmp_working_dir)
- d_archives_vcs[pn] = (path_archive,
- os.path.join(ARCHIVE_DIR, pn + ".tgz"))
+ if False: # clean is dangerous in user/SOURCES, fixed in tmp_local_working_dir
+ logger.write(_("\nclean sources\n"))
+ args_clean = config.VARS.application
+ args_clean += " --sources --products "
+ args_clean += ",".join(l_prod_names)
+ logger.write("WARNING: get_archives_vcs clean\n '%s'\n" % args_clean, 1)
+ sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)
+ if True:
+ # source
+ logger.write(_("get sources\n"))
+ args_source = config.VARS.application
+ args_source += " --products "
+ args_source += ",".join(l_prod_names)
+ svgDir = sat.cfg.APPLICATION.workdir
+ tmp_local_working_dir = os.path.join(sat.cfg.APPLICATION.workdir, "tmp_package") # to avoid too much big files in /tmp
+ sat.cfg.APPLICATION.workdir = tmp_local_working_dir
+ # DBG.write("SSS sat config.APPLICATION.workdir", sat.cfg.APPLICATION, True)
+ # DBG.write("sat config id", id(sat.cfg), True)
+ # shit as config is not same id() as for sat.source()
+ # sat.source(args_source, batch=True, verbose=5, logger_add_link = logger)
+ import source
+ source.run(args_source, sat, logger) #use this mode as runner.cfg reference
+
+ # make the new archives
+ d_archives_vcs = {}
+ for pn, pinfo in l_pinfo_vcs:
+ path_archive = make_archive(pn, pinfo, tmp_local_working_dir)
+ logger.write("make archive vcs '%s'\n" % path_archive)
+ d_archives_vcs[pn] = (path_archive,
+ os.path.join(ARCHIVE_DIR, pn + ".tgz"))
+ sat.cfg.APPLICATION.workdir = svgDir
+ # DBG.write("END sat config", sat.cfg.APPLICATION, True)
return d_archives_vcs
def make_archive(prod_name, prod_info, where, f_exclude=None):
    '''Create a tar.gz archive of the sources of the product prod_name.

    :param prod_name str: The name of the product (used for the archive file
                          name and as the root directory inside the archive).
    :param prod_info Config: The product configuration; only source_dir is read.
    :param where str: The directory where to put the archive.
    :param f_exclude Function: optional predicate taking a member name and
                               returning True when that member must be skipped
                               (defaults to exclude_VCS_and_extensions).
    :return: The path of the produced archive.
    :rtype: str
    '''
    if f_exclude is None:
        f_exclude = exclude_VCS_and_extensions
    path_targz_prod = os.path.join(where, prod_name + ".tgz")
    tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
    local_path = prod_info.source_dir

    # TarFile.add(exclude=...) was removed in Python 3.7 — use filter= instead.
    # NOTE(review): filter receives the in-archive name (arcname based) rather
    # than the local filesystem path the old exclude= hook got; for the VCS
    # substring matching done here the result is the same.
    def _member_filter(tarinfo):
        # returning None drops the member (and stops recursion for directories)
        return None if f_exclude(tarinfo.name) else tarinfo

    tar_prod.add(local_path, arcname=prod_name, filter=_member_filter)
    tar_prod.close()
    return path_targz_prod
-def create_project_for_src_package(config, tmp_working_dir, with_vcs):
+def create_project_for_src_package(config, tmp_working_dir, with_vcs, with_ftp):
'''Create a specific project for a source package.
:param config Config: The global configuration.
source package
:param with_vcs boolean: True if the package is with vcs products (not
transformed into archive products)
+ :param with_ftp boolean: True if the package use ftp servers to get archives
:return: The dictionary
{"project" : (produced project, project path in the archive)}
:rtype: Dict
project_pyconf_file = os.path.join(project_tmp_dir, project_pyconf_name)
ff = open(project_pyconf_file, "w")
ff.write(PROJECT_TEMPLATE)
+ if with_ftp and len(config.PATHS.ARCHIVEFTP) > 0:
+ ftp_path='ARCHIVEFTP : "'+config.PATHS.ARCHIVEFTP[0]
+ for ftpserver in config.PATHS.ARCHIVEFTP[1:]:
+ ftp_path=ftp_path+":"+ftpserver
+ ftp_path+='"'
+ ff.write("# ftp servers where to search for prerequisite archives\n")
+ ff.write(ftp_path)
+ # add licence paths if any
+ if len(config.PATHS.LICENCEPATH) > 0:
+ licence_path='LICENCEPATH : "'+config.PATHS.LICENCEPATH[0]
+ for path in config.PATHS.LICENCEPATH[1:]:
+ licence_path=licence_path+":"+path
+ licence_path+='"'
+ ff.write("\n# Where to search for licences\n")
+ ff.write(licence_path)
+
+
ff.close()
# Loop over the products to get there pyconf and all the scripts
lproducts_name = config.APPLICATION.products.keys()
l_products = src.product.get_products_infos(lproducts_name, config)
for p_name, p_info in l_products:
- # ignore native and fixed products
- if (src.product.product_is_native(p_info) or
- src.product.product_is_fixed(p_info)):
- continue
+ # skip product with property not_in_package set to yes
+ if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
+ continue
find_product_scripts_and_pyconf(p_name,
p_info,
config,
# Prevent from compilation in base
application_pyconf_cfg.APPLICATION.no_base = "yes"
+ #remove products that are not in config (which were filtered by --without_properties)
+ for product_name in application_pyconf_cfg.APPLICATION.products.keys():
+ if product_name not in config.APPLICATION.products.keys():
+ application_pyconf_cfg.APPLICATION.products.__delitem__(product_name)
+
# write the pyconf file to the temporary application location
application_tmp_pyconf_path = os.path.join(application_tmp_dir,
application_name + ".pyconf")
+
ff = open(application_tmp_pyconf_path, 'w')
ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
application_pyconf_cfg.__save__(ff, 1)
ff.close()
-def project_package(project_file_path, tmp_working_dir):
+def project_package(config, name_project, project_file_path, ftp_mode, tmp_working_dir, logger):
'''Prepare a dictionary that stores all the needed directories and files to
add in a project package.
:param project_file_path str: The path to the local project.
+ :param ftp_mode boolean: Do not embed archives, the archive will rely on ftp mode to retrieve them.
:param tmp_working_dir str: The temporary local directory containing some
specific directories or files needed in the
project package
'''
d_project = {}
# Read the project file and get the directories to add to the package
- project_pyconf_cfg = src.pyconf.Config(project_file_path)
- paths = {"ARCHIVEPATH" : "archives",
- "APPLICATIONPATH" : "applications",
+
+ try:
+ project_pyconf_cfg = config.PROJECTS.projects.__getattr__(name_project)
+ except:
+ logger.write("""
+WARNING: inexisting config.PROJECTS.projects.%s, try to read now from:\n%s\n""" % (name_project, project_file_path))
+ project_pyconf_cfg = src.pyconf.Config(project_file_path)
+ project_pyconf_cfg.PWD = os.path.dirname(project_file_path)
+
+ paths = {"APPLICATIONPATH" : "applications",
"PRODUCTPATH" : "products",
"JOBPATH" : "jobs",
"MACHINEPATH" : "machines"}
+ if not ftp_mode:
+ paths["ARCHIVEPATH"] = "archives"
+
# Loop over the project paths and add it
for path in paths:
if path not in project_pyconf_cfg:
return d_project
-def add_readme(config, package_type, where):
+def add_readme(config, options, where):
readme_path = os.path.join(where, "README")
- f = open(readme_path, 'w')
- # prepare substitution dictionary
- d = dict()
- if package_type == BINARY:
- d['application'] = config.VARS.application
+ with codecs.open(readme_path, "w", 'utf-8') as f:
+
+ # templates for building the header
+ readme_header="""
+# This package was generated with sat $version
+# Date: $date
+# User: $user
+# Distribution : $dist
+
+In the following, $$ROOT represents the directory where you have installed
+SALOME (the directory where this file is located).
+
+"""
+ readme_compilation_with_binaries="""
+
+compilation based on the binaries used as prerequisites
+=======================================================
+
+If you fail to compile the complete application (for example because
+you are not root on your system and cannot install missing packages), you
+may try a partial compilation based on the binaries.
+For that it is necessary to copy the binaries from BINARIES to INSTALL,
+and do some substitutions on cmake and .la files (replace the build directories
+with local paths).
+The procedure to do it is:
+ 1) Remove or rename INSTALL directory if it exists
+ 2) Execute the shell script install_bin.sh:
+ > cd $ROOT
+ > ./install_bin.sh
+ 3) Use SalomeTool (as explained in Sources section) and compile only the
+ modules you need to (with -p option)
+
+"""
+ readme_header_tpl=string.Template(readme_header)
+ readme_template_path_bin = os.path.join(config.VARS.internal_dir,
+ "README_BIN.template")
+ readme_template_path_bin_launcher = os.path.join(config.VARS.internal_dir,
+ "README_LAUNCHER.template")
+ readme_template_path_bin_virtapp = os.path.join(config.VARS.internal_dir,
+ "README_BIN_VIRTUAL_APP.template")
+ readme_template_path_src = os.path.join(config.VARS.internal_dir,
+ "README_SRC.template")
+ readme_template_path_pro = os.path.join(config.VARS.internal_dir,
+ "README_PROJECT.template")
+ readme_template_path_sat = os.path.join(config.VARS.internal_dir,
+ "README_SAT.template")
+
+ # prepare substitution dictionary
+ d = dict()
d['user'] = config.VARS.user
d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
d['version'] = config.INTERNAL.sat_version
d['dist'] = config.VARS.dist
- if 'profile' in config.APPLICATION:
- d['launcher'] = config.APPLICATION.profile.launcher_name
- readme_template_path = os.path.join(config.VARS.internal_dir,
- "README_BIN.template")
- else:
- d['env_file'] = 'env_launch.sh'
- readme_template_path = os.path.join(config.VARS.internal_dir,
- "README_BIN_NO_PROFILE.template")
-
- if package_type == SOURCE:
- d['application'] = config.VARS.application
- d['user'] = config.VARS.user
- d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
- d['version'] = config.INTERNAL.sat_version
- if 'profile' in config.APPLICATION:
- d['profile'] = config.APPLICATION.profile.product
- d['launcher'] = config.APPLICATION.profile.launcher_name
- readme_template_path = os.path.join(config.VARS.internal_dir,
- "README_SRC.template")
+ f.write(readme_header_tpl.substitute(d)) # write the general header (common)
+
+ if options.binaries or options.sources:
+ d['application'] = config.VARS.application
+ f.write("# Application: " + d['application'] + "\n")
+ if 'KERNEL' in config.APPLICATION.products:
+ VersionSalome = src.get_salome_version(config)
+ # Case where SALOME has the launcher that uses the SalomeContext API
+ if VersionSalome >= 730:
+ d['launcher'] = config.APPLICATION.profile.launcher_name
+ else:
+ d['virtual_app'] = 'runAppli' # this info is not used now)
- if package_type == PROJECT:
- d['user'] = config.VARS.user
- d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
- d['version'] = config.INTERNAL.sat_version
- readme_template_path = os.path.join(config.VARS.internal_dir,
- "README_PROJECT.template")
+ # write the specific sections
+ if options.binaries:
+ f.write(src.template.substitute(readme_template_path_bin, d))
+ if "virtual_app" in d:
+ f.write(src.template.substitute(readme_template_path_bin_virtapp, d))
+ if "launcher" in d:
+ f.write(src.template.substitute(readme_template_path_bin_launcher, d))
- if package_type == SAT:
- d['user'] = config.VARS.user
- d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
- d['version'] = config.INTERNAL.sat_version
- readme_template_path = os.path.join(config.VARS.internal_dir,
- "README_SAT.template")
-
- f.write(src.template.substitute(readme_template_path, d))
+ if options.sources:
+ f.write(src.template.substitute(readme_template_path_src, d))
+
+ if options.binaries and options.sources:
+ f.write(readme_compilation_with_binaries)
+
+ if options.project:
+ f.write(src.template.substitute(readme_template_path_pro, d))
+
+ if options.sat:
+ f.write(src.template.substitute(readme_template_path_sat, d))
return readme_path
-
+
def update_config(config, prop, value):
    '''Remove from config.APPLICATION.products the products whose property
    *prop* has the value *value*.

    :param config Config: The global config.
    :param prop str: The property to filter
    :param value str: The value of the property to filter
    '''
    # nothing to filter when no application is defined (e.g. sat package -t)
    if "APPLICATION" not in config:
        return
    # collect first, then delete: never mutate the mapping while iterating it
    l_product_to_remove = [
        product_name
        for product_name in config.APPLICATION.products.keys()
        if src.get_property_in_product_cfg(
               src.product.get_product_config(config, product_name),
               prop) == value
    ]
    for product_name in l_product_to_remove:
        config.APPLICATION.products.__delitem__(product_name)
def description():
    '''Build the help text shown when salomeTools is called with --help
    for the package command.

    :return: The text to display for the package command description.
    :rtype: str
    '''
    # keep the user-visible text wrapped in _() so it stays translatable
    help_text = _("""
The package command creates a tar file archive of a product.
There are four kinds of archive, which can be mixed:

  1 - The binary archive.
      It contains the product installation directories plus a launcher.
  2 - The sources archive.
      It contains the product archives, a project (the application plus salomeTools).
  3 - The project archive.
      It contains a project (give the project file path as argument).
  4 - The salomeTools archive.
      It contains code utility salomeTools.

example:
  >> sat package SALOME-master --binaries --sources""")
    return help_text
def run(args, runner, logger):
'''method that is called when salomeTools is called with package parameter.
# Parse the options
(options, args) = parser.parse_args(args)
-
+
# Check that a type of package is called, and only one
all_option_types = (options.binaries,
options.sources,
logger.write("\n", 1)
return 1
- # Check for only one option for package type
- if all_option_types.count(True) > 1:
- msg = _("Error: You can use only one type for the package\nUse only one"
- " of the following options: --binaries, --sources, --project or"
- " --salometools")
- logger.write(src.printcolors.printcError(msg), 1)
- logger.write("\n", 1)
- return 1
-
- # Get the package type
- if options.binaries:
- package_type = BINARY
- if options.sources:
- package_type = SOURCE
- if options.project:
- package_type = PROJECT
- if options.sat:
- package_type = SAT
-
# The repository where to put the package if not Binary or Source
- package_default_path = runner.cfg.USER.workdir
+ package_default_path = runner.cfg.LOCAL.workdir
- if package_type in [BINARY, SOURCE]:
+ # if the package contains binaries or sources:
+ if options.binaries or options.sources:
# Check that the command has been called with an application
src.check_config_has_application(runner.cfg)
runner.cfg.VARS.application), 1)
# Get the default directory where to put the packages
- package_default_path = os.path.join(runner.cfg.APPLICATION.workdir,
- "PACKAGE")
+ package_default_path = os.path.join(runner.cfg.APPLICATION.workdir, "PACKAGE")
src.ensure_path_exists(package_default_path)
- elif package_type == PROJECT:
+ # if the package contains a project:
+ if options.project:
# check that the project is visible by SAT
- if options.project not in runner.cfg.PROJECTS.project_file_paths:
- site_path = os.path.join(runner.cfg.VARS.salometoolsway,
- "data",
- "site.pyconf")
- msg = _("ERROR: the project %(proj)s is not visible by salomeTools."
- "\nPlease add it in the %(site)s file." % {
- "proj" : options.project, "site" : site_path})
+ projectNameFile = options.project + ".pyconf"
+ foundProject = None
+ for i in runner.cfg.PROJECTS.project_file_paths:
+ baseName = os.path.basename(i)
+ if baseName == projectNameFile:
+ foundProject = i
+ break
+
+ if foundProject is None:
+ local_path = os.path.join(runner.cfg.VARS.salometoolsway, "data", "local.pyconf")
+ msg = _("""ERROR: the project %(1)s is not visible by salomeTools.
+known projects are:
+%(2)s
+
+Please add it in file:
+%(3)s""" % \
+ {"1": options.project, "2": "\n ".join(runner.cfg.PROJECTS.project_file_paths), "3": local_path})
logger.write(src.printcolors.printcError(msg), 1)
logger.write("\n", 1)
return 1
+ else:
+ options.project_file_path = foundProject
+ src.printcolors.print_value(logger, "Project path", options.project_file_path, 2)
- # Print
- src.printcolors.print_value(logger, "Package type", package_type, 2)
-
- # get the name of the archive or construct it
+ # Remove the products that are filtered by the --without_properties option
+ if options.without_properties:
+ app = runner.cfg.APPLICATION
+ logger.trace("without_properties all products:\n %s\n" % PP.pformat(sorted(app.products.keys())))
+ prop, value = options.without_properties
+ update_config(runner.cfg, prop, value)
+ logger.warning("without_properties selected products:\n %s\n" % PP.pformat(sorted(app.products.keys())))
+
+ # Remove from config the products that have the not_in_package property
+ update_config(runner.cfg, "not_in_package", "yes")
+
+ # get the name of the archive or build it
if options.name:
if os.path.basename(options.name) == options.name:
# only a name (not a path)
archive_name = archive_name[:-len(".tar.gz")]
else:
+ archive_name=""
dir_name = package_default_path
- if package_type == BINARY:
- archive_name = (runner.cfg.APPLICATION.name +
- "-" +
- runner.cfg.VARS.dist)
+ if options.binaries or options.sources:
+ archive_name = runner.cfg.APPLICATION.name
+
+ if options.binaries:
+ archive_name += "-"+runner.cfg.VARS.dist
- if package_type == SOURCE:
- archive_name = (runner.cfg.APPLICATION.name +
- "-" +
- "SRC")
+ if options.sources:
+ archive_name += "-SRC"
if options.with_vcs:
- archive_name = (runner.cfg.APPLICATION.name +
- "-" +
- "SRC" +
- "-" +
- "VCS")
-
- if package_type == PROJECT:
- project_name, __ = os.path.splitext(
- os.path.basename(options.project))
- archive_name = ("PROJECT" +
- "-" +
- project_name)
+ archive_name += "-VCS"
+
+ if options.project:
+ project_name = options.project
+ archive_name += ("PROJECT-" + project_name)
- if package_type == SAT:
- archive_name = ("salomeTools" +
- "-" +
- runner.cfg.INTERNAL.sat_version)
+ if options.sat:
+ archive_name += ("salomeTools_" + runner.cfg.INTERNAL.sat_version)
+ if len(archive_name)==0: # no option worked
+ msg = _("Error: Cannot name the archive\n"
+ " check if at least one of the following options was "
+ "selected : --binaries, --sources, --project or"
+ " --salometools")
+ logger.write(src.printcolors.printcError(msg), 1)
+ logger.write("\n", 1)
+ return 1
path_targz = os.path.join(dir_name, archive_name + ".tgz")
- # Print the path of the package
src.printcolors.print_value(logger, "Package path", path_targz, 2)
# Create a working directory for all files that are produced during the
# package creation and that will be removed at the end of the command
- tmp_working_dir = os.path.join(runner.cfg.VARS.tmp_root,
- runner.cfg.VARS.datehour)
+ tmp_working_dir = os.path.join(runner.cfg.VARS.tmp_root, runner.cfg.VARS.datehour)
src.ensure_path_exists(tmp_working_dir)
logger.write("\n", 5)
logger.write(_("The temporary working directory: %s\n" % tmp_working_dir),5)
msg = _("Preparation of files to add to the archive")
logger.write(src.printcolors.printcLabel(msg), 2)
logger.write("\n", 2)
+
+ d_files_to_add={} # content of the archive
- if package_type == BINARY:
- d_files_to_add = binary_package(runner.cfg,
- logger,
- options,
- tmp_working_dir)
- if not(d_files_to_add):
- return 1
+ # a dict to hold paths that will need to be substituted for users' recompilations
+ d_paths_to_substitute={}
- if package_type == SOURCE:
- d_files_to_add = source_package(runner,
+ if options.binaries:
+ d_bin_files_to_add = binary_package(runner.cfg,
+ logger,
+ options,
+ tmp_working_dir)
+ # for all binaries dir, store the substitution that will be required
+ # for extra compilations
+ for key in d_bin_files_to_add:
+ if key.endswith("(bin)"):
+ source_dir = d_bin_files_to_add[key][0]
+ path_in_archive = d_bin_files_to_add[key][1].replace("BINARIES-" + runner.cfg.VARS.dist,"INSTALL")
+ if os.path.basename(source_dir)==os.path.basename(path_in_archive):
+ # if basename is the same we will just substitute the dirname
+ d_paths_to_substitute[os.path.dirname(source_dir)]=\
+ os.path.dirname(path_in_archive)
+ else:
+ d_paths_to_substitute[source_dir]=path_in_archive
+
+ d_files_to_add.update(d_bin_files_to_add)
+
+ if options.sources:
+ d_files_to_add.update(source_package(runner,
runner.cfg,
logger,
options,
- tmp_working_dir)
-
- if package_type == PROJECT:
- d_files_to_add = project_package(options.project, tmp_working_dir)
+ tmp_working_dir))
+ if options.binaries:
+ # for archives with bin and sources we provide a shell script able to
+ # install binaries for compilation
+ file_install_bin=produce_install_bin_file(runner.cfg,logger,
+ tmp_working_dir,
+ d_paths_to_substitute,
+ "install_bin.sh")
+ d_files_to_add.update({"install_bin" : (file_install_bin, "install_bin.sh")})
+ logger.write("substitutions that need to be done later : \n", 5)
+ logger.write(str(d_paths_to_substitute), 5)
+ logger.write("\n", 5)
+ else:
+ # --salomeTool option is not considered when --sources is selected, as this option
+ # already brings salomeTool!
+ if options.sat:
+ d_files_to_add.update({"salomeTools" : (runner.cfg.VARS.salometoolsway, "")})
+
+ if options.project:
+ DBG.write("config for package %s" % project_name, runner.cfg)
+ d_files_to_add.update(project_package(runner.cfg, project_name, options.project_file_path, options.ftp, tmp_working_dir, logger))
+
+ if not(d_files_to_add):
+ msg = _("Error: Empty dictionnary to build the archive!\n")
+ logger.write(src.printcolors.printcError(msg), 1)
+ logger.write("\n", 1)
+ return 1
- if package_type == SAT:
- d_files_to_add = {"salomeTools" : (runner.cfg.VARS.salometoolsway, "")}
-
# Add the README file in the package
- local_readme_tmp_path = add_readme(runner.cfg,
- package_type,
- tmp_working_dir)
+ local_readme_tmp_path = add_readme(runner.cfg, options, tmp_working_dir)
d_files_to_add["README"] = (local_readme_tmp_path, "README")
# Add the additional files of option add_files
d_files_to_add[file_name] = (file_path, file_name)
logger.write("\n", 2)
-
logger.write(src.printcolors.printcLabel(_("Actually do the package")), 2)
logger.write("\n", 2)
-
+ logger.write("\nfiles and directories to add:\n%s\n\n" % PP.pformat(d_files_to_add), 5)
+
+ res = 0
try:
# Creating the object tarfile
tar = tarfile.open(path_targz, mode='w:gz')
+ # get the filtering function if needed
+ filter_function = exclude_VCS_and_extensions
+
# Add the files to the tarfile object
- res = add_files(tar, archive_name, d_files_to_add, logger)
+ res = add_files(tar, archive_name, d_files_to_add, logger, f_exclude=filter_function)
tar.close()
except KeyboardInterrupt:
logger.write(src.printcolors.printcError("\nERROR: forced interruption\n"), 1)
- logger.write(_("Removing the temporary working directory ... "), 1)
+ logger.write(_("Removing the temporary working directory '%s'... ") % tmp_working_dir, 1)
# remove the working directory
shutil.rmtree(tmp_working_dir)
logger.write(_("OK"), 1)
logger.write(_("\n"), 1)
return 1
- # remove the working directory
- shutil.rmtree(tmp_working_dir)
+ # case if no application, only package sat as 'sat package -t'
+ try:
+ app = runner.cfg.APPLICATION
+ except:
+ app = None
+
+ # if an application is defined, remove its tmp_package working directory
+ if app is not None:
+ tmp_local_working_dir = os.path.join(app.workdir, "tmp_package")
+ if os.path.isdir(tmp_local_working_dir):
+ shutil.rmtree(tmp_local_working_dir)
+
+ # have to decide some time
+ DBG.tofix("make shutil.rmtree('%s') effective" % tmp_working_dir, "", DBG.isDeveloper())
# Print again the path of the package
logger.write("\n", 2)