X-Git-Url: http://git.salome-platform.org/gitweb/?a=blobdiff_plain;f=commands%2Fpackage.py;h=bbb20bed7f62b1bbd22c5b62f9d4f5d9deb14ee2;hb=512d8e507e03381f229b389baceef8a9e6afda9f;hp=1e3ca2d35b389fd8cabf5a7e03c69e5b2559a9a3;hpb=11fef9f12704abc061e16e6e0335258b0b59ad66;p=tools%2Fsat.git diff --git a/commands/package.py b/commands/package.py index 1e3ca2d..bbb20be 100644 --- a/commands/package.py +++ b/commands/package.py @@ -25,12 +25,14 @@ import codecs import string import glob import pprint as PP - +import sys import src from application import get_SALOME_modules import src.debug as DBG +old_python = sys.version_info[0] == 2 and sys.version_info[1] <= 6 + BINARY = "binary" SOURCE = "Source" PROJECT = "Project" @@ -42,7 +44,10 @@ PROJECT_DIR = "PROJECT" IGNORED_DIRS = [".git", ".svn"] IGNORED_EXTENSIONS = [] -PROJECT_TEMPLATE = """#!/usr/bin/env python +PACKAGE_EXT=".tar.gz" # the extension we use for the packages + +if src.architecture.is_windows(): + PROJECT_TEMPLATE = """#!/usr/bin/env python #-*- coding:utf-8 -*- # The path to the archive root directory @@ -61,6 +66,25 @@ JOBPATH : $project_path + "jobs/" # Where to search the pyconf of the machines of the project MACHINEPATH : $project_path + "machines/" """ +else: + PROJECT_TEMPLATE = """#!/usr/bin/env python +#-*- coding:utf-8 -*- + +# path to the PROJECT +project_path : $PWD + "/" + +# Where to search the archives of the products +ARCHIVEPATH : $project_path + "ARCHIVES" +# Where to search the pyconf of the applications +APPLICATIONPATH : $project_path + "applications/" +# Where to search the pyconf of the products +PRODUCTPATH : $project_path + "products/" +# Where to search the pyconf of the jobs of the project +JOBPATH : $project_path + "jobs/" +# Where to search the pyconf of the machines of the project +MACHINEPATH : $project_path + "machines/" +""" + LOCAL_TEMPLATE = ("""#!/usr/bin/env python #-*- coding:utf-8 -*- @@ -71,14 +95,15 @@ LOCAL_TEMPLATE = ("""#!/usr/bin/env python workdir : 'default' log_dir : 'default' archive_dir : 'default' - VCS : None - tag : None + VCS : 'unknown' + tag : 'unknown' } PROJECTS : { -project_file_paths : [$VARS.salometoolsway + $VARS.sep + \"..\" + $VARS.sep""" -""" + \"""" + PROJECT_DIR + """\" + $VARS.sep + "project.pyconf"] + project_file_paths : + [ + ] } """) @@ -92,14 +117,19 @@ parser.add_option('f', 'force_creation', 'boolean', 'force_creation', parser.add_option('s', 'sources', 'boolean', 'sources', _('Optional: Produce a compilable archive of the sources of the ' 'application.'), False) +parser.add_option('', 'bin_products', 'boolean', 'bin_products', + _('Optional: Create binary archives for all products.'), False) parser.add_option('', 'with_vcs', 'boolean', 'with_vcs', _('Optional: Do not make archive for products in VCS mode (git, cvs, svn). ' - 'Sat prepare will use VCS mode instead to retrieve them'), + 'Sat prepare will use VCS mode instead to retrieve them.' + '\n Also, when combined with "--bin_products" option, restrict the building of product archives to VCS products.'), False) parser.add_option('', 'ftp', 'boolean', 'ftp', _('Optional: Do not embed archives for products in archive mode.' 
'Sat prepare will use ftp instead to retrieve them'), False) +parser.add_option('e', 'exe', 'string', 'exe', + _('Optional: Produce an extra launcher based upon the exe given as argument.'), "") parser.add_option('p', 'project', 'string', 'project', _('Optional: Produce an archive that contains a project.'), "") parser.add_option('t', 'salometools', 'boolean', 'sat', @@ -150,7 +180,14 @@ def add_files(tar, name_archive, d_content, logger, f_exclude=None): try: key=local_path+"->"+in_archive if key not in already_added: - tar.add(local_path, arcname=in_archive, exclude=f_exclude) + if old_python: + tar.add(local_path, + arcname=in_archive, + exclude=exclude_VCS_and_extensions_26) + else: + tar.add(local_path, + arcname=in_archive, + filter=exclude_VCS_and_extensions) already_added.add(key) logger.write(src.printcolors.printcSuccess(_("OK")), 3) except Exception as e: @@ -160,9 +197,10 @@ def add_files(tar, name_archive, d_content, logger, f_exclude=None): logger.write("\n", 3) return success -def exclude_VCS_and_extensions(filename): + +def exclude_VCS_and_extensions_26(filename): ''' The function that is used to exclude from package the link to the - VCS repositories (like .git) + VCS repositories (like .git) (only for python 2.6) :param filename Str: The filname to exclude (or not). :return: True if the file has to be exclude @@ -176,6 +214,23 @@ def exclude_VCS_and_extensions(filename): return True return False +def exclude_VCS_and_extensions(tarinfo): + ''' The function that is used to exclude from package the link to the + VCS repositories (like .git) + + :param filename Str: The filname to exclude (or not). + :return: None if the file has to be exclude + :rtype: tarinfo or None + ''' + filename = tarinfo.name + for dir_name in IGNORED_DIRS: + if dir_name in filename: + return None + for extension in IGNORED_EXTENSIONS: + if filename.endswith(extension): + return None + return tarinfo + def produce_relative_launcher(config, logger, file_dir, @@ -194,8 +249,21 @@ def produce_relative_launcher(config, :rtype: str ''' + # set base mode to "no" for the archive - save current mode to restore it at the end + if "base" in config.APPLICATION: + base_setting=config.APPLICATION.base + else: + base_setting="maybe" + config.APPLICATION.base="no" + # get KERNEL installation path - kernel_root_dir = os.path.join(binaries_dir_name, "KERNEL") + kernel_info = src.product.get_product_config(config, "KERNEL") + kernel_base_name=os.path.basename(kernel_info.install_dir) + if kernel_info.install_mode == "base": + # case of kernel installed in base. 
the kernel install dir name is different in the archive + kernel_base_name=os.path.basename(os.path.dirname(kernel_info.install_dir)) + + kernel_root_dir = os.path.join(binaries_dir_name, kernel_base_name) # set kernel bin dir (considering fhs property) kernel_cfg = src.product.get_product_config(config, "KERNEL") @@ -234,6 +302,8 @@ def produce_relative_launcher(config, additional_env['sat_python_version'] = 2 additional_env['ABSOLUTE_APPLI_PATH'] = "out_dir_Path" + config.VARS.sep + app_root_dir + launcher_name = src.get_launcher_name(config) + additional_env['APPLI'] = "out_dir_Path" + config.VARS.sep + file_name # create an environment file writer writer = src.environment.FileEnvWriter(config, @@ -253,7 +323,7 @@ def produce_relative_launcher(config, # Little hack to put out_dir_Path outside the strings src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' ) - src.replace_in_file(filepath, "'out_dir_Path + ", "out_dir_Path + '" ) + src.replace_in_file(filepath, "r'out_dir_Path + ", "out_dir_Path + r'" ) # A hack to put a call to a file for distene licence. # It does nothing to an application that has no distene product @@ -271,6 +341,9 @@ def produce_relative_launcher(config, stat.S_IXGRP | stat.S_IXOTH) + # restore modified setting by its initial value + config.APPLICATION.base=base_setting + return filepath def hack_for_distene_licence(filepath, licence_file): @@ -300,7 +373,7 @@ def hack_for_distene_licence(filepath, licence_file): del text[num_line +1] del text[num_line +1] text_to_insert =""" try: - distene_licence_file="%s" + distene_licence_file=r"%s" if sys.version_info[0] >= 3 and sys.version_info[1] >= 5: import importlib.util spec_dist = importlib.util.spec_from_file_location("distene_licence", distene_licence_file) @@ -322,7 +395,8 @@ def hack_for_distene_licence(filepath, licence_file): def produce_relative_env_files(config, logger, file_dir, - binaries_dir_name): + binaries_dir_name, + exe_name=None): '''Create some specific environment files for the binary package. These files use relative paths. @@ -331,9 +405,18 @@ def produce_relative_env_files(config, :param file_dir str: the directory where to put the files :param binaries_dir_name str: the name of the repository where the binaries are, in the archive. 
+ :param exe_name str: if given generate a launcher executing exe_name :return: the list of path of the produced environment files :rtype: List ''' + + # set base mode to "no" for the archive - save current mode to restore it at the end + if "base" in config.APPLICATION: + base_setting=config.APPLICATION.base + else: + base_setting="maybe" + config.APPLICATION.base="no" + # create an environment file writer writer = src.environment.FileEnvWriter(config, logger, @@ -347,6 +430,9 @@ def produce_relative_env_files(config, shell = "bash" filename = "env_launch.sh" + if exe_name: + filename=os.path.basename(exe_name) + # Write filepath = writer.write_env_file(filename, False, # for launch @@ -354,7 +440,21 @@ def produce_relative_env_files(config, for_package = binaries_dir_name) # Little hack to put out_dir_Path as environment variable - src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' ) + if src.architecture.is_windows() : + src.replace_in_file(filepath, '"out_dir_Path', '"%out_dir_Path%' ) + src.replace_in_file(filepath, '=out_dir_Path', '=%out_dir_Path%' ) + src.replace_in_file(filepath, ';out_dir_Path', ';%out_dir_Path%' ) + else: + src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' ) + src.replace_in_file(filepath, ':out_dir_Path', ':${out_dir_Path}' ) + + if exe_name: + if src.architecture.is_windows(): + cmd="\n\nrem Launch exe with user arguments\n%s " % exe_name + "%*" + else: + cmd='\n\n# Launch exe with user arguments\n%s "$*"' % exe_name + with open(filepath, "a") as exe_launcher: + exe_launcher.write(cmd) # change the rights in order to make the file executable for everybody os.chmod(filepath, @@ -366,6 +466,9 @@ def produce_relative_env_files(config, stat.S_IXGRP | stat.S_IXOTH) + # restore modified setting by its initial value + config.APPLICATION.base=base_setting + return filepath def produce_install_bin_file(config, @@ -489,6 +592,53 @@ def product_appli_creation_script(config, return tmp_file_path +def bin_products_archives(config, logger, only_vcs): + '''Prepare binary packages for all products + :param config Config: The global configuration. 
+    :return: the error status
+    :rtype: bool
+    '''
+
+    logger.write("Make %s binary archives\n" % config.VARS.dist)
+    # Get the default directory where to put the packages
+    binpackage_path = os.path.join(config.APPLICATION.workdir, "PACKAGE", "products")
+    src.ensure_path_exists(binpackage_path)
+    # Get the list of product installations to add to the archive
+    l_products_name = sorted(config.APPLICATION.products.keys())
+    l_product_info = src.product.get_products_infos(l_products_name,
+                                                    config)
+    # first loop on products : filter products, analyse properties,
+    # and store the information that will be used to create the archive in the second loop
+    l_not_installed=[] # store not installed products for warning at the end
+    for prod_name, prod_info in l_product_info:
+        # ignore the native and fixed products for install directories
+        if (src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes"
+            or src.product.product_is_native(prod_info)
+            or src.product.product_is_fixed(prod_info)
+            or not src.product.product_compiles(prod_info)):
+            continue
+        if only_vcs and not src.product.product_is_vcs(prod_info):
+            continue
+        if not src.product.check_installation(config, prod_info):
+            l_not_installed.append(prod_name)
+            continue  # product is not installed, we skip it
+        # prepare call to make_bin_archive
+        path_targz_prod = os.path.join(binpackage_path, prod_name + '-' + prod_info.version + "-" + config.VARS.dist + PACKAGE_EXT)
+        targz_prod = tarfile.open(path_targz_prod, mode='w:gz')
+        bin_path = prod_info.install_dir
+        targz_prod.add(bin_path)
+        targz_prod.close()
+        # compute the md5 checksum of the archive and write it in a .md5 side file
+        import hashlib
+        with open(path_targz_prod, "rb") as f:
+            archive_bytes = f.read()  # read file as bytes
+            readable_hash = hashlib.md5(archive_bytes).hexdigest()
+        with open(path_targz_prod + ".md5", "w") as md5sum:
+            md5sum.write("%s %s" % (readable_hash, os.path.basename(path_targz_prod)))
+        logger.write("  archive : %s (md5sum = %s)\n" % (path_targz_prod, readable_hash))
+
+    return 0
+
 def binary_package(config, logger, options, tmp_working_dir):
     '''Prepare a dictionary that stores all the needed directories and files
        to add in a binary package.
@@ -509,6 +659,11 @@ def binary_package(config, logger, options, tmp_working_dir): l_products_name = sorted(config.APPLICATION.products.keys()) l_product_info = src.product.get_products_infos(l_products_name, config) + + # suppress compile time products for binaries-only archives + if not options.sources: + update_config(config, logger, "compile_time", "yes") + l_install_dir = [] l_source_dir = [] l_not_installed = [] @@ -520,6 +675,8 @@ def binary_package(config, logger, options, tmp_working_dir): config.APPLICATION.properties.mesa_launcher_in_package == "yes") : generate_mesa_launcher=True + # first loop on products : filter products, analyse properties, + # and store the information that will be used to create the archive in the second loop for prod_name, prod_info in l_product_info: # skip product with property not_in_package set to yes if src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes": @@ -539,8 +696,13 @@ def binary_package(config, logger, options, tmp_working_dir): or src.product.product_is_fixed(prod_info) or not src.product.product_compiles(prod_info)): continue + # + # products with single_dir property will be installed in the PRODUCTS directory of the archive + is_single_dir=(src.appli_test_property(config,"single_install_dir", "yes") and \ + src.product.product_test_property(prod_info,"single_install_dir", "yes")) if src.product.check_installation(config, prod_info): - l_install_dir.append((prod_name, prod_info.install_dir)) + l_install_dir.append((prod_name, prod_info.name, prod_info.install_dir, + is_single_dir, prod_info.install_mode)) else: l_not_installed.append(prod_name) @@ -552,7 +714,7 @@ def binary_package(config, logger, options, tmp_working_dir): config.INTERNAL.config.install_dir, name_cpp) if os.path.exists(install_dir): - l_install_dir.append((name_cpp, install_dir)) + l_install_dir.append((name_cpp, name_cpp, install_dir, False, "value")) else: l_not_installed.append(name_cpp) @@ -577,13 +739,13 @@ WARNING: existing binaries directory from previous detar installation: if len(l_not_installed) > 0: text_missing_prods = "" for p_name in l_not_installed: - text_missing_prods += "-" + p_name + "\n" + text_missing_prods += " - " + p_name + "\n" if not options.force_creation: - msg = _("ERROR: there are missing products installations:") + msg = _("ERROR: there are missing product installations:") logger.write("%s\n%s" % (src.printcolors.printcError(msg), text_missing_prods), 1) - return None + raise src.SatException(msg) else: msg = _("WARNING: there are missing products installations:") logger.write("%s\n%s" % (src.printcolors.printcWarning(msg), @@ -596,11 +758,11 @@ WARNING: existing binaries directory from previous detar installation: for p_name in l_sources_not_present: text_missing_prods += "-" + p_name + "\n" if not options.force_creation: - msg = _("ERROR: there are missing products sources:") + msg = _("ERROR: there are missing product sources:") logger.write("%s\n%s" % (src.printcolors.printcError(msg), text_missing_prods), 1) - return None + raise src.SatException(msg) else: msg = _("WARNING: there are missing products sources:") logger.write("%s\n%s" % (src.printcolors.printcWarning(msg), @@ -608,19 +770,38 @@ WARNING: existing binaries directory from previous detar installation: 1) # construct the name of the directory that will contain the binaries - binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist - + if src.architecture.is_windows(): + binaries_dir_name = config.INTERNAL.config.binary_dir + else: + 
binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist # construct the correlation table between the product names, there # actual install directories and there install directory in archive d_products = {} - for prod_name, install_dir in l_install_dir: - path_in_archive = os.path.join(binaries_dir_name, os.path.basename(install_dir)) + for prod_name, prod_info_name, install_dir, is_single_dir, install_mode in l_install_dir: + prod_base_name=os.path.basename(install_dir) + if install_mode == "base": + # case of a products installed in base. + # because the archive is in base:no mode, the name of the install dir is different inside archive + # we set it to the product name or by PRODUCTS if single-dir + if is_single_dir: + prod_base_name=config.INTERNAL.config.single_install_dir + else: + prod_base_name=prod_info_name + path_in_archive = os.path.join(binaries_dir_name, prod_base_name) d_products[prod_name + " (bin)"] = (install_dir, path_in_archive) for prod_name, source_dir in l_source_dir: path_in_archive = os.path.join("SOURCES", prod_name) d_products[prod_name + " (sources)"] = (source_dir, path_in_archive) + # create an archives of compilation logs, and insert it into the tarball + logpath=os.path.join(config.APPLICATION.workdir, "LOGS") + path_targz_logs = os.path.join(tmp_working_dir, "logs.tgz") + tar_log = tarfile.open(path_targz_logs, mode='w:gz') + tar_log.add(logpath, arcname="LOGS") + tar_log.close() + d_products["LOGS"] = (path_targz_logs, "logs.tgz") + # for packages of SALOME applications including KERNEL, # we produce a salome launcher or a virtual application (depending on salome version) if 'KERNEL' in config.APPLICATION.products: @@ -666,7 +847,13 @@ WARNING: existing binaries directory from previous detar installation: if options.sources: # if we mix binaries and sources, we add a copy of the launcher, # prefixed with "bin",in order to avoid clashes - d_products["launcher (copy)"] = (launcher_package, "bin"+launcher_name) + launcher_copy_name="bin"+launcher_name + launcher_package_copy = produce_relative_launcher(config, + logger, + tmp_working_dir, + launcher_copy_name, + binaries_dir_name) + d_products["launcher (copy)"] = (launcher_package_copy, launcher_copy_name) else: # Provide a script for the creation of an application EDF style appli_script = product_appli_creation_script(config, @@ -688,6 +875,21 @@ WARNING: existing binaries directory from previous detar installation: filename = "env_launch.sh" d_products["environment file"] = (env_file, filename) + # If option exe, produce an extra launcher based on specified exe + if options.exe: + exe_file = produce_relative_env_files(config, + logger, + tmp_working_dir, + binaries_dir_name, + options.exe) + + if src.architecture.is_windows(): + filename = os.path.basename(options.exe) + ".bat" + else: + filename = os.path.basename(options.exe) + ".sh" + d_products["exe file"] = (exe_file, filename) + + return d_products def source_package(sat, config, logger, options, tmp_working_dir): @@ -737,23 +939,26 @@ def source_package(sat, config, logger, options, tmp_working_dir): # Add salomeTools tmp_sat = add_salomeTools(config, tmp_working_dir) - d_sat = {"salomeTools" : (tmp_sat, "salomeTools")} + d_sat = {"salomeTools" : (tmp_sat, "sat")} # Add a sat symbolic link if not win if not src.architecture.is_windows(): - tmp_satlink_path = os.path.join(tmp_working_dir, 'sat') try: t = os.getcwd() except: # In the jobs, os.getcwd() can fail t = config.LOCAL.workdir os.chdir(tmp_working_dir) - if 
os.path.lexists(tmp_satlink_path): - os.remove(tmp_satlink_path) - os.symlink(os.path.join('salomeTools', 'sat'), 'sat') + + # create a symlink, to avoid reference with "salomeTool/.." + os.chdir("PROJECT") + if os.path.lexists("ARCHIVES"): + os.remove("ARCHIVES") + os.symlink("../ARCHIVES", "ARCHIVES") os.chdir(t) - d_sat["sat link"] = (tmp_satlink_path, "sat") + d_sat["sat archive link"] = (os.path.join(tmp_working_dir,"PROJECT", "ARCHIVES"), + os.path.join("PROJECT", "ARCHIVES")) d_source = src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat) return d_source @@ -910,6 +1115,24 @@ def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir): # DBG.write("END sat config", sat.cfg.APPLICATION, True) return d_archives_vcs +def make_bin_archive(prod_name, prod_info, where): + '''Create an archive of a product by searching its source directory. + + :param prod_name str: The name of the product. + :param prod_info Config: The specific configuration corresponding to the + product + :param where str: The path of the repository where to put the resulting + archive + :return: The path of the resulting archive + :rtype: str + ''' + path_targz_prod = os.path.join(where, prod_name + PACKAGE_EXT) + tar_prod = tarfile.open(path_targz_prod, mode='w:gz') + bin_path = prod_info.install_dir + tar_prod.add(bin_path, arcname=path_targz_prod) + tar_prod.close() + return path_targz_prod + def make_archive(prod_name, prod_info, where): '''Create an archive of a product by searching its source directory. @@ -921,12 +1144,17 @@ def make_archive(prod_name, prod_info, where): :return: The path of the resulting archive :rtype: str ''' - path_targz_prod = os.path.join(where, prod_name + ".tgz") + path_targz_prod = os.path.join(where, prod_name + PACKAGE_EXT) tar_prod = tarfile.open(path_targz_prod, mode='w:gz') local_path = prod_info.source_dir - tar_prod.add(local_path, - arcname=prod_name, - exclude=exclude_VCS_and_extensions) + if old_python: + tar_prod.add(local_path, + arcname=prod_name, + exclude=exclude_VCS_and_extensions_26) + else: + tar_prod.add(local_path, + arcname=prod_name, + filter=exclude_VCS_and_extensions) tar_prod.close() return path_targz_prod @@ -1009,7 +1237,10 @@ def create_project_for_src_package(config, tmp_working_dir, with_vcs, with_ftp): patches_tmp_dir, products_pyconf_tmp_dir) - find_application_pyconf(config, application_tmp_dir) + # for the application pyconf, we write directly the config + # don't search for the original pyconf file + # to avoid problems with overwrite sections and rm_products key + write_application_pyconf(config, application_tmp_dir) d_project = {"project" : (project_tmp_dir, PROJECT_DIR )} return d_project @@ -1085,6 +1316,16 @@ def find_product_scripts_and_pyconf(p_name, product_pyconf_cfg[section].archive_info.archive_name =\ p_info.name + ".tgz" + if (with_vcs) and src.product.product_is_vcs(p_info): + # in vcs mode we must replace explicitely the git server url + # (or it will not be found later because project files are not exported in archives) + for section in product_pyconf_cfg: + # replace in all sections of the product pyconf the git repo definition by its substitued value (found in p_info) + if "git_info" in product_pyconf_cfg[section]: + for repo in product_pyconf_cfg[section].git_info: + if repo in p_info.git_info: + product_pyconf_cfg[section].git_info[repo] = p_info.git_info[repo] + # write the pyconf file to the temporary project location product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir, p_name + ".pyconf") @@ 
-1093,43 +1334,35 @@ def find_product_scripts_and_pyconf(p_name, product_pyconf_cfg.__save__(ff, 1) ff.close() -def find_application_pyconf(config, application_tmp_dir): - '''Find the application pyconf file and put it in the specific temporary + +def write_application_pyconf(config, application_tmp_dir): + '''Write the application pyconf file in the specific temporary directory containing the specific project of a source package. :param config Config: The global configuration. :param application_tmp_dir str: The path to the temporary application - scripts directory of the project. + scripts directory of the project. ''' - # read the pyconf of the application application_name = config.VARS.application - application_pyconf_path = src.find_file_in_lpath( - application_name + ".pyconf", - config.PATHS.APPLICATIONPATH) - application_pyconf_cfg = src.pyconf.Config(application_pyconf_path) - - # Change the workdir - application_pyconf_cfg.APPLICATION.workdir = src.pyconf.Reference( - application_pyconf_cfg, - src.pyconf.DOLLAR, - 'VARS.salometoolsway + $VARS.sep + ".."') - - # Prevent from compilation in base - application_pyconf_cfg.APPLICATION.no_base = "yes" - - #remove products that are not in config (which were filtered by --without_properties) - for product_name in application_pyconf_cfg.APPLICATION.products.keys(): - if product_name not in config.APPLICATION.products.keys(): - application_pyconf_cfg.APPLICATION.products.__delitem__(product_name) - # write the pyconf file to the temporary application location application_tmp_pyconf_path = os.path.join(application_tmp_dir, application_name + ".pyconf") - - ff = open(application_tmp_pyconf_path, 'w') - ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n") - application_pyconf_cfg.__save__(ff, 1) - ff.close() + with open(application_tmp_pyconf_path, 'w') as f: + f.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n") + res = src.pyconf.Config() + app = src.pyconf.deepCopyMapping(config.APPLICATION) + + # set base mode to "no" for the archive + app.base = "no" + + # Change the workdir + app.workdir = src.pyconf.Reference( + app, + src.pyconf.DOLLAR, + 'VARS.salometoolsway + $VARS.sep + ".."') + res.addMapping("APPLICATION", app, "") + res.__save__(f, evaluated=False) + def sat_package(config, tmp_working_dir, options, logger): '''Prepare a dictionary that stores all the needed directories and files to @@ -1265,6 +1498,8 @@ In the following, $$ROOT represents the directory where you have installed SALOME (the directory where this file is located). 
""" + if src.architecture.is_windows(): + readme_header = readme_header.replace('$$ROOT','%ROOT%') readme_compilation_with_binaries=""" compilation based on the binaries used as prerequisites @@ -1309,6 +1544,16 @@ The procedure to do it is: if options.binaries or options.sources: d['application'] = config.VARS.application + d['BINARIES'] = config.INTERNAL.config.binary_dir + d['SEPARATOR'] = config.VARS.sep + if src.architecture.is_windows(): + d['operatingSystem'] = 'Windows' + d['PYTHON3'] = 'python3' + d['ROOT'] = '%ROOT%' + else: + d['operatingSystem'] = 'Linux' + d['PYTHON3'] = '' + d['ROOT'] = '$ROOT' f.write("# Application: " + d['application'] + "\n") if 'KERNEL' in config.APPLICATION.products: VersionSalome = src.get_salome_version(config) @@ -1329,7 +1574,7 @@ The procedure to do it is: if options.sources: f.write(src.template.substitute(readme_template_path_src, d)) - if options.binaries and options.sources: + if options.binaries and options.sources and not src.architecture.is_windows(): f.write(readme_compilation_with_binaries) if options.project: @@ -1340,7 +1585,7 @@ The procedure to do it is: return readme_path -def update_config(config, prop, value): +def update_config(config, logger, prop, value): '''Remove from config.APPLICATION.products the products that have the property given as input. :param config Config: The global config. @@ -1356,6 +1601,7 @@ def update_config(config, prop, value): l_product_to_remove.append(product_name) for product_name in l_product_to_remove: config.APPLICATION.products.__delitem__(product_name) + logger.write("Remove product %s with property %s\n" % (product_name, prop), 5) def description(): '''method that is called when salomeTools is called with --help option. @@ -1386,26 +1632,37 @@ def run(args, runner, logger): # Parse the options (options, args) = parser.parse_args(args) + # Check that a type of package is called, and only one all_option_types = (options.binaries, options.sources, options.project not in ["", None], - options.sat) + options.sat, + options.bin_products) # Check if no option for package type if all_option_types.count(True) == 0: msg = _("Error: Precise a type for the package\nUse one of the " "following options: --binaries, --sources, --project or" - " --salometools") + " --salometools, --bin_products") logger.write(src.printcolors.printcError(msg), 1) logger.write("\n", 1) return 1 - + do_create_package = options.binaries or options.sources or options.project or options.sat + + if options.bin_products: + ret = bin_products_archives(runner.cfg, logger, options.with_vcs) + if ret!=0: + return ret + if not do_create_package: + return 0 + + # continue to create a tar.gz package + # The repository where to put the package if not Binary or Source package_default_path = runner.cfg.LOCAL.workdir - # if the package contains binaries or sources: - if options.binaries or options.sources: + if options.binaries or options.sources or options.bin_products: # Check that the command has been called with an application src.check_config_has_application(runner.cfg) @@ -1446,15 +1703,12 @@ Please add it in file: # Remove the products that are filtered by the --without_properties option if options.without_properties: - app = runner.cfg.APPLICATION - logger.trace("without_properties all products:\n %s\n" % PP.pformat(sorted(app.products.keys()))) prop, value = options.without_properties - update_config(runner.cfg, prop, value) - logger.warning("without_properties selected products:\n %s\n" % PP.pformat(sorted(app.products.keys()))) + 
update_config(runner.cfg, logger, prop, value) # Remove from config the products that have the not_in_package property - update_config(runner.cfg, "not_in_package", "yes") - + update_config(runner.cfg, logger, "not_in_package", "yes") + # get the name of the archive or build it if options.name: if os.path.basename(options.name) == options.name: @@ -1491,8 +1745,7 @@ Please add it in file: if options.project: if options.sat: archive_name += "_" - project_name = options.project - archive_name += ("satproject_" + project_name) + archive_name += ("satproject_" + options.project) if len(archive_name)==0: # no option worked msg = _("Error: Cannot name the archive\n" @@ -1503,7 +1756,7 @@ Please add it in file: logger.write("\n", 1) return 1 - path_targz = os.path.join(dir_name, archive_name + ".tgz") + path_targz = os.path.join(dir_name, archive_name + PACKAGE_EXT) src.printcolors.print_value(logger, "Package path", path_targz, 2) @@ -1546,7 +1799,6 @@ Please add it in file: d_paths_to_substitute[source_dir]=path_in_archive d_files_to_add.update(d_bin_files_to_add) - if options.sources: d_files_to_add.update(source_package(runner, runner.cfg, @@ -1572,8 +1824,8 @@ Please add it in file: options, logger)) if options.project: - DBG.write("config for package %s" % project_name, runner.cfg) - d_files_to_add.update(project_package(runner.cfg, project_name, options.project_file_path, options.ftp, tmp_working_dir, options.sat, logger)) + DBG.write("config for package %s" % options.project, runner.cfg) + d_files_to_add.update(project_package(runner.cfg, options.project, options.project_file_path, options.ftp, tmp_working_dir, options.sat, logger)) if not(d_files_to_add): msg = _("Error: Empty dictionnary to build the archive!\n") @@ -1605,7 +1857,10 @@ Please add it in file: tar = tarfile.open(path_targz, mode='w:gz') # get the filtering function if needed - filter_function = exclude_VCS_and_extensions + if old_python: + filter_function = exclude_VCS_and_extensions_26 + else: + filter_function = exclude_VCS_and_extensions # Add the files to the tarfile object res = add_files(tar, archive_name, d_files_to_add, logger, f_exclude=filter_function)
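
A note on the tarfile change made in add_files(), make_archive() and run(): the patch keeps the old exclude= callback only for Python 2.6 hosts (the old_python flag) and uses the filter= callback everywhere else. The two callbacks have different signatures: exclude receives a file name and returns True to skip it, while filter receives a TarInfo object and returns None to skip it (or the TarInfo, possibly modified, to keep it). The sketch below is a minimal, self-contained illustration of that switch; add_tree, exclude_cb and filter_cb are illustrative names, not functions of package.py:

    import sys
    import tarfile

    IGNORED_DIRS = [".git", ".svn"]   # mirrors the module-level constants of package.py
    IGNORED_EXTENSIONS = []

    def exclude_cb(filename):
        # Python 2.6 style (exclude=): return True to leave the file out of the archive.
        if any(d in filename for d in IGNORED_DIRS):
            return True
        return any(filename.endswith(ext) for ext in IGNORED_EXTENSIONS)

    def filter_cb(tarinfo):
        # Python 2.7+/3.x style (filter=): return None to skip, or the TarInfo to keep.
        return None if exclude_cb(tarinfo.name) else tarinfo

    def add_tree(archive_path, src_dir, arcname):
        # Illustrative helper showing the version switch used throughout the patch.
        old_python = sys.version_info[:2] <= (2, 6)
        tar = tarfile.open(archive_path, mode="w:gz")
        try:
            if old_python:
                tar.add(src_dir, arcname=arcname, exclude=exclude_cb)
            else:
                tar.add(src_dir, arcname=arcname, filter=filter_cb)
        finally:
            tar.close()

The exclude keyword was deprecated in favour of filter and has since been removed from Python 3, which is why the version test is needed as long as Python 2.6 hosts are supported.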
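
hack_for_distene_licence() now injects the licence file path as a raw string (r"%s") so that Windows backslashes survive, and the injected snippet loads that file as a module with importlib on Python >= 3.5 (the rest of the injected code is not visible in this hunk). For reference, a self-contained sketch of loading a hook module from an arbitrary path on both old and new interpreters; the helper name is illustrative and not part of the patch:

    import sys

    def load_module_from_path(mod_name, file_path):
        # Load a Python source file as a module, the way the generated launcher
        # is expected to source the distene licence hook. Illustrative helper.
        if sys.version_info >= (3, 5):
            import importlib.util
            spec = importlib.util.spec_from_file_location(mod_name, file_path)
            module = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(module)
            return module
        else:
            import imp
            return imp.load_source(mod_name, file_path)

As a side note, the tuple comparison sys.version_info >= (3, 5) also covers future major versions, whereas the injected test sys.version_info[0] >= 3 and sys.version_info[1] >= 5 would reject a hypothetical 4.0 interpreter.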
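
produce_relative_env_files() now rewrites out_dir_Path references with the platform's own variable syntax (${out_dir_Path} for bash, %out_dir_Path% for a Windows batch file) and, when --exe is given, appends a final line that runs the requested executable with the user's arguments. A rough sketch of those two steps under the assumption that src.replace_in_file is a plain whole-file textual substitution; the function names below are illustrative:

    def replace_in_file(path, old, new):
        # Minimal stand-in for src.replace_in_file: whole-file textual substitution.
        with open(path, "r") as f:
            content = f.read()
        with open(path, "w") as f:
            f.write(content.replace(old, new))

    def relocate_out_dir_path(env_file, is_windows):
        # Turn bare out_dir_Path references into shell/batch variable expansions
        # (the patch applies the same idea to several textual variants).
        if is_windows:
            replace_in_file(env_file, '"out_dir_Path', '"%out_dir_Path%')
        else:
            replace_in_file(env_file, '"out_dir_Path', '"${out_dir_Path}')

    def append_exe_wrapper(env_file, exe_path, is_windows):
        # Append a trailing command that forwards the user's arguments to exe_path.
        if is_windows:
            cmd = "\n\nrem Launch exe with user arguments\n%s %%*\n" % exe_path
        else:
            cmd = '\n\n# Launch exe with user arguments\n%s "$@"\n' % exe_path
        with open(env_file, "a") as f:
            f.write(cmd)

The patch itself forwards arguments with "$*"; "$@" keeps each argument as a separate word when arguments contain spaces.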
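
bin_products_archives() writes a <name>.md5 file next to each per-product archive by reading the whole tarball into memory before hashing it. The same sidecar can be produced with a bounded memory footprint by hashing in chunks, as in this sketch; the helper name is illustrative, and the two-space separator follows the layout conventionally checked by md5sum -c (the patch writes a single space):

    import hashlib
    import os

    def write_md5_sidecar(archive_path, chunk_size=1024 * 1024):
        # Hash the archive in chunks and write '<digest>  <basename>' next to it.
        h = hashlib.md5()
        with open(archive_path, "rb") as f:
            for chunk in iter(lambda: f.read(chunk_size), b""):
                h.update(chunk)
        digest = h.hexdigest()
        with open(archive_path + ".md5", "w") as sidecar:
            sidecar.write("%s  %s\n" % (digest, os.path.basename(archive_path)))
        return digest

Verification then amounts to running md5sum -c <name>.md5 in the directory that holds the archive.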
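
In binary_package(), the correlation table d_products now records, for every product, both its real install directory and the directory name it gets inside the archive. Because the archive is always produced with base set to "no", products that were installed in "base" mode cannot keep their on-disk directory name: they are renamed to the product name, or to the application-wide single install directory when single_install_dir is active. A small pure-function sketch of that naming rule; the constant stands in for config.INTERNAL.config.single_install_dir and the example values are made up:

    import os

    SINGLE_INSTALL_DIR = "PRODUCTS"   # stand-in for config.INTERNAL.config.single_install_dir

    def archive_dir_for(prod_name, install_dir, install_mode, is_single_dir, binaries_dir_name):
        # Directory that will hold this product's binaries inside the package.
        base_name = os.path.basename(install_dir)
        if install_mode == "base":
            # installed in a sat base: flatten to <product> (or to the shared PRODUCTS dir)
            base_name = SINGLE_INSTALL_DIR if is_single_dir else prod_name
        return os.path.join(binaries_dir_name, base_name)

    # e.g. archive_dir_for("KERNEL", "/data/BASE/KERNEL/abc123", "base", False, "BINARIES-CO7")
    #      -> "BINARIES-CO7/KERNEL"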
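
The README fragments spell the installation root as $$ROOT, and run() now fills the substitution dictionary with platform-dependent values (d['ROOT'] = '$ROOT' or '%ROOT%', d['PYTHON3'] = 'python3' or ''). Assuming src.template.substitute behaves like Python's string.Template (an assumption, not something this page shows), $$ is the escape for a literal dollar sign and $NAME placeholders are filled from the dictionary, as in this sketch with made-up fragment text:

    from string import Template

    fragment = Template(
        "# Application: $application\n"
        "In the following, $$ROOT is the installation directory.\n"
        "Start the application with: $PYTHON3 $ROOT/salome\n"
    )

    print(fragment.substitute(application="SALOME", PYTHON3="python3", ROOT="$ROOT"))
    # '$$ROOT' comes out as the literal text '$ROOT'; '$ROOT' is replaced by the dict value.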