X-Git-Url: http://git.salome-platform.org/gitweb/?a=blobdiff_plain;f=commands%2Fpackage.py;h=47f2c80d74e76a06f19d00cc78d1b647a34a4cf1;hb=11c99a29802614c1c09c32e7c803959bfcc3c721;hp=ad10ab0a878ef975c50352e590078ab0327f9f5b;hpb=7132688c2b4c9e20fdb2f84fafabe044f1a35f29;p=tools%2Fsat.git

diff --git a/commands/package.py b/commands/package.py
index ad10ab0..47f2c80 100644
--- a/commands/package.py
+++ b/commands/package.py
@@ -27,6 +27,7 @@ import string
 
 import src
 from application import get_SALOME_modules
+import src.debug as DBG
 
 BINARY = "binary"
 SOURCE = "Source"
@@ -64,9 +65,10 @@ LOCAL_TEMPLATE = ("""#!/usr/bin/env python
 
   LOCAL :
   {
-    base : 'unknown'
-    workdir : 'unknown'
-    log_dir : 'unknown'
+    base : 'default'
+    workdir : 'default'
+    log_dir : 'default'
+    archive_dir : 'default'
     VCS : None
     tag : None
   }
@@ -126,14 +128,14 @@ def add_files(tar, name_archive, d_content, logger, f_exclude=None):
     success = 0
     # loop over each directory or file stored in the d_content dictionary
-    for name in d_content.keys():
+    for name in sorted(d_content.keys()):
         # display information
         len_points = max_len - len(name)
-        logger.write(name + " " + len_points * "." + " ", 3)
-        # Get the local path and the path in archive
-        # of the directory or file to add
         local_path, archive_path = d_content[name]
         in_archive = os.path.join(name_archive, archive_path)
+        logger.write(name + " " + len_points * "." + " "+ in_archive + " ", 3)
+        # Get the local path and the path in archive
+        # of the directory or file to add
         # Add it in the archive
         try:
             tar.add(local_path, arcname=in_archive, exclude=f_exclude)
@@ -180,16 +182,41 @@ def produce_relative_launcher(config,
     :rtype: str
     '''
-    # Get the launcher template
-    profile_install_dir = os.path.join(binaries_dir_name,
-                                       config.APPLICATION.profile.product)
+    # get KERNEL installation path
+    kernel_root_dir = os.path.join(binaries_dir_name, "KERNEL")
+
+    # set kernel bin dir (considering fhs property)
+    kernel_cfg = src.product.get_product_config(config, "KERNEL")
+    if src.get_property_in_product_cfg(kernel_cfg, "fhs"):
+        bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin")
+    else:
+        bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin","salome")
+
+    # check if the application contains an application module
+    l_product_info = src.product.get_products_infos(config.APPLICATION.products.keys(),
+                                                    config)
+    salome_application_name="Not defined"
+    for prod_name, prod_info in l_product_info:
+        # look for a salome application
+        if src.get_property_in_product_cfg(prod_info, "is_salome_application") == "yes":
+            salome_application_name=prod_info.name
+            continue
+
+    # if the application contains an application module, we set ABSOLUTE_APPLI_PATH to it
+    # if not we set it to KERNEL_INSTALL_DIR, which is sufficient, except for salome test
+    if salome_application_name == "Not defined":
+        app_root_dir=kernel_root_dir
+    else:
+        app_root_dir=os.path.join(binaries_dir_name, salome_application_name)
+
+    # Get the launcher template and do substitutions
     withProfile = src.fileEnviron.withProfile
+
     withProfile = withProfile.replace(
-            "ABSOLUTE_APPLI_PATH'] = 'PROFILE_INSTALL_DIR'",
-            "ABSOLUTE_APPLI_PATH'] = out_dir_Path + '" + config.VARS.sep + profile_install_dir + "'")
+            "ABSOLUTE_APPLI_PATH'] = 'KERNEL_INSTALL_DIR'",
+            "ABSOLUTE_APPLI_PATH'] = out_dir_Path + '" + config.VARS.sep + app_root_dir + "'")
     withProfile = withProfile.replace(
-            "os.path.join( 'PROFILE_INSTALL_DIR'",
-            "os.path.join( out_dir_Path, '" + profile_install_dir + "'")
+            " 'BIN_KERNEL_INSTALL_DIR'",
+            " out_dir_Path + '" + config.VARS.sep + bin_kernel_install_dir + "'")
 
     before, after = withProfile.split(
                                 "# here your local standalone environment\n")
@@ -250,6 +277,8 @@ def hack_for_distene_licence(filepath):
     if num_line == -1:
         # No distene product, there is nothing to do
         fin.close()
+        for line in text:
+            fout.write(line)
         fout.close()
         return
     del text[num_line +1]
@@ -445,13 +474,14 @@ def binary_package(config, logger, options, tmp_working_dir):
     '''
 
     # Get the list of product installation to add to the archive
-    l_products_name = config.APPLICATION.products.keys()
+    l_products_name = sorted(config.APPLICATION.products.keys())
     l_product_info = src.product.get_products_infos(l_products_name,
                                                     config)
 
     l_install_dir = []
     l_source_dir = []
     l_not_installed = []
     l_sources_not_present = []
+    generate_mesa_launcher = False  # a flag to know if we generate a mesa launcher
     for prod_name, prod_info in l_product_info:
         # Add the sources of the products that have the property
@@ -463,6 +493,10 @@ def binary_package(config, logger, options, tmp_working_dir):
             else:
                 l_sources_not_present.append(prod_name)
 
+        # if at least one of the application products has the "is_mesa" property
+        if src.get_property_in_product_cfg(prod_info, "is_mesa") == "yes":
+            generate_mesa_launcher = True  # we will generate a mesa launcher
+
         # ignore the native and fixed products for install directories
         if (src.product.product_is_native(prod_info)
                 or src.product.product_is_fixed(prod_info)
@@ -484,6 +518,21 @@ def binary_package(config, logger, options, tmp_working_dir):
                 else:
                     l_not_installed.append(name_cpp)
 
+    # check the name of the directory that (could) contains the binaries
+    # from previous detar
+    binaries_from_detar = os.path.join(config.APPLICATION.workdir, "BINARIES-" + config.VARS.dist)
+    if os.path.exists(binaries_from_detar):
+        logger.write("""
+WARNING: existing binaries directory from previous detar installation:
+         %s
+         To make new package from this, you have to:
+         1) install binaries in INSTALL directory with the script "install_bin.sh"
+            see README file for more details
+         2) or recompile everything in INSTALL with "sat compile" command
+            this step is long, and requires some linux packages to be installed
+            on your system\n
+""" % binaries_from_detar)
+
     # Print warning or error if there are some missing products
     if len(l_not_installed) > 0:
         text_missing_prods = ""
@@ -532,30 +581,62 @@ def binary_package(config, logger, options, tmp_working_dir):
             path_in_archive = os.path.join("SOURCES", prod_name)
             d_products[prod_name + " (sources)"] = (source_dir, path_in_archive)
 
-    # create the relative launcher and add it to the files to add
-    if ("profile" in config.APPLICATION and
-                       "product" in config.APPLICATION.profile):
-        launcher_name = config.APPLICATION.profile.launcher_name
-        launcher_package = produce_relative_launcher(config,
-                                                     logger,
-                                                     tmp_working_dir,
-                                                     launcher_name,
-                                                     binaries_dir_name,
-                                                     not(options.without_commercial))
-
-        d_products["launcher"] = (launcher_package, launcher_name)
-        if options.sources:
-            # if we mix binaries and sources, we add a copy of the launcher,
-            # prefixed with "bin",in order to avoid clashes
-            d_products["launcher (copy)"] = (launcher_package, "bin"+launcher_name)
-    else:
-        # Provide a script for the creation of an application EDF style
-        appli_script = product_appli_creation_script(config,
-                                                      logger,
-                                                      tmp_working_dir,
-                                                      binaries_dir_name)
-
-        d_products["appli script"] = (appli_script, "create_appli.py")
+    # for packages of SALOME applications including KERNEL,
+    # we produce a salome launcher or a virtual application (depending on salome version)
+    if 'KERNEL' in config.APPLICATION.products:
+        VersionSalome = src.get_salome_version(config)
+        # Case where SALOME has the launcher that uses the SalomeContext API
+        if VersionSalome >= 730:
+            # create the relative launcher and add it to the files to add
+            launcher_name = src.get_launcher_name(config)
+            launcher_package = produce_relative_launcher(config,
+                                                         logger,
+                                                         tmp_working_dir,
+                                                         launcher_name,
+                                                         binaries_dir_name,
+                                                         not(options.without_commercial))
+            d_products["launcher"] = (launcher_package, launcher_name)
+
+            # if the application contains mesa products, we generate in addition to the
+            # classical salome launcher a launcher using mesa and called mesa_salome
+            # (the mesa launcher will be used for remote usage through ssh).
+            if generate_mesa_launcher:
+                #if there is one : store the use_mesa property
+                restore_use_mesa_option=None
+                if ('properties' in config.APPLICATION and
+                    'use_mesa' in config.APPLICATION.properties):
+                    restore_use_mesa_option = config.APPLICATION.properties.use_mesa
+
+                # activate mesa property, and generate a mesa launcher
+                src.activate_mesa_property(config)  #activate use_mesa property
+                launcher_mesa_name="mesa_"+launcher_name
+                launcher_package_mesa = produce_relative_launcher(config,
+                                                                  logger,
+                                                                  tmp_working_dir,
+                                                                  launcher_mesa_name,
+                                                                  binaries_dir_name,
+                                                                  not(options.without_commercial))
+                d_products["launcher (mesa)"] = (launcher_package_mesa, launcher_mesa_name)
+
+                # if there was a use_mesa value, we restore it
+                # else we set it to the default value "no"
+                if restore_use_mesa_option != None:
+                    config.APPLICATION.properties.use_mesa=restore_use_mesa_option
+                else:
+                    config.APPLICATION.properties.use_mesa="no"
+
+            if options.sources:
+                # if we mix binaries and sources, we add a copy of the launcher,
+                # prefixed with "bin",in order to avoid clashes
+                d_products["launcher (copy)"] = (launcher_package, "bin"+launcher_name)
+        else:
+            # Provide a script for the creation of an application EDF style
+            appli_script = product_appli_creation_script(config,
+                                                          logger,
+                                                          tmp_working_dir,
+                                                          binaries_dir_name)
+
+            d_products["appli script"] = (appli_script, "create_appli.py")
 
     # Put also the environment file
     env_file = produce_relative_env_files(config,
@@ -617,7 +698,7 @@ def source_package(sat, config, logger, options, tmp_working_dir):
             t = os.getcwd()
         except:
             # In the jobs, os.getcwd() can fail
-            t = config.USER.workdir
+            t = config.LOCAL.workdir
         os.chdir(tmp_working_dir)
         if os.path.lexists(tmp_satlink_path):
             os.remove(tmp_satlink_path)
@@ -682,6 +763,17 @@ def add_salomeTools(config, tmp_working_dir):
     local_pyconf_name = "local.pyconf"
     local_pyconf_dir = os.path.join(tmp_working_dir, "salomeTools", "data")
     local_pyconf_file = os.path.join(local_pyconf_dir, local_pyconf_name)
+    # Remove the .pyconf file in the root directory of salomeTools if there is
+    # any. (For example when launching jobs, a pyconf file describing the jobs
+    # can be here and is not useful)
+    files_or_dir_SAT = os.listdir(os.path.join(tmp_working_dir, "salomeTools"))
+    for file_or_dir in files_or_dir_SAT:
+        if file_or_dir.endswith(".pyconf") or file_or_dir.endswith(".txt"):
+            file_path = os.path.join(tmp_working_dir,
+                                     "salomeTools",
+                                     file_or_dir)
+            os.remove(file_path)
+
     ff = open(local_pyconf_file, "w")
     ff.write(LOCAL_TEMPLATE)
     ff.close()
@@ -710,25 +802,38 @@ def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir):
 
 
     # clean the source directory of all the vcs products, then use the source
    # command and thus construct an archive that will not contain the patches
     l_prod_names = [pn for pn, __ in l_pinfo_vcs]
-    # clean
-    logger.write(_("clean sources\n"))
-    args_clean = config.VARS.application
-    args_clean += " --sources --products "
-    args_clean += ",".join(l_prod_names)
-    sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)
-    # source
-    logger.write(_("get sources"))
-    args_source = config.VARS.application
-    args_source += " --products "
-    args_source += ",".join(l_prod_names)
-    sat.source(args_source, batch=True, verbose=0, logger_add_link = logger)
-
-    # make the new archives
-    d_archives_vcs = {}
-    for pn, pinfo in l_pinfo_vcs:
-        path_archive = make_archive(pn, pinfo, tmp_working_dir)
-        d_archives_vcs[pn] = (path_archive,
-                              os.path.join(ARCHIVE_DIR, pn + ".tgz"))
+    if False: # clean is dangerous in user/SOURCES, fixed in tmp_local_working_dir
+      logger.write(_("\nclean sources\n"))
+      args_clean = config.VARS.application
+      args_clean += " --sources --products "
+      args_clean += ",".join(l_prod_names)
+      logger.write("WARNING: get_archives_vcs clean\n '%s'\n" % args_clean, 1)
+      sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)
+    if True:
+      # source
+      logger.write(_("get sources\n"))
+      args_source = config.VARS.application
+      args_source += " --products "
+      args_source += ",".join(l_prod_names)
+      svgDir = sat.cfg.APPLICATION.workdir
+      tmp_local_working_dir = os.path.join(sat.cfg.APPLICATION.workdir, "tmp_package")  # to avoid too much big files in /tmp
+      sat.cfg.APPLICATION.workdir = tmp_local_working_dir
+      # DBG.write("SSS sat config.APPLICATION.workdir", sat.cfg.APPLICATION, True)
+      # DBG.write("sat config id", id(sat.cfg), True)
+      # shit as config is not same id() as for sat.source()
+      # sat.source(args_source, batch=True, verbose=5, logger_add_link = logger)
+      import source
+      source.run(args_source, sat, logger)  #use this mode as runner.cfg reference
+
+      # make the new archives
+      d_archives_vcs = {}
+      for pn, pinfo in l_pinfo_vcs:
+        path_archive = make_archive(pn, pinfo, tmp_local_working_dir)
+        logger.write("make archive vcs '%s'\n" % path_archive)
+        d_archives_vcs[pn] = (path_archive,
+                              os.path.join(ARCHIVE_DIR, pn + ".tgz"))
+      sat.cfg.APPLICATION.workdir = svgDir
+      # DBG.write("END sat config", sat.cfg.APPLICATION, True)
     return d_archives_vcs
@@ -745,7 +850,9 @@ def make_archive(prod_name, prod_info, where):
 
     path_targz_prod = os.path.join(where, prod_name + ".tgz")
     tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
     local_path = prod_info.source_dir
-    tar_prod.add(local_path, arcname=prod_name)
+    tar_prod.add(local_path,
+                 arcname=prod_name,
+                 exclude=exclude_VCS_and_extensions)
     tar_prod.close()
     return path_targz_prod
@@ -926,7 +1033,7 @@ def find_application_pyconf(config, application_tmp_dir):
     application_pyconf_cfg.__save__(ff, 1)
     ff.close()
 
-def project_package(project_file_path, tmp_working_dir):
+def project_package(config, name_project, project_file_path, tmp_working_dir, logger):
     '''Prepare a dictionary that stores all the needed directories and files to
     add in a project package.
@@ -941,7 +1048,15 @@ def project_package(project_file_path, tmp_working_dir):
     '''
     d_project = {}
     # Read the project file and get the directories to add to the package
-    project_pyconf_cfg = src.pyconf.Config(project_file_path)
+
+    try:
+      project_pyconf_cfg = config.PROJECTS.projects.__getattr__(name_project)
+    except:
+      logger.write("""
+WARNING: inexisting config.PROJECTS.projects.%s, try to read now from:\n%s\n""" % (name_project, project_file_path))
+      project_pyconf_cfg = src.pyconf.Config(project_file_path)
+      project_pyconf_cfg.PWD = os.path.dirname(project_file_path)
+
     paths = {"ARCHIVEPATH" : "archives",
              "APPLICATIONPATH" : "applications",
             "PRODUCTPATH" : "products",
@@ -999,7 +1114,7 @@ SALOME (the directory where this file is located).
 compilation based on the binaries used as prerequisites
 =======================================================
 
-If you fail to compile the the complete application (for example because
+If you fail to compile the complete application (for example because
 you are not root on your system and cannot install missing packages), you
 may try a partial compilation based on the binaries.
 For that it is necessary to copy the binaries from BINARIES to INSTALL,
@@ -1015,10 +1130,12 @@ The procedure to do it is:
 """
 
     readme_header_tpl=string.Template(readme_header)
-    readme_template_path_bin_prof = os.path.join(config.VARS.internal_dir,
+    readme_template_path_bin = os.path.join(config.VARS.internal_dir,
                                                  "README_BIN.template")
-    readme_template_path_bin_noprof = os.path.join(config.VARS.internal_dir,
-                                                "README_BIN_NO_PROFILE.template")
+    readme_template_path_bin_launcher = os.path.join(config.VARS.internal_dir,
+                                                "README_LAUNCHER.template")
+    readme_template_path_bin_virtapp = os.path.join(config.VARS.internal_dir,
+                                                "README_BIN_VIRTUAL_APP.template")
     readme_template_path_src = os.path.join(config.VARS.internal_dir,
                                             "README_SRC.template")
     readme_template_path_pro = os.path.join(config.VARS.internal_dir,
@@ -1036,19 +1153,22 @@ The procedure to do it is:
 
     if options.binaries or options.sources:
         d['application'] = config.VARS.application
-        f.write("# Application: " + d['application'])
-        if 'profile' in config.APPLICATION:
-            d['launcher'] = config.APPLICATION.profile.launcher_name
-            d['launcher'] = config.APPLICATION.profile.launcher_name
-        else:
-            d['env_file'] = 'env_launch.sh'
+        f.write("# Application: " + d['application'] + "\n")
+        if 'KERNEL' in config.APPLICATION.products:
+            VersionSalome = src.get_salome_version(config)
+            # Case where SALOME has the launcher that uses the SalomeContext API
+            if VersionSalome >= 730:
+                d['launcher'] = config.APPLICATION.profile.launcher_name
+            else:
+                d['virtual_app'] = 'runAppli' # this info is not used now)
 
     # write the specific sections
     if options.binaries:
-        if "env_file" in d:
-            f.write(src.template.substitute(readme_template_path_bin_noprof, d))
-        else:
-            f.write(src.template.substitute(readme_template_path_bin_prof, d))
+        f.write(src.template.substitute(readme_template_path_bin, d))
+        if "virtual_app" in d:
+            f.write(src.template.substitute(readme_template_path_bin_virtapp, d))
+        if "launcher" in d:
+            f.write(src.template.substitute(readme_template_path_bin_launcher, d))
 
     if options.sources:
         f.write(src.template.substitute(readme_template_path_src, d))
@@ -1086,14 +1206,21 @@ def description():
     :return: The text to display for the package command description.
     :rtype: str
     '''
-    return _("The package command creates an archive.\nThere are 4 kinds of "
-             "archive, which can be mixed:\n 1- The binary archive. It contains all the product "
-             "installation directories and a launcher,\n 2- The sources archive."
-             " It contains the products archives, a project corresponding to "
-             "the application and salomeTools,\n 3- The project archive. It "
-             "contains a project (give the project file path as argument),\n 4-"
-             " The salomeTools archive. It contains salomeTools.\n\nexample:"
-             "\nsat package SALOME-master --bineries --sources")
+    return _("""
+The package command creates a tar file archive of a product.
+There are four kinds of archive, which can be mixed:
+
+ 1 - The binary archive.
+     It contains the product installation directories plus a launcher.
+ 2 - The sources archive.
+     It contains the product archives, a project (the application plus salomeTools).
+ 3 - The project archive.
+     It contains a project (give the project file path as argument).
+ 4 - The salomeTools archive.
+     It contains code utility salomeTools.
+
+example:
+ >> sat package SALOME-master --binaries --sources""")
 
 def run(args, runner, logger):
     '''method that is called when salomeTools is called with package parameter.
@@ -1101,7 +1228,7 @@ def run(args, runner, logger):
     '''
     # Parse the options
     (options, args) = parser.parse_args(args)
-    
+
     # Check that a type of package is called, and only one
     all_option_types = (options.binaries,
                         options.sources,
@@ -1118,7 +1245,7 @@ def run(args, runner, logger):
         return 1
 
     # The repository where to put the package if not Binary or Source
-    package_default_path = runner.cfg.USER.workdir
+    package_default_path = runner.cfg.LOCAL.workdir
 
     # if the package contains binaries or sources:
     if options.binaries or options.sources:
@@ -1137,16 +1264,31 @@ def run(args, runner, logger):
     # if the package contains a project:
     if options.project:
         # check that the project is visible by SAT
-        if options.project not in runner.cfg.PROJECTS.project_file_paths:
+        projectNameFile = options.project + ".pyconf"
+        foundProject = None
+        for i in runner.cfg.PROJECTS.project_file_paths:
+            baseName = os.path.basename(i)
+            if baseName == projectNameFile:
+                foundProject = i
+                break
+
+        if foundProject is None:
             local_path = os.path.join(runner.cfg.VARS.salometoolsway,
                                       "data",
                                       "local.pyconf")
-            msg = _("ERROR: the project %(proj)s is not visible by salomeTools."
-                    "\nPlease add it in the %(local)s file." % {
-                        "proj" : options.project, "local" : local_path})
+            msg = _("""ERROR: the project %(1)s is not visible by salomeTools.
+known projects are:
+%(2)s
+
+Please add it in file:
+%(3)s""" % \
+                    {"1": options.project, "2": "\n ".join(runner.cfg.PROJECTS.project_file_paths), "3": local_path})
             logger.write(src.printcolors.printcError(msg), 1)
             logger.write("\n", 1)
             return 1
+        else:
+            options.project_file_path = foundProject
+            src.printcolors.print_value(logger, "Project path", options.project_file_path, 2)
 
     # Remove the products that are filtered by the --without_property option
     if options.without_property:
@@ -1176,17 +1318,16 @@ def run(args, runner, logger):
         archive_name = runner.cfg.APPLICATION.name
 
         if options.binaries:
-            archive_name += "_"+runner.cfg.VARS.dist
+            archive_name += "-"+runner.cfg.VARS.dist
 
         if options.sources:
-            archive_name += "_SRC"
+            archive_name += "-SRC"
             if options.with_vcs:
-                archive_name += "_VCS"
+                archive_name += "-VCS"
 
     if options.project:
-        project_name, __ = os.path.splitext(
-                                        os.path.basename(options.project))
-        archive_name += ("PROJECT_" + project_name)
+        project_name = options.project
+        archive_name += ("PROJECT-" + project_name)
 
     if options.sat:
         archive_name += ("salomeTools_" +
                          runner.cfg.INTERNAL.sat_version)
@@ -1216,7 +1357,7 @@ def run(args, runner, logger):
     msg = _("Preparation of files to add to the archive")
     logger.write(src.printcolors.printcLabel(msg), 2)
     logger.write("\n", 2)
-    
+
     d_files_to_add={}  # content of the archive
 
     # a dict to hold paths that will need to be substitute for users recompilations
@@ -1265,9 +1406,9 @@ def run(args, runner, logger):
     if options.sat:
         d_files_to_add.update({"salomeTools" : (runner.cfg.VARS.salometoolsway, "")})
-    
+
     if options.project:
-        d_files_to_add.update(project_package(options.project, tmp_working_dir))
+        DBG.write("config for package %s" % project_name, runner.cfg)
+        d_files_to_add.update(project_package(runner.cfg, project_name, options.project_file_path, tmp_working_dir, logger))
 
     if not(d_files_to_add):
         msg = _("Error: Empty dictionnary to build the archive!\n")
@@ -1300,7 +1441,6 @@ def run(args, runner, logger):
     tar = tarfile.open(path_targz, mode='w:gz')
 
     # get the filtering function if needed
-    filter_function = None
     filter_function = exclude_VCS_and_extensions
 
     # Add the files to the tarfile object
@@ -1308,15 +1448,16 @@ def run(args, runner, logger):
         tar.close()
     except KeyboardInterrupt:
         logger.write(src.printcolors.printcError("\nERROR: forced interruption\n"), 1)
-        logger.write(_("Removing the temporary working directory ... "), 1)
+        logger.write(_("Removing the temporary working directory '%s'... ") % tmp_working_dir, 1)
         # remove the working directory
         shutil.rmtree(tmp_working_dir)
         logger.write(_("OK"), 1)
         logger.write(_("\n"), 1)
         return 1
 
-    # remove the working directory
-    shutil.rmtree(tmp_working_dir)
+    # remove the working directory
+    DBG.tofix("make shutil.rmtree(tmp_working_dir) effective", "", True)
+    # shutil.rmtree(tmp_working_dir)
 
     # Print again the path of the package
     logger.write("\n", 2)
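
Note on the VCS filtering used in this patch: make_archive() and add_files() now pass a callback (exclude_VCS_and_extensions) to TarFile.add() so that VCS metadata and unwanted file extensions never reach the archives. The sketch below is illustrative only and is not code from the patch: it shows the same idea using tarfile's filter= callback (the replacement for the exclude= keyword that the patch itself uses, which is deprecated in Python 3); the names VCS_DIRS, EXCLUDED_EXTENSIONS and make_product_archive are assumptions for the example.

    import os
    import tarfile

    VCS_DIRS = (".git", ".svn", ".hg", "CVS")       # directories to drop
    EXCLUDED_EXTENSIONS = (".pyc", ".pyo", ".la")   # files to drop

    def exclude_vcs_and_extensions(tarinfo):
        """Return None for members that must stay out of the archive."""
        parts = tarinfo.name.split("/")
        if any(part in VCS_DIRS for part in parts):
            return None
        if tarinfo.name.endswith(EXCLUDED_EXTENSIONS):
            return None
        return tarinfo

    def make_product_archive(prod_name, source_dir, where):
        """Create <where>/<prod_name>.tgz from source_dir without VCS content."""
        path_targz_prod = os.path.join(where, prod_name + ".tgz")
        with tarfile.open(path_targz_prod, mode="w:gz") as tar_prod:
            tar_prod.add(source_dir, arcname=prod_name,
                         filter=exclude_vcs_and_extensions)
        return path_targz_prod

Called as make_product_archive("KERNEL", "/path/to/KERNEL_SRC", "/tmp"), this produces a KERNEL.tgz whose members are all rooted at KERNEL/, mirroring the arcname=prod_name call in make_archive() above.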
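A second pattern worth noting is the temporary override of APPLICATION.properties.use_mesa in binary_package(): the previous value is saved, the property is forced on while the extra "mesa_" launcher is produced, then the old value (or the default "no") is written back. A generic, self-contained sketch of that save/override/restore idiom (hypothetical names, not code from salomeTools) could look like this:

    def with_temporary_property(properties, key, value, action):
        """Run action() with properties[key] set to value, then restore it."""
        saved = properties.get(key)          # None when the property was unset
        properties[key] = value
        try:
            return action()
        finally:
            if saved is not None:
                properties[key] = saved      # restore the previous value
            else:
                properties[key] = "no"       # fall back to the default used by the patch

    props = {}
    result = with_temporary_property(props, "use_mesa", "yes",
                                     lambda: props["use_mesa"])
    print(result)               # "yes" while the action runs
    print(props["use_mesa"])    # back to "no" afterwards

Wrapping the override in try/finally guarantees the restore even if launcher generation raises, which is the main caveat of performing the save and restore by hand as the patch does.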