3 # Copyright (C) 2010-2012 CEA/DEN
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 # Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
31 from application import get_SALOME_modules
32 from src.versionMinorMajorPatch import MinorMajorPatch as MMP
33 import src.debug as DBG
# NOTE(review): this capture is elided (embedded original line numbers are
# non-contiguous); code kept byte-identical, comments only.
# True on legacy CPython <= 2.6 -- presumably selects the tarfile.add
# exclude= API instead of filter= (see add_files below); TODO confirm.
35 old_python = sys.version_info[0] == 2 and sys.version_info[1] <= 6
# Directory names used inside a source package archive.
42 ARCHIVE_DIR = "ARCHIVES"
43 PROJECT_DIR = "PROJECT"
# VCS bookkeeping directories that are never embedded in a package.
45 IGNORED_DIRS = [".git", ".svn"]
# File extensions excluded from packages (none by default).
46 IGNORED_EXTENSIONS = []
48 PACKAGE_EXT = ".tar.gz" # the extension we use for the packages
# NOTE(review): elided capture -- the template bodies below are pyconf text
# inside triple-quoted strings; the closing quotes, the "else:" header for
# the non-Windows variant, and the LOCAL_TEMPLATE assignment header are not
# visible here.  Kept byte-identical; no comments inserted inside the
# string literals.  Windows variant keeps ARCHIVES beside the project
# ($root_path); the generic variant keeps it under the project itself.
50 if src.architecture.is_windows():
51 PROJECT_TEMPLATE = """#!/usr/bin/env python
54 # The path to the archive root directory
55 root_path : $PWD + "/../"
57 project_path : $PWD + "/"
59 # Where to search the archives of the products
60 ARCHIVEPATH : $root_path + "ARCHIVES"
61 # Where to search the pyconf of the applications
62 APPLICATIONPATH : $project_path + "applications/"
63 # Where to search the pyconf of the products
64 PRODUCTPATH : $project_path + "products/"
65 # Where to search the pyconf of the jobs of the project
66 JOBPATH : $project_path + "jobs/"
67 # Where to search the pyconf of the machines of the project
68 MACHINEPATH : $project_path + "machines/"
71 PROJECT_TEMPLATE = """#!/usr/bin/env python
75 project_path : $PWD + "/"
77 # Where to search the archives of the products
78 ARCHIVEPATH : $project_path + "ARCHIVES"
79 # Where to search the pyconf of the applications
80 APPLICATIONPATH : $project_path + "applications/"
81 # Where to search the pyconf of the products
82 PRODUCTPATH : $project_path + "products/"
83 # Where to search the pyconf of the jobs of the project
84 JOBPATH : $project_path + "jobs/"
85 # Where to search the pyconf of the machines of the project
86 MACHINEPATH : $project_path + "machines/"
91 """#!/usr/bin/env python
99 archive_dir : 'default'
108 $LOCAL.workdir + $VARS.sep + \""""
110 + """\" + $VARS.sep + "project.pyconf"
# NOTE(review): elided capture -- the parser.add_option(...) call lines are
# mostly missing; only help strings and scattered argument lines remain.
# Kept byte-identical.  Visible options: binaries, force_creation (binary),
# sources, bin_products, with_vcs, ftp, exe, project, salometools, name,
# add_files, without_properties -- TODO confirm against full file.
116 # Define all possible option for the package command : sat package <options>
117 parser = src.options.Options()
123 _("Optional: Produce a binary package."),
132 "Optional: Only binary package: produce the archive even if "
133 "there are some missing products."
142 _("Optional: Produce a compilable archive of the sources of the " "application."),
150 _("Optional: Create binary archives for all products."),
159 "Optional: Do not make archive for products in VCS mode (git, cvs, svn). "
160 "Sat prepare will use VCS mode instead to retrieve them."
161 '\n Also, when combined with "--bin_products" option, restrict the building of product archives to VCS products.'
171 "Optional: Do not embed archives for products in archive mode."
172 "Sat prepare will use ftp instead to retrieve them"
181 _("Optional: Produce an extra launcher based upon the exe given as argument."),
189 _("Optional: Produce an archive that contains a project."),
197 _("Optional: Produce an archive that contains salomeTools."),
205 _("Optional: The name or full path of the archive."),
213 _("Optional: The list of additional files to add to the archive."),
218 "without_properties",
220 "without_properties",
222 "Optional: Filter the products by their properties.\n\tSyntax: "
223 "--without_properties <property>:<value>"
# NOTE(review): elided capture -- the enclosing "for name in names:" loop
# header and the actual tar.add(...) call lines are not visible here.
# Kept byte-identical; comments only.
228 def add_files(tar, name_archive, d_content, logger, f_exclude=None):
229 """Create an archive containing all directories and files that are given in
230 the d_content argument.
232 :param tar tarfile: The tarfile instance used to make the archive.
233 :param name_archive str: The name of the archive to make.
234 :param d_content dict: The dictionary that contain all directories and files
235 to add in the archive.
237 (path_on_local_machine, path_in_archive)
238 :param logger Logger: the logging instance
239 :param f_exclude Function: the function that filters
240 :return: 0 if success, 1 if not.
243 # get the max length of the messages in order to make the display
244 max_len = len(max(d_content.keys(), key=len))
247 # loop over each directory or file stored in the d_content dictionary
248 names = sorted(d_content.keys())
249 DBG.write("add tar names", names)
251 # used to avoid duplications (for pip install in python, or single_install_dir cases)
252 already_added = set()
254 # display information
255 len_points = max_len - len(name) + 3
256 local_path, archive_path = d_content[name]
257 in_archive = os.path.join(name_archive, archive_path)
258 logger.write(name + " " + len_points * "." + " " + in_archive + " ", 3)
259 # Get the local path and the path in archive
260 # of the directory or file to add
261 # Add it in the archive
263 key = local_path + "->" + in_archive
264 if key not in already_added:
# NOTE(review): two elided tar.add variants -- exclude= is the legacy
# (python <= 2.6) keyword, filter= the modern one; TODO confirm branch
# is selected by the module-level old_python flag.
269 exclude=exclude_VCS_and_extensions_26,
275 filter=exclude_VCS_and_extensions,
277 already_added.add(key)
278 logger.write(src.printcolors.printcSuccess(_("OK")), 3)
279 except Exception as e:
280 logger.write(src.printcolors.printcError(_("KO ")), 3)
281 logger.write(str(e), 3)
283 logger.write("\n", 3)
def exclude_VCS_and_extensions_26(filename):
    """The function that is used to exclude from package the link to the
    VCS repositories (like .git) (only for python 2.6).

    Passed as the ``exclude=`` callable of ``tarfile.TarFile.add`` on
    legacy interpreters (the modern ``filter=`` variant is
    :func:`exclude_VCS_and_extensions`).

    :param filename Str: The filename to exclude (or not).
    :return: True if the file has to be excluded, False otherwise.
    :rtype: Boolean
    """
    # exclude anything whose path mentions a VCS bookkeeping directory
    # (substring match, like the filter= variant below)
    for dir_name in IGNORED_DIRS:
        if dir_name in filename:
            return True
    # exclude files carrying an ignored extension
    for extension in IGNORED_EXTENSIONS:
        if filename.endswith(extension):
            return True
    return False
def exclude_VCS_and_extensions(tarinfo):
    """The function that is used to exclude from package the link to the
    VCS repositories (like .git).

    Passed as the ``filter=`` callable of ``tarfile.TarFile.add``:
    returning None drops the member, returning the tarinfo keeps it.

    :param tarinfo TarInfo: the candidate archive member.
    :return: None if the member has to be excluded, the tarinfo otherwise.
    :rtype: tarinfo or None
    """
    filename = tarinfo.name
    # drop anything whose path mentions a VCS bookkeeping directory
    for dir_name in IGNORED_DIRS:
        if dir_name in filename:
            return None
    # drop members carrying an ignored extension
    for extension in IGNORED_EXTENSIONS:
        if filename.endswith(extension):
            return None
    return tarinfo
# NOTE(review): elided capture -- several interior lines (else: headers,
# closing parens, the chmod block around original lines 426-438) are not
# visible here.  Kept byte-identical; comments only.
322 def produce_relative_launcher(config, logger, file_dir, file_name, binaries_dir_name):
323 """Create a specific SALOME launcher for the binary package. This launcher
326 :param config Config: The global configuration.
327 :param logger Logger: the logging instance
328 :param file_dir str: the directory where to put the launcher
329 :param file_name str: The launcher name
330 :param binaries_dir_name str: the name of the repository where the binaries
332 :return: the path of the produced launcher
336 # set base mode to "no" for the archive - save current mode to restore it at the end
337 if "base" in config.APPLICATION:
338 base_setting = config.APPLICATION.base
340 base_setting = "maybe"
341 config.APPLICATION.base = "no"
343 # get KERNEL installation path
344 kernel_info = src.product.get_product_config(config, "KERNEL")
345 kernel_base_name = os.path.basename(kernel_info.install_dir)
346 if kernel_info.install_mode == "base":
347 # case of kernel installed in base. the kernel install dir name is different in the archive
348 kernel_base_name = os.path.basename(os.path.dirname(kernel_info.install_dir))
350 kernel_root_dir = os.path.join(binaries_dir_name, kernel_base_name)
352 # set kernel bin dir (considering fhs property)
353 kernel_cfg = src.product.get_product_config(config, "KERNEL")
354 if src.get_property_in_product_cfg(kernel_cfg, "fhs"):
355 bin_kernel_install_dir = os.path.join(kernel_root_dir, "bin")
357 bin_kernel_install_dir = os.path.join(kernel_root_dir, "bin", "salome")
359 # check if the application contains an application module
360 # check also if the application has a distene product,
361 # in this case get its licence file name
362 l_product_info = src.product.get_products_infos(
363 config.APPLICATION.products.keys(), config
365 salome_application_name = "Not defined"
366 distene_licence_file_name = False
367 for prod_name, prod_info in l_product_info:
368 # look for a "salome application" and a distene product
369 if src.get_property_in_product_cfg(prod_info, "is_distene") == "yes":
370 distene_licence_file_name = src.product.product_has_licence(
371 prod_info, config.PATHS.LICENCEPATH
373 if src.get_property_in_product_cfg(prod_info, "is_salome_application") == "yes":
374 salome_application_name = prod_info.name
376 # if the application contains an application module, we set ABSOLUTE_APPLI_PATH to it
377 # if not we set it to KERNEL_INSTALL_DIR, which is sufficient, except for salome test
378 if salome_application_name == "Not defined":
379 app_root_dir = kernel_root_dir
381 app_root_dir = os.path.join(binaries_dir_name, salome_application_name)
384 additional_env["sat_bin_kernel_install_dir"] = (
385 "out_dir_Path + " + config.VARS.sep + bin_kernel_install_dir
387 if "python3" in config.APPLICATION and config.APPLICATION.python3 == "yes":
388 additional_env["sat_python_version"] = 3
390 additional_env["sat_python_version"] = 2
392 additional_env["ABSOLUTE_APPLI_PATH"] = (
393 "out_dir_Path" + config.VARS.sep + app_root_dir
395 launcher_name = src.get_launcher_name(config)
396 additional_env["APPLI"] = "out_dir_Path" + config.VARS.sep + file_name
398 # create an environment file writer
399 writer = src.environment.FileEnvWriter(
400 config, logger, file_dir, src_root=None, env_info=None
403 filepath = os.path.join(file_dir, file_name)
405 writer.write_env_file(
409 additional_env=additional_env,
411 for_package=binaries_dir_name,
414 # Little hack to put out_dir_Path outside the strings
415 src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"')
416 src.replace_in_file(filepath, "r'out_dir_Path + ", "out_dir_Path + r'")
418 # A hack to put a call to a file for distene licence.
419 # It does nothing to an application that has no distene product
420 if distene_licence_file_name:
422 "Application has a distene licence file! We use it in package launcher", 5
424 hack_for_distene_licence(filepath, distene_licence_file_name)
426 # change the rights in order to make the file executable for everybody
438 # restore modified setting by its initial value
439 config.APPLICATION.base = base_setting
# NOTE(review): elided capture -- the definition of `fileout`, the
# `num_line` bookkeeping, the text_to_insert assembly and the final
# write/close lines are not visible here.  Kept byte-identical.
444 def hack_for_distene_licence(filepath, licence_file):
445 """Replace the distene licence env variable by a call to a file.
447 :param filepath Str: The path to the launcher to modify.
449 shutil.move(filepath, filepath + "_old")
451 filein = filepath + "_old"
452 fin = open(filein, "r")
453 fout = open(fileout, "w")
454 text = fin.readlines()
455 # Find the Distene section
457 for i, line in enumerate(text):
458 if "# Set DISTENE License" in line:
462 # No distene product, there is nothing to do
468 del text[num_line + 1]
469 del text[num_line + 1]
# The replacement snippet injected into the launcher: it loads the
# licence file as a module (importlib on py >= 3.5, imp otherwise) and
# calls its set_distene_variables(context) hook.
472 distene_licence_file=r"%s"
473 if sys.version_info[0] >= 3 and sys.version_info[1] >= 5:
474 import importlib.util
475 spec_dist = importlib.util.spec_from_file_location("distene_licence", distene_licence_file)
476 distene=importlib.util.module_from_spec(spec_dist)
477 spec_dist.loader.exec_module(distene)
480 distene = imp.load_source('distene_licence', distene_licence_file)
481 distene.set_distene_variables(context)
486 text.insert(num_line + 1, text_to_insert)
# NOTE(review): elided capture -- shell-flavour selection lines, the
# "if exe_name:" guards and the chmod block (original lines 553-564) are
# not visible here.  Kept byte-identical; comments only.
494 def produce_relative_env_files(
495 config, logger, file_dir, binaries_dir_name, exe_name=None
497 """Create some specific environment files for the binary package. These
498 files use relative paths.
500 :param config Config: The global configuration.
501 :param logger Logger: the logging instance
502 :param file_dir str: the directory where to put the files
503 :param binaries_dir_name str: the name of the repository where the binaries
505 :param exe_name str: if given generate a launcher executing exe_name
506 :return: the list of path of the produced environment files
510 # set base mode to "no" for the archive - save current mode to restore it at the end
511 if "base" in config.APPLICATION:
512 base_setting = config.APPLICATION.base
514 base_setting = "maybe"
515 config.APPLICATION.base = "no"
517 # create an environment file writer
518 writer = src.environment.FileEnvWriter(config, logger, file_dir, src_root=None)
520 if src.architecture.is_windows():
522 filename = "env_launch.bat"
525 filename = "env_launch.sh"
528 filename = os.path.basename(exe_name)
531 filepath = writer.write_env_file(
532 filename, False, shell, for_package=binaries_dir_name # for launch
535 # Little hack to put out_dir_Path as environment variable
536 if src.architecture.is_windows():
537 src.replace_in_file(filepath, '"out_dir_Path', '"%out_dir_Path%')
538 src.replace_in_file(filepath, "=out_dir_Path", "=%out_dir_Path%")
539 src.replace_in_file(filepath, ";out_dir_Path", ";%out_dir_Path%")
541 src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}')
542 src.replace_in_file(filepath, ":out_dir_Path", ":${out_dir_Path}")
543 src.replace_in_file(filepath, ";out_dir_Path", ";${out_dir_Path}")
546 if src.architecture.is_windows():
547 cmd = "\n\nrem Launch exe with user arguments\n%s " % exe_name + "%*"
549 cmd = '\n\n# Launch exe with user arguments\n%s "$*"' % exe_name
550 with open(filepath, "a") as exe_launcher:
551 exe_launcher.write(cmd)
553 # change the rights in order to make the file executable for everybody
565 # restore modified setting by its initial value
566 config.APPLICATION.base = base_setting
# NOTE(review): elided capture -- the d_sub key loops' headers, the dict
# initialisation of `d`, the chmod block and the return are not visible
# here.  Kept byte-identical; comments only.
571 def produce_install_bin_file(config, logger, file_dir, d_sub, file_name):
572 """Create a bash shell script which do substitutions in BIRARIES dir
573 in order to use it for extra compilations.
575 :param config Config: The global configuration.
576 :param logger Logger: the logging instance
577 :param file_dir str: the directory where to put the files
578 :param d_sub, dict: the dictionnary that contains the substitutions to be done
579 :param file_name str: the name of the install script file
580 :return: the produced file
584 filepath = os.path.join(file_dir, file_name)
585 # open the file and write into it
586 # use codec utf-8 as sat variables are in unicode
587 with codecs.open(filepath, "w", "utf-8") as installbin_file:
588 installbin_template_path = os.path.join(
589 config.VARS.internal_dir, "INSTALL_BIN.template"
592 # build the name of the directory that will contain the binaries
593 binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
594 # build the substitution loop
# The generated shell fragment greps the install tree for each key of
# d_sub and sed-replaces it in place with $(pwd)/<substitution>.
595 loop_cmd = "for f in $(grep -RIl"
597 loop_cmd += " -e " + key
598 loop_cmd += " " + config.INTERNAL.config.install_dir + '); do\n sed -i "\n'
600 loop_cmd += " s?" + key + "?$(pwd)/" + d_sub[key] + "?g\n"
601 loop_cmd += ' " $f\ndone'
604 d["BINARIES_DIR"] = binaries_dir_name
605 d["SUBSTITUTION_LOOP"] = loop_cmd
606 d["INSTALL_DIR"] = config.INTERNAL.config.install_dir
608 # substitute the template and write it in file
609 content = src.template.substitute(installbin_template_path, d)
610 installbin_file.write(content)
611 # change the rights in order to make the file executable for everybody
# NOTE(review): elided capture -- loop "continue" lines, line_to_add
# assignments, ff.close() and the return statement are not visible here.
# Kept byte-identical; comments only.
# NOTE(review): the template file handle from open(...).read() is never
# explicitly closed in the visible lines -- acceptable for CPython but
# worth confirming in the full file.
626 def product_appli_creation_script(config, logger, file_dir, binaries_dir_name):
627 """Create a script that can produce an application (EDF style) in the binary
630 :param config Config: The global configuration.
631 :param logger Logger: the logging instance
632 :param file_dir str: the directory where to put the file
633 :param binaries_dir_name str: the name of the repository where the binaries
635 :return: the path of the produced script file
638 template_name = "create_appli.py.for_bin_packages.template"
639 template_path = os.path.join(config.VARS.internal_dir, template_name)
640 text_to_fill = open(template_path, "r").read()
641 text_to_fill = text_to_fill.replace("TO BE FILLED 1", '"' + binaries_dir_name + '"')
644 for product_name in get_SALOME_modules(config):
645 product_info = src.product.get_product_config(config, product_name)
647 if src.product.product_is_smesh_plugin(product_info):
650 if "install_dir" in product_info and bool(product_info.install_dir):
651 if src.product.product_is_cpp(product_info):
# cpp products contribute one <module .../> XML line per component
653 for cpp_name in src.product.get_product_components(product_info):
655 '<module name="' + cpp_name + '" gui="yes" path="\'\'\' + '
656 'os.path.join(dir_bin_name, "' + cpp_name + "\") + '''\"/>"
661 '<module name="' + product_name + '" gui="yes" path="\'\'\' + '
662 'os.path.join(dir_bin_name, "' + product_name + "\") + '''\"/>"
664 text_to_add += line_to_add + "\n"
666 filled_text = text_to_fill.replace("TO BE FILLED 2", text_to_add)
668 tmp_file_path = os.path.join(file_dir, "create_appli.py")
669 ff = open(tmp_file_path, "w")
670 ff.write(filled_text)
673 # change the rights in order to make the file executable for everybody
# NOTE(review): elided capture -- the filter condition header
# ("if (" around original line 707), "continue" lines, the tarfile close
# and the return are not visible here.  Kept byte-identical.
688 def bin_products_archives(config, logger, only_vcs):
689 """Prepare binary packages for all products
690 :param config Config: The global configuration.
691 :return: the error status
695 logger.write("Make %s binary archives\n" % config.VARS.dist)
696 # Get the default directory where to put the packages
697 binpackage_path = os.path.join(config.APPLICATION.workdir, "PACKAGE", "products")
698 src.ensure_path_exists(binpackage_path)
699 # Get the list of product installation to add to the archive
700 l_products_name = sorted(config.APPLICATION.products.keys())
701 l_product_info = src.product.get_products_infos(l_products_name, config)
702 # first loop on products : filter products, analyse properties,
703 # and store the information that will be used to create the archive in the second loop
704 l_not_installed = [] # store not installed products for warning at the end
705 for prod_name, prod_info in l_product_info:
706 # ignore the native and fixed products for install directories
708 src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes"
709 or src.product.product_is_native(prod_info)
710 or src.product.product_is_fixed(prod_info)
711 or not src.product.product_compiles(prod_info)
714 if only_vcs and not src.product.product_is_vcs(prod_info):
716 if not src.product.check_installation(config, prod_info):
717 l_not_installed.append(prod_name)
718 continue # product is not installed, we skip it
719 # prepare call to make_bin_archive
720 path_targz_prod = os.path.join(
724 + prod_info.version.replace("/", "_")
729 targz_prod = tarfile.open(path_targz_prod, mode="w:gz")
730 bin_path = prod_info.install_dir
731 targz_prod.add(bin_path)
733 # Python program to find MD5 hash value of a file
# md5 here is an integrity checksum for the produced archive, not a
# security measure.
736 with open(path_targz_prod, "rb") as f:
737 bytes = f.read() # read file as bytes
738 readable_hash = hashlib.md5(bytes).hexdigest()
739 with open(path_targz_prod + ".md5", "w") as md5sum:
741 "%s %s" % (readable_hash, os.path.basename(path_targz_prod))
744 " archive : %s (md5sum = %s)\n" % (path_targz_prod, readable_hash)
# NOTE(review): elided capture -- many interior lines ("if (" headers,
# "continue"/"else:" lines, d_products initialisation, logger.write calls,
# closing parens and the final return) are not visible here.  Kept
# byte-identical; comments only.
750 def binary_package(config, logger, options, tmp_working_dir):
751 """Prepare a dictionary that stores all the needed directories and files to
752 add in a binary package.
754 :param config Config: The global configuration.
755 :param logger Logger: the logging instance
756 :param options OptResult: the options of the launched command
757 :param tmp_working_dir str: The temporary local directory containing some
758 specific directories or files needed in the
760 :return: the dictionary that stores all the needed directories and files to
761 add in a binary package.
762 {label : (path_on_local_machine, path_in_archive)}
766 # Get the list of product installation to add to the archive
767 l_products_name = sorted(config.APPLICATION.products.keys())
768 l_product_info = src.product.get_products_infos(l_products_name, config)
770 # suppress compile time products for binaries-only archives
771 if not options.sources:
772 update_config(config, logger, "compile_time", "yes")
777 l_sources_not_present = []
778 generate_mesa_launcher = False # a flag to know if we generate a mesa launcher
780 "APPLICATION" in config
781 and "properties" in config.APPLICATION
782 and "mesa_launcher_in_package" in config.APPLICATION.properties
783 and config.APPLICATION.properties.mesa_launcher_in_package == "yes"
785 generate_mesa_launcher = True
787 # first loop on products : filter products, analyse properties,
788 # and store the information that will be used to create the archive in the second loop
789 for prod_name, prod_info in l_product_info:
790 # skip product with property not_in_package set to yes
791 if src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes":
794 # Add the sources of the products that have the property
795 # sources_in_package : "yes"
796 if src.get_property_in_product_cfg(prod_info, "sources_in_package") == "yes":
797 if os.path.exists(prod_info.source_dir):
798 l_source_dir.append((prod_name, prod_info.source_dir))
800 l_sources_not_present.append(prod_name)
802 # ignore the native and fixed products for install directories
804 src.product.product_is_native(prod_info)
805 or src.product.product_is_fixed(prod_info)
806 or not src.product.product_compiles(prod_info)
810 # products with single_dir property will be installed in the PRODUCTS directory of the archive
811 is_single_dir = src.appli_test_property(
812 config, "single_install_dir", "yes"
813 ) and src.product.product_test_property(prod_info, "single_install_dir", "yes")
814 if src.product.check_installation(config, prod_info):
815 l_install_dir.append(
819 prod_info.install_dir,
821 prod_info.install_mode,
825 l_not_installed.append(prod_name)
827 # Add also the cpp generated modules (if any)
828 if src.product.product_is_cpp(prod_info):
830 for name_cpp in src.product.get_product_components(prod_info):
831 install_dir = os.path.join(
832 config.APPLICATION.workdir,
833 config.INTERNAL.config.install_dir,
836 if os.path.exists(install_dir):
837 l_install_dir.append(
838 (name_cpp, name_cpp, install_dir, False, "value")
841 l_not_installed.append(name_cpp)
843 # check the name of the directory that (could) contains the binaries
844 # from previous detar
845 binaries_from_detar = os.path.join(
846 config.APPLICATION.workdir, config.INTERNAL.config.binary_dir + config.VARS.dist
848 if os.path.exists(binaries_from_detar):
851 WARNING: existing binaries directory from previous detar installation:
853 To make new package from this, you have to:
854 1) install binaries in INSTALL directory with the script "install_bin.sh"
855 see README file for more details
856 2) or recompile everything in INSTALL with "sat compile" command
857 this step is long, and requires some linux packages to be installed
860 % binaries_from_detar
863 # Print warning or error if there are some missing products
864 if len(l_not_installed) > 0:
865 text_missing_prods = ""
866 for p_name in l_not_installed:
867 text_missing_prods += " - " + p_name + "\n"
868 if not options.force_creation:
869 msg = _("ERROR: there are missing product installations:")
871 "%s\n%s" % (src.printcolors.printcError(msg), text_missing_prods), 1
873 raise src.SatException(msg)
875 msg = _("WARNING: there are missing products installations:")
877 "%s\n%s" % (src.printcolors.printcWarning(msg), text_missing_prods), 1
880 # Do the same for sources
881 if len(l_sources_not_present) > 0:
882 text_missing_prods = ""
883 for p_name in l_sources_not_present:
884 text_missing_prods += "-" + p_name + "\n"
885 if not options.force_creation:
886 msg = _("ERROR: there are missing product sources:")
888 "%s\n%s" % (src.printcolors.printcError(msg), text_missing_prods), 1
890 raise src.SatException(msg)
892 msg = _("WARNING: there are missing products sources:")
894 "%s\n%s" % (src.printcolors.printcWarning(msg), text_missing_prods), 1
897 # construct the name of the directory that will contain the binaries
898 if src.architecture.is_windows():
899 binaries_dir_name = config.INTERNAL.config.binary_dir
901 binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
902 # construct the correlation table between the product names, there
903 # actual install directories and there install directory in archive
912 prod_base_name = os.path.basename(install_dir)
913 if install_mode == "base":
914 # case of a products installed in base.
915 # because the archive is in base:no mode, the name of the install dir is different inside archive
916 # we set it to the product name or by PRODUCTS if single-dir
918 prod_base_name = config.INTERNAL.config.single_install_dir
920 prod_base_name = prod_info_name
921 path_in_archive = os.path.join(binaries_dir_name, prod_base_name)
922 d_products[prod_name + " (bin)"] = (install_dir, path_in_archive)
924 for prod_name, source_dir in l_source_dir:
925 path_in_archive = os.path.join("SOURCES", prod_name)
926 d_products[prod_name + " (sources)"] = (source_dir, path_in_archive)
928 # create an archives of compilation logs, and insert it into the tarball
929 logpath = os.path.join(config.APPLICATION.workdir, "LOGS")
930 path_targz_logs = os.path.join(tmp_working_dir, "logs.tgz")
931 tar_log = tarfile.open(path_targz_logs, mode="w:gz")
932 tar_log.add(logpath, arcname="LOGS")
934 d_products["LOGS"] = (path_targz_logs, "logs.tgz")
936 # for packages of SALOME applications including KERNEL,
937 # we produce a salome launcher or a virtual application (depending on salome version)
938 if "KERNEL" in config.APPLICATION.products:
939 VersionSalome = src.get_salome_version(config)
940 # Case where SALOME has the launcher that uses the SalomeContext API
941 if VersionSalome >= MMP([7, 3, 0]):
942 # create the relative launcher and add it to the files to add
943 launcher_name = src.get_launcher_name(config)
944 launcher_package = produce_relative_launcher(
945 config, logger, tmp_working_dir, launcher_name, binaries_dir_name
947 d_products["launcher"] = (launcher_package, launcher_name)
949 # if the application contains mesa products, we generate in addition to the
950 # classical salome launcher a launcher using mesa and called mesa_salome
951 # (the mesa launcher will be used for remote usage through ssh).
952 if generate_mesa_launcher:
953 # if there is one : store the use_mesa property
954 restore_use_mesa_option = None
956 "properties" in config.APPLICATION
957 and "use_mesa" in config.APPLICATION.properties
959 restore_use_mesa_option = config.APPLICATION.properties.use_mesa
961 # activate mesa property, and generate a mesa launcher
962 src.activate_mesa_property(config) # activate use_mesa property
963 launcher_mesa_name = "mesa_" + launcher_name
964 launcher_package_mesa = produce_relative_launcher(
971 d_products["launcher (mesa)"] = (
972 launcher_package_mesa,
976 # if there was a use_mesa value, we restore it
977 # else we set it to the default value "no"
978 if restore_use_mesa_option != None:
979 config.APPLICATION.properties.use_mesa = restore_use_mesa_option
981 config.APPLICATION.properties.use_mesa = "no"
984 # if we mix binaries and sources, we add a copy of the launcher,
985 # prefixed with "bin",in order to avoid clashes
986 launcher_copy_name = "bin" + launcher_name
987 launcher_package_copy = produce_relative_launcher(
994 d_products["launcher (copy)"] = (
995 launcher_package_copy,
999 # Provide a script for the creation of an application EDF style
1000 appli_script = product_appli_creation_script(
1001 config, logger, tmp_working_dir, binaries_dir_name
1004 d_products["appli script"] = (appli_script, "create_appli.py")
1006 # Put also the environment file
1007 env_file = produce_relative_env_files(
1008 config, logger, tmp_working_dir, binaries_dir_name
1011 if src.architecture.is_windows():
1012 filename = "env_launch.bat"
1014 filename = "env_launch.sh"
1015 d_products["environment file"] = (env_file, filename)
1017 # If option exe, produce an extra launcher based on specified exe
1019 exe_file = produce_relative_env_files(
1020 config, logger, tmp_working_dir, binaries_dir_name, options.exe
1023 if src.architecture.is_windows():
1024 filename = os.path.basename(options.exe) + ".bat"
1026 filename = os.path.basename(options.exe) + ".sh"
1027 d_products["exe file"] = (exe_file, filename)
# NOTE(review): elided capture -- the d_archives_vcs initialisation for the
# with_vcs branch, the try/finally around the chdir/symlink section and
# the final return are not visible here.  Kept byte-identical.
1032 def source_package(sat, config, logger, options, tmp_working_dir):
1033 """Prepare a dictionary that stores all the needed directories and files to
1034 add in a source package.
1036 :param config Config: The global configuration.
1037 :param logger Logger: the logging instance
1038 :param options OptResult: the options of the launched command
1039 :param tmp_working_dir str: The temporary local directory containing some
1040 specific directories or files needed in the
1042 :return: the dictionary that stores all the needed directories and files to
1043 add in a source package.
1044 {label : (path_on_local_machine, path_in_archive)}
1049 # Get all the products that are prepared using an archive
1050 # unless ftp mode is specified (in this case the user of the
1051 # archive will get the sources through the ftp mode of sat prepare
1053 logger.write("Find archive products ... ")
1054 d_archives, l_pinfo_vcs = get_archives(config, logger)
1055 logger.write("Done\n")
1058 if not options.with_vcs and len(l_pinfo_vcs) > 0:
1059 # Make archives with the products that are not prepared using an archive
1060 # (git, cvs, svn, etc)
1061 logger.write("Construct archives for vcs products ... ")
1062 d_archives_vcs = get_archives_vcs(
1063 l_pinfo_vcs, sat, config, logger, tmp_working_dir
1065 logger.write("Done\n")
1068 logger.write("Create the project ... ")
1069 d_project = create_project_for_src_package(
1070 config, tmp_working_dir, options.with_vcs, options.ftp
1072 logger.write("Done\n")
1075 tmp_sat = add_salomeTools(config, tmp_working_dir)
1076 d_sat = {"salomeTools": (tmp_sat, "sat")}
1078 # Add a sat symbolic link if not win
1079 if not src.architecture.is_windows():
1083 # In the jobs, os.getcwd() can fail
1084 t = config.LOCAL.workdir
1085 os.chdir(tmp_working_dir)
1087 # create a symlink, to avoid reference with "salomeTool/.."
1089 if os.path.lexists("ARCHIVES"):
1090 os.remove("ARCHIVES")
1091 os.symlink("../ARCHIVES", "ARCHIVES")
1094 d_sat["sat archive link"] = (
1095 os.path.join(tmp_working_dir, "PROJECT", "ARCHIVES"),
1096 os.path.join("PROJECT", "ARCHIVES"),
1099 d_source = src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
# NOTE(review): elided capture -- the d_archives/l_pinfo_vcs
# initialisations, "continue" lines and several "if (" headers are not
# visible here.  Kept byte-identical; comments only.
1103 def get_archives(config, logger):
1104 """Find all the products that are get using an archive and all the products
1105 that are get using a vcs (git, cvs, svn) repository.
1107 :param config Config: The global configuration.
1108 :param logger Logger: the logging instance
1109 :return: the dictionary {name_product :
1110 (local path of its archive, path in the package of its archive )}
1111 and the list of specific configuration corresponding to the vcs
1113 :rtype: (Dict, List)
1115 # Get the list of product informations
1116 l_products_name = config.APPLICATION.products.keys()
1117 l_product_info = src.product.get_products_infos(l_products_name, config)
1120 for p_name, p_info in l_product_info:
1121 # skip product with property not_in_package set to yes
1122 if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
1124 # ignore the native and fixed products
1125 if src.product.product_is_native(p_info) or src.product.product_is_fixed(
1129 if p_info.get_source == "archive":
1130 archive_path = p_info.archive_info.archive_name
1131 archive_name = os.path.basename(archive_path)
1132 d_archives[p_name] = (archive_path, os.path.join(ARCHIVE_DIR, archive_name))
1133 if src.appli_test_property(
1134 config, "pip", "yes"
1135 ) and src.product.product_test_property(p_info, "pip", "yes"):
1136 # if pip mode is activated, and product is managed by pip
1137 pip_wheels_dir = os.path.join(config.LOCAL.archive_dir, "wheels")
# the wheel filename pattern uses archive_prefix when the product
# declares one, otherwise the product name
1139 "archive_prefix" in p_info.archive_info
1140 and p_info.archive_info.archive_prefix
1142 pip_wheel_pattern = os.path.join(
1144 "%s-%s*" % (p_info.archive_info.archive_prefix, p_info.version),
1147 pip_wheel_pattern = os.path.join(
1148 pip_wheels_dir, "%s-%s*" % (p_info.name, p_info.version)
1150 pip_wheel_path = glob.glob(pip_wheel_pattern)
1151 msg_pip_not_found = (
1152 "Error in get_archive, pip wheel for "
1153 "product %s-%s was not found in %s directory"
1155 msg_pip_two_or_more = (
1156 "Error in get_archive, several pip wheels for "
1157 "product %s-%s were found in %s directory"
1159 if len(pip_wheel_path) == 0:
1160 raise src.SatException(
1162 % (p_info.name, p_info.version, pip_wheels_dir)
1164 if len(pip_wheel_path) > 1:
1165 raise src.SatException(
1167 % (p_info.name, p_info.version, pip_wheels_dir)
1170 pip_wheel_name = os.path.basename(pip_wheel_path[0])
1171 d_archives[p_name + " (pip wheel)"] = (
1173 os.path.join(ARCHIVE_DIR, "wheels", pip_wheel_name),
1176 # this product is not managed by archive,
1177 # an archive of the vcs directory will be created by get_archive_vcs
1178 l_pinfo_vcs.append((p_name, p_info))
1180 return d_archives, l_pinfo_vcs
# Copy the running salomeTools tree into the temporary package directory and
# reset its data/local.pyconf; stray *.pyconf / *.txt files at the sat root
# (e.g. job descriptions) are removed so they do not leak into the package.
# NOTE(review): numbering gaps mean some lines (docstring parts, the likely
# `ff.close()` around original line 1214) are missing from this extract.
1183 def add_salomeTools(config, tmp_working_dir):
1184 """Prepare a version of salomeTools that has a specific local.pyconf file
1185 configured for a source package.
1187 :param config Config: The global configuration.
1188 :param tmp_working_dir str: The temporary local directory containing some
1189 specific directories or files needed in the
1191 :return: The path to the local salomeTools directory to add in the package
1194 # Copy sat in the temporary working directory
1195 sat_tmp_path = src.Path(os.path.join(tmp_working_dir, "salomeTools"))
1196 sat_running_path = src.Path(config.VARS.salometoolsway)
1197 sat_running_path.copy(sat_tmp_path)
1199 # Update the local.pyconf file that contains the path to the project
1200 local_pyconf_name = "local.pyconf"
1201 local_pyconf_dir = os.path.join(tmp_working_dir, "salomeTools", "data")
1202 local_pyconf_file = os.path.join(local_pyconf_dir, local_pyconf_name)
1203 # Remove the .pyconf file in the root directory of salomeTools if there is
1204 # any. (For example when launching jobs, a pyconf file describing the jobs
1205 # can be here and is not useful)
1206 files_or_dir_SAT = os.listdir(os.path.join(tmp_working_dir, "salomeTools"))
1207 for file_or_dir in files_or_dir_SAT:
1208 if file_or_dir.endswith(".pyconf") or file_or_dir.endswith(".txt"):
1209 file_path = os.path.join(tmp_working_dir, "salomeTools", file_or_dir)
1210 os.remove(file_path)
# overwrite data/local.pyconf with the LOCAL_TEMPLATE defined at module level
1212 ff = open(local_pyconf_file, "w")
1213 ff.write(LOCAL_TEMPLATE)
1216 return sat_tmp_path.path
# Turn every vcs-managed product into a plain tar.gz archive: run `sat source`
# into a temporary working dir (not the user's SOURCES) and tar the result.
# NOTE(review): numbering gaps mean some lines are missing from this extract,
# notably the `d_archives_vcs = {}` initialisation (around original line 1268).
1219 def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir):
1220 """For sources package that require that all products are get using an
1221 archive, one has to create some archive for the vcs products.
1222 So this method calls the clean and source command of sat and then create
1225 :param l_pinfo_vcs List: The list of specific configuration corresponding to
1227 :param sat Sat: The Sat instance that can be called to clean and source the
1229 :param config Config: The global configuration.
1230 :param logger Logger: the logging instance
1231 :param tmp_working_dir str: The temporary local directory containing some
1232 specific directories or files needed in the
1234 :return: the dictionary that stores all the archives to add in the source
1235 package. {label : (path_on_local_machine, path_in_archive)}
1238 # clean the source directory of all the vcs products, then use the source
1239 # command and thus construct an archive that will not contain the patches
1240 l_prod_names = [pn for pn, __ in l_pinfo_vcs]
# deliberately disabled branch (kept for history): cleaning user/SOURCES was
# judged dangerous; the work is done in tmp_local_working_dir instead
1241 if False: # clean is dangerous in user/SOURCES, fixed in tmp_local_working_dir
1242 logger.write(_("\nclean sources\n"))
1243 args_clean = config.VARS.application
1244 args_clean += " --sources --products "
1245 args_clean += ",".join(l_prod_names)
1246 logger.write("WARNING: get_archives_vcs clean\n '%s'\n" % args_clean, 1)
1247 sat.clean(args_clean, batch=True, verbose=0, logger_add_link=logger)
1250 logger.write(_("get sources\n"))
1251 args_source = config.VARS.application
1252 args_source += " --products "
1253 args_source += ",".join(l_prod_names)
# save the real workdir so it can be restored after sourcing (see line 1273)
1254 svgDir = sat.cfg.APPLICATION.workdir
1255 tmp_local_working_dir = os.path.join(
1256 sat.cfg.APPLICATION.workdir, "tmp_package"
1257 ) # to avoid too much big files in /tmp
1258 sat.cfg.APPLICATION.workdir = tmp_local_working_dir
1259 # DBG.write("SSS sat config.APPLICATION.workdir", sat.cfg.APPLICATION, True)
1260 # DBG.write("sat config id", id(sat.cfg), True)
1261 # shit as config is not same id() as for sat.source()
1262 # sat.source(args_source, batch=True, verbose=5, logger_add_link = logger)
# call the source command directly so it uses runner.cfg as reference config
1265 source.run(args_source, sat, logger) # use this mode as runner.cfg reference
1267 # make the new archives
1269 for pn, pinfo in l_pinfo_vcs:
1270 path_archive = make_archive(pn, pinfo, tmp_local_working_dir)
1271 logger.write("make archive vcs '%s'\n" % path_archive)
1272 d_archives_vcs[pn] = (path_archive, os.path.join(ARCHIVE_DIR, pn + ".tgz"))
1273 sat.cfg.APPLICATION.workdir = svgDir
1274 # DBG.write("END sat config", sat.cfg.APPLICATION, True)
1275 return d_archives_vcs
def make_bin_archive(prod_name, prod_info, where):
    """Create a binary archive (tar.gz) of a product from its install directory.

    :param prod_name str: The name of the product.
    :param prod_info Config: The specific configuration corresponding to the
                             product (its ``install_dir`` attribute is read).
    :param where str: The path of the repository where to put the resulting
                      archive.
    :return: The path of the resulting archive.
    :rtype: str
    """
    path_targz_prod = os.path.join(where, prod_name + PACKAGE_EXT)
    tar_prod = tarfile.open(path_targz_prod, mode="w:gz")
    try:
        bin_path = prod_info.install_dir
        # Use the product name as the member name inside the archive.  The
        # previous code passed arcname=path_targz_prod (the archive's own
        # output path), which produced a bogus member name; make_archive uses
        # arcname=prod_name, so be consistent with it.
        tar_prod.add(bin_path, arcname=prod_name)
    finally:
        # always release the file handle, even if add() raises
        tar_prod.close()
    return path_targz_prod
def make_archive(prod_name, prod_info, where):
    """Create a source archive (tar.gz) of a product from its source directory.

    VCS bookkeeping directories and ignored extensions are filtered out via
    the module-level exclude_VCS_and_extensions helpers.

    :param prod_name str: The name of the product.
    :param prod_info Config: The specific configuration corresponding to the
                             product (its ``source_dir`` attribute is read).
    :param where str: The path of the repository where to put the resulting
                      archive.
    :return: The path of the resulting archive.
    :rtype: str
    """
    path_targz_prod = os.path.join(where, prod_name + PACKAGE_EXT)
    tar_prod = tarfile.open(path_targz_prod, mode="w:gz")
    try:
        local_path = prod_info.source_dir
        if old_python:
            # python <= 2.6: TarFile.add only supports the (long deprecated)
            # exclude= callback instead of filter=
            tar_prod.add(
                local_path, arcname=prod_name, exclude=exclude_VCS_and_extensions_26
            )
        else:
            tar_prod.add(
                local_path, arcname=prod_name, filter=exclude_VCS_and_extensions
            )
    finally:
        # always release the archive handle, even if add() raises
        tar_prod.close()
    return path_targz_prod
# Build, inside the temporary working dir, a full sat "project" tree
# (products/, applications/, compil/env/post scripts, patches) describing the
# application, so a source package is self-contained.
# NOTE(review): numbering gaps mean source lines are missing from this extract
# (e.g. the directory-list opener around line 1344-1345, patches_tmp_dir in the
# list, the ff.write/ff.close calls around 1363-1375 and the final return).
1321 def create_project_for_src_package(config, tmp_working_dir, with_vcs, with_ftp):
1322 """Create a specific project for a source package.
1324 :param config Config: The global configuration.
1325 :param tmp_working_dir str: The temporary local directory containing some
1326 specific directories or files needed in the
1328 :param with_vcs boolean: True if the package is with vcs products (not
1329 transformed into archive products)
1330 :param with_ftp boolean: True if the package use ftp servers to get archives
1331 :return: The dictionary
1332 {"project" : (produced project, project path in the archive)}
1336 # Create in the working temporary directory the full project tree
1337 project_tmp_dir = os.path.join(tmp_working_dir, PROJECT_DIR)
1338 products_pyconf_tmp_dir = os.path.join(project_tmp_dir, "products")
1339 compil_scripts_tmp_dir = os.path.join(project_tmp_dir, "products", "compil_scripts")
1340 post_scripts_tmp_dir = os.path.join(project_tmp_dir, "products", "post_scripts")
1341 env_scripts_tmp_dir = os.path.join(project_tmp_dir, "products", "env_scripts")
1342 patches_tmp_dir = os.path.join(project_tmp_dir, "products", "patches")
1343 application_tmp_dir = os.path.join(project_tmp_dir, "applications")
# (lines 1344-1345 missing -- presumably the `for directory in [` opener)
1346 compil_scripts_tmp_dir,
1347 env_scripts_tmp_dir,
1348 post_scripts_tmp_dir,
1350 application_tmp_dir,
1352 src.ensure_path_exists(directory)
1354 # Create the pyconf that contains the information of the project
1355 project_pyconf_name = "project.pyconf"
1356 project_pyconf_file = os.path.join(project_tmp_dir, project_pyconf_name)
1357 ff = open(project_pyconf_file, "w")
1358 ff.write(PROJECT_TEMPLATE)
# optionally append the ftp servers list to the generated project.pyconf
1359 if with_ftp and len(config.PATHS.ARCHIVEFTP) > 0:
1360 ftp_path = 'ARCHIVEFTP : "' + config.PATHS.ARCHIVEFTP[0]
1361 for ftpserver in config.PATHS.ARCHIVEFTP[1:]:
1362 ftp_path = ftp_path + ":" + ftpserver
1364 ff.write("# ftp servers where to search for prerequisite archives\n")
1366 # add licence paths if any
1367 if len(config.PATHS.LICENCEPATH) > 0:
1368 licence_path = 'LICENCEPATH : "' + config.PATHS.LICENCEPATH[0]
1369 for path in config.PATHS.LICENCEPATH[1:]:
1370 licence_path = licence_path + ":" + path
1372 ff.write("\n# Where to search for licences\n")
1373 ff.write(licence_path)
1377 # Loop over the products to get their pyconf and all the scripts
1378 # (compilation, environment, patches)
1379 # and create the pyconf file to add to the project
1380 lproducts_name = config.APPLICATION.products.keys()
1381 l_products = src.product.get_products_infos(lproducts_name, config)
1382 for p_name, p_info in l_products:
1383 # skip product with property not_in_package set to yes
1384 if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
1386 find_product_scripts_and_pyconf(
1391 compil_scripts_tmp_dir,
1392 env_scripts_tmp_dir,
1393 post_scripts_tmp_dir,
1395 products_pyconf_tmp_dir,
1398 # for the application pyconf, we write directly the config
1399 # don't search for the original pyconf file
1400 # to avoid problems with overwrite sections and rm_products key
1401 write_application_pyconf(config, application_tmp_dir)
1403 d_project = {"project": (project_tmp_dir, PROJECT_DIR)}
# For one product: copy its compilation/env/post scripts and patches into the
# temporary project tree, rewrite its pyconf (force archive mode when the
# package is built without vcs, substitute git urls), and save the result as
# <products_pyconf_tmp_dir>/<p_name>.pyconf.
# NOTE(review): numbering gaps mean parameter lines (1408-1411, 1415) and parts
# of several conditions are missing from this extract -- the logic below is
# intricate pyconf mutation; confirm against the complete file before editing.
1407 def find_product_scripts_and_pyconf(
1412 compil_scripts_tmp_dir,
1413 env_scripts_tmp_dir,
1414 post_scripts_tmp_dir,
1416 products_pyconf_tmp_dir,
1418 """Create a specific pyconf file for a given product. Get its environment
1419 script, its compilation script and patches and put it in the temporary
1420 working directory. This method is used in the source package in order to
1421 construct the specific project.
1423 :param p_name str: The name of the product.
1424 :param p_info Config: The specific configuration corresponding to the
1426 :param config Config: The global configuration.
1427 :param with_vcs boolean: True if the package is with vcs products (not
1428 transformed into archive products)
1429 :param compil_scripts_tmp_dir str: The path to the temporary compilation
1430 scripts directory of the project.
1431 :param env_scripts_tmp_dir str: The path to the temporary environment script
1432 directory of the project.
1433 :param post_scripts_tmp_dir str: The path to the temporary post-processing script
1434 directory of the project.
1435 :param patches_tmp_dir str: The path to the temporary patch scripts
1436 directory of the project.
1437 :param products_pyconf_tmp_dir str: The path to the temporary product
1438 scripts directory of the project.
1441 # read the pyconf of the product
1442 product_pyconf_cfg = src.pyconf.Config(p_info.from_file)
1444 # find the compilation script if any
1445 if src.product.product_has_script(p_info):
1446 compil_script_path = src.Path(p_info.compil_script)
1447 compil_script_path.copy(compil_scripts_tmp_dir)
1449 # find the environment script if any
1450 if src.product.product_has_env_script(p_info):
1451 env_script_path = src.Path(p_info.environ.env_script)
1452 env_script_path.copy(env_scripts_tmp_dir)
1454 # find the post script if any
1455 if src.product.product_has_post_script(p_info):
1456 post_script_path = src.Path(p_info.post_script)
1457 post_script_path.copy(post_scripts_tmp_dir)
1459 # find the patches if any
1460 if src.product.product_has_patches(p_info):
1461 patches = src.pyconf.Sequence()
1462 for patch_path in p_info.patches:
1463 p_path = src.Path(patch_path)
1464 p_path.copy(patches_tmp_dir)
1465 patches.append(os.path.basename(patch_path), "")
1467 if (not with_vcs) and src.product.product_is_vcs(p_info):
1468 # in non vcs mode, if the product is not archive, then make it become archive.
1470 # depending upon the incremental mode, select impacted sections
1472 "properties" in p_info
1473 and "incremental" in p_info.properties
1474 and p_info.properties.incremental == "yes"
1480 p_info.section + "_win",
1483 sections = [p_info.section]
1484 for section in sections:
1486 section in product_pyconf_cfg
1487 and "get_source" in product_pyconf_cfg[section]
1490 "sat package set archive mode to archive for product %s and section %s"
1493 product_pyconf_cfg[section].get_source = "archive"
1494 if not "archive_info" in product_pyconf_cfg[section]:
1495 product_pyconf_cfg[section].addMapping(
1496 "archive_info", src.pyconf.Mapping(product_pyconf_cfg), ""
1498 product_pyconf_cfg[section].archive_info.archive_name = (
1499 p_info.name + ".tgz"
1502 # save git repositories for vcs products, even if archive is not in VCS mode
1503 # in this case the user will be able to change get_source flag and work with git
1504 if src.product.product_is_vcs(p_info):
1505 # in vcs mode we must replace explicitely the git server url
1506 # (or it will not be found later because project files are not exported in archives)
1507 for section in product_pyconf_cfg:
1508 # replace in all sections of the product pyconf the git repo definition by its substitued value (found in p_info)
1509 if "git_info" in product_pyconf_cfg[section]:
1510 for repo in product_pyconf_cfg[section].git_info:
1511 if repo in p_info.git_info:
1512 product_pyconf_cfg[section].git_info[repo] = p_info.git_info[
1516 # write the pyconf file to the temporary project location
1517 product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir, p_name + ".pyconf")
1518 ff = open(product_tmp_pyconf_path, "w")
1519 ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1520 product_pyconf_cfg.__save__(ff, 1)
# Serialize config.APPLICATION (deep-copied, workdir rewritten to a
# $LOCAL.workdir reference) into <application_tmp_dir>/<application>.pyconf.
# NOTE(review): a few lines are missing from this extract, notably the
# assignment behind the "set base mode to 'no'" comment (around line 1543).
1524 def write_application_pyconf(config, application_tmp_dir):
1525 """Write the application pyconf file in the specific temporary
1526 directory containing the specific project of a source package.
1528 :param config Config: The global configuration.
1529 :param application_tmp_dir str: The path to the temporary application
1530 scripts directory of the project.
1532 application_name = config.VARS.application
1533 # write the pyconf file to the temporary application location
1534 application_tmp_pyconf_path = os.path.join(
1535 application_tmp_dir, application_name + ".pyconf"
1537 with open(application_tmp_pyconf_path, "w") as f:
1538 f.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1539 res = src.pyconf.Config()
1540 app = src.pyconf.deepCopyMapping(config.APPLICATION)
1542 # set base mode to "no" for the archive
1545 # Change the workdir
# use a $LOCAL.workdir reference so the unpacked archive resolves its own dir
1546 app.workdir = src.pyconf.Reference(app, src.pyconf.DOLLAR, "LOCAL.workdir")
1547 res.addMapping("APPLICATION", app, "")
1548 res.__save__(f, evaluated=False)
# Build the file map for a salomeTools-only package: the whole sat tree plus
# a freshly reset data/local.pyconf (defaults restored, optional project path
# appended).
# NOTE(review): numbering gaps mean some lines are missing from this extract,
# e.g. the `d_project = {}` initialisation (around 1561-1563), the condition
# guarding the project path (around 1580) and the likely ff.close().
1551 def sat_package(config, tmp_working_dir, options, logger):
1552 """Prepare a dictionary that stores all the needed directories and files to
1553 add in a salomeTool package.
1555 :param tmp_working_dir str: The temporary local working directory
1556 :param options OptResult: the options of the launched command
1557 :return: the dictionary that stores all the needed directories and files to
1558 add in a salomeTool package.
1559 {label : (path_on_local_machine, path_in_archive)}
1564 # we include sat himself
1565 d_project["all_sat"] = (config.VARS.salometoolsway, "")
1567 # and we overwrite local.pyconf with a clean version.
1568 local_pyconf_tmp_path = os.path.join(tmp_working_dir, "local.pyconf")
1569 local_file_path = os.path.join(config.VARS.datadir, "local.pyconf")
1570 local_cfg = src.pyconf.Config(local_file_path)
# reset every LOCAL field to its default so user-machine paths do not leak
# into the package
1571 local_cfg.PROJECTS.project_file_paths = src.pyconf.Sequence(local_cfg.PROJECTS)
1572 local_cfg.LOCAL["base"] = "default"
1573 local_cfg.LOCAL["workdir"] = "default"
1574 local_cfg.LOCAL["log_dir"] = "default"
1575 local_cfg.LOCAL["archive_dir"] = "default"
1576 local_cfg.LOCAL["VCS"] = "None"
1577 local_cfg.LOCAL["tag"] = src.get_salometool_version(config)
1579 # if the archive contains a project, we write its relative path in local.pyconf
1581 project_arch_path = os.path.join(
1582 "projects", options.project, os.path.basename(options.project_file_path)
1584 local_cfg.PROJECTS.project_file_paths.append(project_arch_path, "")
1586 ff = open(local_pyconf_tmp_path, "w")
1587 local_cfg.__save__(ff, 1)
1589 d_project["local.pyconf"] = (local_pyconf_tmp_path, "data/local.pyconf")
# Build the file map for a project package: each project directory
# (applications, products, jobs, machines, optionally archives) plus a
# rewritten "hat" pyconf whose paths become references relative to the
# package layout.
# NOTE(review): the parameter list (lines 1594-1601), the try/except around
# the PROJECTS lookup, the `paths = {` opener and several condition/else lines
# are missing from this extract; this reads as a partial listing only.
1593 def project_package(
1602 """Prepare a dictionary that stores all the needed directories and files to
1603 add in a project package.
1605 :param project_file_path str: The path to the local project.
1606 :param ftp_mode boolean: Do not embed archives, the archive will rely on ftp mode to retrieve them.
1607 :param tmp_working_dir str: The temporary local directory containing some
1608 specific directories or files needed in the
1610 :param embedded_in_sat boolean : the project package is embedded in a sat package
1611 :return: the dictionary that stores all the needed directories and files to
1612 add in a project package.
1613 {label : (path_on_local_machine, path_in_archive)}
1617 # Read the project file and get the directories to add to the package
1620 project_pyconf_cfg = config.PROJECTS.projects.__getattr__(name_project)
# fallback path: the project is not registered in config, read its file directly
1624 WARNING: inexisting config.PROJECTS.projects.%s, try to read now from:\n%s\n"""
1625 % (name_project, project_file_path)
1627 project_pyconf_cfg = src.pyconf.Config(project_file_path)
1628 project_pyconf_cfg.PWD = os.path.dirname(project_file_path)
1631 "APPLICATIONPATH": "applications",
1632 "PRODUCTPATH": "products",
1634 "MACHINEPATH": "machines",
# archives are only embedded when ftp_mode is off (condition line missing here)
1637 paths["ARCHIVEPATH"] = "archives"
1639 # Loop over the project paths and add it
1640 project_file_name = os.path.basename(project_file_path)
1642 if path not in project_pyconf_cfg:
# embedded_in_sat branch: project files live under projects/<name>/ in the sat tree
1645 dest_path = os.path.join("projects", name_project, paths[path])
1646 project_file_dest = os.path.join(
1647 "projects", name_project, project_file_name
1650 dest_path = paths[path]
1651 project_file_dest = project_file_name
1653 # Add the directory to the files to add in the package
1654 d_project[path] = (project_pyconf_cfg[path], dest_path)
1656 # Modify the value of the path in the package
1657 project_pyconf_cfg[path] = src.pyconf.Reference(
1660 'project_path + "/' + paths[path] + '"',
1663 # Modify some values
1664 if "project_path" not in project_pyconf_cfg:
1665 project_pyconf_cfg.addMapping(
1666 "project_path", src.pyconf.Mapping(project_pyconf_cfg), ""
1668 project_pyconf_cfg.project_path = src.pyconf.Reference(
1669 project_pyconf_cfg, src.pyconf.DOLLAR, "PWD"
1671 # we don't want to export these two fields
1672 project_pyconf_cfg.__delitem__("file_path")
1673 project_pyconf_cfg.__delitem__("PWD")
1675 project_pyconf_cfg.__delitem__("ARCHIVEPATH")
1677 # Write the project pyconf file
1678 project_pyconf_tmp_path = os.path.join(tmp_working_dir, project_file_name)
1679 ff = open(project_pyconf_tmp_path, "w")
1680 ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1681 project_pyconf_cfg.__save__(ff, 1)
1683 d_project["Project hat file"] = (project_pyconf_tmp_path, project_file_dest)
# Generate the package README from the internal templates: a common header,
# then one section per selected package kind (binaries, launcher/virtual app,
# sources, project, sat), with OS-specific substitutions.
# NOTE(review): numbering gaps mean parts of the template strings and several
# if/else lines (e.g. the readme_header opener, the options.sources /
# options.project / options.sat conditions and the final return) are missing
# from this extract.
1688 def add_readme(config, options, where):
1689 readme_path = os.path.join(where, "README")
1690 with codecs.open(readme_path, "w", "utf-8") as f:
1692 # templates for building the header
1694 # This package was generated with sat $version
1697 # Distribution : $dist
1699 In the following, $$ROOT represents the directory where you have installed
1700 SALOME (the directory where this file is located).
1703 if src.architecture.is_windows():
1704 readme_header = readme_header.replace("$$ROOT", "%ROOT%")
1705 readme_compilation_with_binaries = """
1707 compilation based on the binaries used as prerequisites
1708 =======================================================
1710 If you fail to compile the complete application (for example because
1711 you are not root on your system and cannot install missing packages), you
1712 may try a partial compilation based on the binaries.
1713 For that it is necessary to copy the binaries from BINARIES to INSTALL,
1714 and do some substitutions on cmake and .la files (replace the build directories
1716 The procedure to do it is:
1717 1) Remove or rename INSTALL directory if it exists
1718 2) Execute the shell script install_bin.sh:
1721 3) Use SalomeTool (as explained in Sources section) and compile only the
1722 modules you need to (with -p option)
1725 readme_header_tpl = string.Template(readme_header)
1726 readme_template_path_bin = os.path.join(
1727 config.VARS.internal_dir, "README_BIN.template"
1729 readme_template_path_bin_launcher = os.path.join(
1730 config.VARS.internal_dir, "README_LAUNCHER.template"
1732 readme_template_path_bin_virtapp = os.path.join(
1733 config.VARS.internal_dir, "README_BIN_VIRTUAL_APP.template"
1735 readme_template_path_src = os.path.join(
1736 config.VARS.internal_dir, "README_SRC.template"
1738 readme_template_path_pro = os.path.join(
1739 config.VARS.internal_dir, "README_PROJECT.template"
1741 readme_template_path_sat = os.path.join(
1742 config.VARS.internal_dir, "README_SAT.template"
1745 # prepare substitution dictionary
1747 d["user"] = config.VARS.user
1748 d["date"] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
1749 d["version"] = src.get_salometool_version(config)
1750 d["dist"] = config.VARS.dist
1751 f.write(readme_header_tpl.substitute(d)) # write the general header (common)
1753 if options.binaries or options.sources:
1754 d["application"] = config.VARS.application
1755 d["BINARIES"] = config.INTERNAL.config.binary_dir
1756 d["SEPARATOR"] = config.VARS.sep
1757 if src.architecture.is_windows():
1758 d["operatingSystem"] = "Windows"
1759 d["PYTHON3"] = "python3"
1760 d["ROOT"] = "%ROOT%"
1762 d["operatingSystem"] = "Linux"
1765 f.write("# Application: " + d["application"] + "\n")
1766 if "KERNEL" in config.APPLICATION.products:
1767 VersionSalome = src.get_salome_version(config)
1768 # Case where SALOME has the launcher that uses the SalomeContext API
1769 if VersionSalome >= MMP([7, 3, 0]):
1770 d["launcher"] = config.APPLICATION.profile.launcher_name
1772 d["virtual_app"] = "runAppli" # this info is not used now)
1774 # write the specific sections
1775 if options.binaries:
1776 f.write(src.template.substitute(readme_template_path_bin, d))
1777 if "virtual_app" in d:
1778 f.write(src.template.substitute(readme_template_path_bin_virtapp, d))
1780 f.write(src.template.substitute(readme_template_path_bin_launcher, d))
1783 f.write(src.template.substitute(readme_template_path_src, d))
1785 if options.binaries and options.sources and not src.architecture.is_windows():
1786 f.write(readme_compilation_with_binaries)
1789 f.write(src.template.substitute(readme_template_path_pro, d))
1792 f.write(src.template.substitute(readme_template_path_sat, d))
def update_config(config, logger, prop, value):
    """Remove from config.APPLICATION.products the products that have the
    property *prop* equal to *value*.

    :param config Config: The global config.
    :param logger Logger: the logging instance
    :param prop str: The property to filter
    :param value str: The value of the property to filter
    """
    # if there is no APPLICATION (ex sat package -t) : nothing to do
    if "APPLICATION" not in config:
        return
    # collect first, then delete: never mutate the mapping while iterating it
    l_product_to_remove = [
        product_name
        for product_name in config.APPLICATION.products.keys()
        if src.get_property_in_product_cfg(
            src.product.get_product_config(config, product_name), prop
        )
        == value
    ]
    for product_name in l_product_to_remove:
        # `del m[k]` is the idiomatic spelling of m.__delitem__(k)
        del config.APPLICATION.products[product_name]
        logger.write(
            "Remove product %s with property %s\n" % (product_name, prop), 5
        )
# NOTE(review): the enclosing function header (`def description():`, original
# line ~1818) and the `return _(...)` wrapper are missing from this extract;
# what follows is the help text shown for `sat package --help`.
1819 """method that is called when salomeTools is called with --help option.
1821 :return: The text to display for the package command description.
1826 The package command creates a tar file archive of a product.
1827 There are four kinds of archive, which can be mixed:
1829 1 - The binary archive.
1830 It contains the product installation directories plus a launcher.
1831 2 - The sources archive.
1832 It contains the product archives, a project (the application plus salomeTools).
1833 3 - The project archive.
1834 It contains a project (give the project file path as argument).
1835 4 - The salomeTools archive.
1836 It contains code utility salomeTools.
1839 >> sat package SALOME-master --binaries --sources"""
# Entry point of `sat package`: parse options, validate the requested package
# kind(s), compute the archive name/path, gather every file to embed
# (binaries, sources, project, sat itself, README, extra files) and write the
# final tar.gz, cleaning up the temporary working directories afterwards.
# NOTE(review): this extract is heavily sub-sampled (gaps in the original line
# numbering): many `return`, `else`, `try` and condition lines are missing.
# Only comments were added below; confirm the control flow against the
# complete file before modifying it.
1843 def run(args, runner, logger):
1844 """method that is called when salomeTools is called with package parameter."""
1847 (options, args) = parser.parse_args(args)
1849 # Check that a type of package is called, and only one
1850 all_option_types = (
1853 options.project not in ["", None],
1855 options.bin_products,
1858 # Check if no option for package type
1859 if all_option_types.count(True) == 0:
1861 "Error: Precise a type for the package\nUse one of the "
1862 "following options: --binaries, --sources, --project or"
1863 " --salometools, --bin_products"
1865 logger.write(src.printcolors.printcError(msg), 1)
1866 logger.write("\n", 1)
1868 do_create_package = (
1869 options.binaries or options.sources or options.project or options.sat
1872 if options.bin_products:
1873 ret = bin_products_archives(runner.cfg, logger, options.with_vcs)
1876 if not do_create_package:
1879 # continue to create a tar.gz package
1881 # The repository where to put the package if not Binary or Source
1882 package_default_path = runner.cfg.LOCAL.workdir
1883 # if the package contains binaries or sources:
1884 if options.binaries or options.sources or options.bin_products:
1885 # Check that the command has been called with an application
1886 src.check_config_has_application(runner.cfg)
1888 # Display information
1890 _("Packaging application %s\n")
1891 % src.printcolors.printcLabel(runner.cfg.VARS.application),
1895 # Get the default directory where to put the packages
1896 package_default_path = os.path.join(runner.cfg.APPLICATION.workdir, "PACKAGE")
1897 src.ensure_path_exists(package_default_path)
1899 # if the package contains a project:
1901 # check that the project is visible by SAT
1902 projectNameFile = options.project + ".pyconf"
# scan the registered project files for one whose basename matches
1904 for i in runner.cfg.PROJECTS.project_file_paths:
1905 baseName = os.path.basename(i)
1906 if baseName == projectNameFile:
1910 if foundProject is None:
1911 local_path = os.path.join(
1912 runner.cfg.VARS.salometoolsway, "data", "local.pyconf"
1915 """ERROR: the project %(1)s is not visible by salomeTools.
1919 Please add it in file:
1922 "1": options.project,
1923 "2": "\n ".join(runner.cfg.PROJECTS.project_file_paths),
1927 logger.write(src.printcolors.printcError(msg), 1)
1928 logger.write("\n", 1)
1931 options.project_file_path = foundProject
1932 src.printcolors.print_value(
1933 logger, "Project path", options.project_file_path, 2
1936 # Remove the products that are filtered by the --without_properties option
1937 if options.without_properties:
1938 prop, value = options.without_properties
1939 update_config(runner.cfg, logger, prop, value)
1941 # Remove from config the products that have the not_in_package property
1942 update_config(runner.cfg, logger, "not_in_package", "yes")
1944 # get the name of the archive or build it
# user supplied --name: split it into directory and bare archive name
1946 if os.path.basename(options.name) == options.name:
1947 # only a name (not a path)
1948 archive_name = options.name
1949 dir_name = package_default_path
1951 archive_name = os.path.basename(options.name)
1952 dir_name = os.path.dirname(options.name)
1954 # suppress extension
1955 if archive_name[-len(".tgz") :] == ".tgz":
1956 archive_name = archive_name[: -len(".tgz")]
1957 if archive_name[-len(".tar.gz") :] == ".tar.gz":
1958 archive_name = archive_name[: -len(".tar.gz")]
# no --name given: derive the archive name from the application and options
1962 dir_name = package_default_path
1963 if options.binaries or options.sources:
1964 archive_name = runner.cfg.APPLICATION.name
1966 if options.binaries:
1967 archive_name += "-" + runner.cfg.VARS.dist
1970 archive_name += "-SRC"
1971 if options.with_vcs:
1972 archive_name += "-VCS"
1975 archive_name += "salomeTools_" + src.get_salometool_version(runner.cfg)
1980 archive_name += "satproject_" + options.project
1982 if len(archive_name) == 0: # no option worked
1984 "Error: Cannot name the archive\n"
1985 " check if at least one of the following options was "
1986 "selected : --binaries, --sources, --project or"
1989 logger.write(src.printcolors.printcError(msg), 1)
1990 logger.write("\n", 1)
1993 path_targz = os.path.join(dir_name, archive_name + PACKAGE_EXT)
1995 src.printcolors.print_value(logger, "Package path", path_targz, 2)
1997 # Create a working directory for all files that are produced during the
1998 # package creation and that will be removed at the end of the command
1999 tmp_working_dir = os.path.join(runner.cfg.VARS.tmp_root, runner.cfg.VARS.datehour)
2000 src.ensure_path_exists(tmp_working_dir)
2001 logger.write("\n", 5)
2002 logger.write(_("The temporary working directory: %s\n" % tmp_working_dir), 5)
2004 logger.write("\n", 3)
2006 msg = _("Preparation of files to add to the archive")
2007 logger.write(src.printcolors.printcLabel(msg), 2)
2008 logger.write("\n", 2)
2010 d_files_to_add = {} # content of the archive
2012 # a dict to hold paths that will need to be substitute for users recompilations
2013 d_paths_to_substitute = {}
2015 if options.binaries:
2016 d_bin_files_to_add = binary_package(
2017 runner.cfg, logger, options, tmp_working_dir
2019 # for all binaries dir, store the substitution that will be required
2020 # for extra compilations
2021 for key in d_bin_files_to_add:
2022 if key.endswith("(bin)"):
2023 source_dir = d_bin_files_to_add[key][0]
2024 path_in_archive = d_bin_files_to_add[key][1].replace(
2025 runner.cfg.INTERNAL.config.binary_dir + runner.cfg.VARS.dist,
2026 runner.cfg.INTERNAL.config.install_dir,
2028 if os.path.basename(source_dir) == os.path.basename(path_in_archive):
2029 # if basename is the same we will just substitute the dirname
2030 d_paths_to_substitute[
2031 os.path.dirname(source_dir)
2032 ] = os.path.dirname(path_in_archive)
2034 d_paths_to_substitute[source_dir] = path_in_archive
2036 d_files_to_add.update(d_bin_files_to_add)
# sources requested: add the source package content
2038 d_files_to_add.update(
2039 source_package(runner, runner.cfg, logger, options, tmp_working_dir)
2041 if options.binaries:
2042 # for archives with bin and sources we provide a shell script able to
2043 # install binaries for compilation
2044 file_install_bin = produce_install_bin_file(
2048 d_paths_to_substitute,
2051 d_files_to_add.update({"install_bin": (file_install_bin, "install_bin.sh")})
2052 logger.write("substitutions that need to be done later : \n", 5)
2053 logger.write(str(d_paths_to_substitute), 5)
2054 logger.write("\n", 5)
2056 # --salomeTool option is not considered when --sources is selected, as this option
2057 # already brings salomeTool!
2059 d_files_to_add.update(
2060 sat_package(runner.cfg, tmp_working_dir, options, logger)
2064 DBG.write("config for package %s" % options.project, runner.cfg)
2065 d_files_to_add.update(
2069 options.project_file_path,
2077 if not (d_files_to_add):
2078 msg = _("Error: Empty dictionnary to build the archive!\n")
2079 logger.write(src.printcolors.printcError(msg), 1)
2080 logger.write("\n", 1)
2083 # Add the README file in the package
2084 local_readme_tmp_path = add_readme(runner.cfg, options, tmp_working_dir)
2085 d_files_to_add["README"] = (local_readme_tmp_path, "README")
2087 # Add the additional files of option add_files
2088 if options.add_files:
2089 for file_path in options.add_files:
2090 if not os.path.exists(file_path):
2091 msg = _("WARNING: the file %s is not accessible.\n" % file_path)
2093 file_name = os.path.basename(file_path)
2094 d_files_to_add[file_name] = (file_path, file_name)
2096 logger.write("\n", 2)
2097 logger.write(src.printcolors.printcLabel(_("Actually do the package")), 2)
2098 logger.write("\n", 2)
2100 "\nfiles and directories to add:\n%s\n\n" % PP.pformat(d_files_to_add), 5
2105 # Creating the object tarfile
2106 tar = tarfile.open(path_targz, mode="w:gz")
2108 # get the filtering function if needed
# old (<= 2.6) python: tarfile.add uses exclude= instead of filter=
2110 filter_function = exclude_VCS_and_extensions_26
2112 filter_function = exclude_VCS_and_extensions
2114 # Add the files to the tarfile object
2116 tar, archive_name, d_files_to_add, logger, f_exclude=filter_function
2119 except KeyboardInterrupt:
2120 logger.write(src.printcolors.printcError("\nERROR: forced interruption\n"), 1)
2122 _("Removing the temporary working directory '%s'... ") % tmp_working_dir, 1
2124 # remove the working directory
2125 shutil.rmtree(tmp_working_dir)
2126 logger.write(_("OK"), 1)
2127 logger.write(_("\n"), 1)
2130 # case if no application, only package sat as 'sat package -t'
2132 app = runner.cfg.APPLICATION
2136 # unconditionaly remove the tmp_local_working_dir
2138 tmp_local_working_dir = os.path.join(app.workdir, "tmp_package")
2139 if os.path.isdir(tmp_local_working_dir):
2140 shutil.rmtree(tmp_local_working_dir)
2142 # remove the tmp directory, unless user has registered as developer
2143 if os.path.isdir(tmp_working_dir) and (not DBG.isDeveloper()):
2144 shutil.rmtree(tmp_working_dir)
2146 # Print again the path of the package
2147 logger.write("\n", 2)
2148 src.printcolors.print_value(logger, "Package path", path_targz, 2)