3 # Copyright (C) 2010-2012 CEA/DEN
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 # Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
31 from application import get_SALOME_modules
32 import src.debug as DBG
# Module-level constants for the "sat package" command.
# old_python: True when running under Python <= 2.6 (tarfile API differs there:
# TarFile.add takes `exclude=` instead of `filter=` — see add_files below).
34 old_python = sys.version_info[0] == 2 and sys.version_info[1] <= 6
# Directory names used inside the produced archive.
41 ARCHIVE_DIR = "ARCHIVES"
42 PROJECT_DIR = "PROJECT"
# VCS bookkeeping directories and file extensions stripped from packages.
44 IGNORED_DIRS = [".git", ".svn"]
45 IGNORED_EXTENSIONS = []
47 PACKAGE_EXT=".tar.gz" # the extension we use for the packages
# Pyconf templates written into source packages: PROJECT_TEMPLATE describes the
# embedded project (paths to archives / application / product / job / machine
# pyconf files, relative to $PWD), LOCAL_TEMPLATE the embedded local.pyconf.
# The Windows variant keeps ARCHIVES beside the project root; the other variant
# keeps it inside the project. NOTE(review): the template bodies are string
# literals — do not edit them without checking the pyconf consumers.
49 if src.architecture.is_windows():
50     PROJECT_TEMPLATE = """#!/usr/bin/env python
53 # The path to the archive root directory
54 root_path : $PWD + "/../"
56 project_path : $PWD + "/"
58 # Where to search the archives of the products
59 ARCHIVEPATH : $root_path + "ARCHIVES"
60 # Where to search the pyconf of the applications
61 APPLICATIONPATH : $project_path + "applications/"
62 # Where to search the pyconf of the products
63 PRODUCTPATH : $project_path + "products/"
64 # Where to search the pyconf of the jobs of the project
65 JOBPATH : $project_path + "jobs/"
66 # Where to search the pyconf of the machines of the project
67 MACHINEPATH : $project_path + "machines/"
70     PROJECT_TEMPLATE = """#!/usr/bin/env python
74 project_path : $PWD + "/"
76 # Where to search the archives of the products
77 ARCHIVEPATH : $project_path + "ARCHIVES"
78 # Where to search the pyconf of the applications
79 APPLICATIONPATH : $project_path + "applications/"
80 # Where to search the pyconf of the products
81 PRODUCTPATH : $project_path + "products/"
82 # Where to search the pyconf of the jobs of the project
83 JOBPATH : $project_path + "jobs/"
84 # Where to search the pyconf of the machines of the project
85 MACHINEPATH : $project_path + "machines/"
89 LOCAL_TEMPLATE = ("""#!/usr/bin/env python
97 archive_dir : 'default'
106     $LOCAL.workdir + $VARS.sep + \"""" + PROJECT_DIR + """\" + $VARS.sep + "project.pyconf"
# Command-line interface of "sat package": each add_option call registers
# (short flag, long flag, value type, destination attribute, help text, default).
# Messages are wrapped in _() for i18n.
111 # Define all possible option for the package command :  sat package <options>
112 parser = src.options.Options()
113 parser.add_option('b', 'binaries', 'boolean', 'binaries',
114     _('Optional: Produce a binary package.'), False)
115 parser.add_option('f', 'force_creation', 'boolean', 'force_creation',
116     _('Optional: Only binary package: produce the archive even if '
117       'there are some missing products.'), False)
118 parser.add_option('s', 'sources', 'boolean', 'sources',
119     _('Optional: Produce a compilable archive of the sources of the '
120       'application.'), False)
121 parser.add_option('', 'bin_products', 'boolean', 'bin_products',
122     _('Optional: Create binary archives for all products.'), False)
123 parser.add_option('', 'with_vcs', 'boolean', 'with_vcs',
124     _('Optional: Do not make archive for products in VCS mode (git, cvs, svn). '
125       'Sat prepare will use VCS mode instead to retrieve them.'
126       '\n Also, when combined with "--bin_products" option, restrict the building of product archives to VCS products.'),
128 parser.add_option('', 'ftp', 'boolean', 'ftp',
129     _('Optional: Do not embed archives for products in archive mode.'
130     'Sat prepare will use ftp instead to retrieve them'),
132 parser.add_option('e', 'exe', 'string', 'exe',
133     _('Optional: Produce an extra launcher based upon the exe given as argument.'), "")
134 parser.add_option('p', 'project', 'string', 'project',
135     _('Optional: Produce an archive that contains a project.'), "")
136 parser.add_option('t', 'salometools', 'boolean', 'sat',
137     _('Optional: Produce an archive that contains salomeTools.'), False)
138 parser.add_option('n', 'name', 'string', 'name',
139     _('Optional: The name or full path of the archive.'), None)
140 parser.add_option('', 'add_files', 'list2', 'add_files',
141     _('Optional: The list of additional files to add to the archive.'), [])
142 parser.add_option('', 'without_properties', 'properties', 'without_properties',
143     _('Optional: Filter the products by their properties.\n\tSyntax: '
144       '--without_properties <property>:<value>'))
# NOTE(review): adds every entry of d_content to the open tarfile, printing an
# aligned "name .... path OK/KO" progress line per entry, and deduplicating
# identical (local_path -> in_archive) pairs via the `already_added` set.
147 def add_files(tar, name_archive, d_content, logger, f_exclude=None):
148     '''Create an archive containing all directories and files that are given in
149     the d_content argument.
151     :param tar tarfile: The tarfile instance used to make the archive.
152     :param name_archive str: The name of the archive to make.
153     :param d_content dict: The dictionary that contains all directories and files
154                            to add in the archive.
156                            (path_on_local_machine, path_in_archive)
157     :param logger Logger: the logging instance
158     :param f_exclude Function: the function that filters
159     :return: 0 if success, 1 if not.
162     # get the max length of the messages in order to make the display
163     max_len = len(max(d_content.keys(), key=len))
166     # loop over each directory or file stored in the d_content dictionary
167     names = sorted(d_content.keys())
168     DBG.write("add tar names", names)
170     # used to avoid duplications (for pip install in python, or single_install_dir cases)
173         # display information
174         len_points = max_len - len(name) + 3
175         local_path, archive_path = d_content[name]
176         in_archive = os.path.join(name_archive, archive_path)
177         logger.write(name + " " + len_points * "." + " "+ in_archive + " ", 3)
178         # Get the local path and the path in archive
179         # of the directory or file to add
180         # Add it in the archive
182         key=local_path+"->"+in_archive
183         if key not in already_added:
                # Python <= 2.6 tarfile uses exclude=, newer versions use filter=
187                 exclude=exclude_VCS_and_extensions_26)
191                 filter=exclude_VCS_and_extensions)
192             already_added.add(key)
193             logger.write(src.printcolors.printcSuccess(_("OK")), 3)
194         except Exception as e:
195             logger.write(src.printcolors.printcError(_("KO ")), 3)
196             logger.write(str(e), 3)
198         logger.write("\n", 3)
# Legacy exclude callback for Python 2.6's TarFile.add(exclude=...) API.
202 def exclude_VCS_and_extensions_26(filename):
203     ''' The function that is used to exclude from package the link to the
204     VCS repositories (like .git) (only for python 2.6)
206     :param filename Str: The filename to exclude (or not).
207     :return: True if the file has to be excluded
    # NOTE(review): substring match, so any path containing ".git"/".svn"
    # anywhere is excluded (intentionally broad).
210     for dir_name in IGNORED_DIRS:
211         if dir_name in filename:
213     for extension in IGNORED_EXTENSIONS:
214         if filename.endswith(extension):
# Modern filter callback for TarFile.add(filter=...): return None to drop
# a member, or the (possibly modified) TarInfo to keep it.
218 def exclude_VCS_and_extensions(tarinfo):
219     ''' The function that is used to exclude from package the link to the
220     VCS repositories (like .git)
222     :param tarinfo TarInfo: The tar member to test for exclusion.
223     :return: None if the file has to be excluded
224     :rtype: tarinfo or None
226     filename = tarinfo.name
227     for dir_name in IGNORED_DIRS:
228         if dir_name in filename:
230     for extension in IGNORED_EXTENSIONS:
231         if filename.endswith(extension):
# NOTE(review): temporarily forces APPLICATION.base to "no" while generating the
# launcher, and restores the saved value at the end — any early exit added here
# must keep that restore (consider try/finally if this is ever refactored).
235 def produce_relative_launcher(config,
240     '''Create a specific SALOME launcher for the binary package. This launcher
243     :param config Config: The global configuration.
244     :param logger Logger: the logging instance
245     :param file_dir str: the directory where to put the launcher
246     :param file_name str: The launcher name
247     :param binaries_dir_name str: the name of the repository where the binaries
249     :return: the path of the produced launcher
253     # set base mode to "no" for the archive - save current mode to restore it at the end
254     if "base" in config.APPLICATION:
255         base_setting=config.APPLICATION.base
258     config.APPLICATION.base="no"
260     # get KERNEL installation path
261     kernel_info = src.product.get_product_config(config, "KERNEL")
262     kernel_base_name=os.path.basename(kernel_info.install_dir)
263     if kernel_info.install_mode == "base":
264         # case of kernel installed in base. the kernel install dir name is different in the archive
265         kernel_base_name=os.path.basename(os.path.dirname(kernel_info.install_dir))
267     kernel_root_dir = os.path.join(binaries_dir_name, kernel_base_name)
269     # set kernel bin dir (considering fhs property)
270     kernel_cfg = src.product.get_product_config(config, "KERNEL")
271     if src.get_property_in_product_cfg(kernel_cfg, "fhs"):
272         bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin")
274         bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin","salome")
276     # check if the application contains an application module
277     # check also if the application has a distene product,
278     # in this case get its licence file name
279     l_product_info = src.product.get_products_infos(config.APPLICATION.products.keys(), config)
280     salome_application_name="Not defined"
281     distene_licence_file_name=False
282     for prod_name, prod_info in l_product_info:
283         # look for a "salome application" and a distene product
284         if src.get_property_in_product_cfg(prod_info, "is_distene") == "yes":
285             distene_licence_file_name = src.product.product_has_licence(prod_info,
286                                                                         config.PATHS.LICENCEPATH)
287         if src.get_property_in_product_cfg(prod_info, "is_salome_application") == "yes":
288             salome_application_name=prod_info.name
290     # if the application contains an application module, we set ABSOLUTE_APPLI_PATH to it
291     # if not we set it to KERNEL_INSTALL_DIR, which is sufficient, except for salome test
292     if salome_application_name == "Not defined":
293         app_root_dir=kernel_root_dir
295         app_root_dir=os.path.join(binaries_dir_name, salome_application_name)
    # "out_dir_Path" below is a placeholder token, rewritten literally into the
    # generated launcher and later un-quoted by the replace_in_file hack.
298     additional_env['sat_bin_kernel_install_dir'] = "out_dir_Path + " +\
299                                                    config.VARS.sep + bin_kernel_install_dir
300     if "python3" in config.APPLICATION and config.APPLICATION.python3 == "yes":
301         additional_env['sat_python_version'] = 3
303         additional_env['sat_python_version'] = 2
305     additional_env['ABSOLUTE_APPLI_PATH'] = "out_dir_Path" + config.VARS.sep + app_root_dir
306     launcher_name = src.get_launcher_name(config)
307     additional_env['APPLI'] = "out_dir_Path" + config.VARS.sep + file_name
309     # create an environment file writer
310     writer = src.environment.FileEnvWriter(config,
316     filepath = os.path.join(file_dir, file_name)
318     writer.write_env_file(filepath,
321                           additional_env=additional_env,
323                           for_package = binaries_dir_name)
325     # Little hack to put out_dir_Path outside the strings
326     src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' )
327     src.replace_in_file(filepath, "r'out_dir_Path + ", "out_dir_Path + r'" )
329     # A hack to put a call to a file for distene licence.
330     # It does nothing to an application that has no distene product
331     if distene_licence_file_name:
332         logger.write("Application has a distene licence file! We use it in package launcher", 5)
333         hack_for_distene_licence(filepath, distene_licence_file_name)
335     # change the rights in order to make the file executable for everybody
345     # restore modified setting by its initial value
346     config.APPLICATION.base=base_setting
# NOTE(review): rewrites the launcher in place: moves it to <filepath>_old,
# locates the "# Set DISTENE License" section, and splices in code that loads
# the licence file as a module (importlib on Python >= 3.5, imp otherwise).
# The inserted block is a Python source string — do not reformat it.
350 def hack_for_distene_licence(filepath, licence_file):
351     '''Replace the distene licence env variable by a call to a file.
353     :param filepath Str: The path to the launcher to modify.
355     shutil.move(filepath, filepath + "_old")
357     filein = filepath + "_old"
358     fin = open(filein, "r")
359     fout = open(fileout, "w")
360     text = fin.readlines()
361     # Find the Distene section
363     for i,line in enumerate(text):
364         if "# Set DISTENE License" in line:
368         # No distene product, there is nothing to do
    # Drop the two original licence lines, then insert the loader snippet.
374     del text[num_line +1]
375     del text[num_line +1]
376     text_to_insert =""" try:
377     distene_licence_file=r"%s"
378     if sys.version_info[0] >= 3 and sys.version_info[1] >= 5:
379         import importlib.util
380         spec_dist = importlib.util.spec_from_file_location("distene_licence", distene_licence_file)
381         distene=importlib.util.module_from_spec(spec_dist)
382         spec_dist.loader.exec_module(distene)
385         distene = imp.load_source('distene_licence', distene_licence_file)
386     distene.set_distene_variables(context)
388     pass\n""" % licence_file
389     text.insert(num_line + 1, text_to_insert)
# NOTE(review): like produce_relative_launcher, this forces APPLICATION.base to
# "no" and restores the saved value at the end; the "out_dir_Path" token is
# rewritten to a shell variable reference (%...% on Windows, ${...} elsewhere).
396 def produce_relative_env_files(config,
401     '''Create some specific environment files for the binary package. These
402     files use relative paths.
404     :param config Config: The global configuration.
405     :param logger Logger: the logging instance
406     :param file_dir str: the directory where to put the files
407     :param binaries_dir_name str: the name of the repository where the binaries
409     :param exe_name str: if given generate a launcher executing exe_name
410     :return: the list of path of the produced environment files
414     # set base mode to "no" for the archive - save current mode to restore it at the end
415     if "base" in config.APPLICATION:
416         base_setting=config.APPLICATION.base
419     config.APPLICATION.base="no"
421     # create an environment file writer
422     writer = src.environment.FileEnvWriter(config,
427     if src.architecture.is_windows():
429         filename  = "env_launch.bat"
432         filename  = "env_launch.sh"
435         filename=os.path.basename(exe_name)
438     filepath = writer.write_env_file(filename,
441                                      for_package = binaries_dir_name)
443     # Little hack to put out_dir_Path as environment variable
444     if src.architecture.is_windows() :
445         src.replace_in_file(filepath, '"out_dir_Path', '"%out_dir_Path%' )
446         src.replace_in_file(filepath, '=out_dir_Path', '=%out_dir_Path%' )
447         src.replace_in_file(filepath, ';out_dir_Path', ';%out_dir_Path%' )
449         src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )
450         src.replace_in_file(filepath, ':out_dir_Path', ':${out_dir_Path}' )
451         src.replace_in_file(filepath, ';out_dir_Path', ';${out_dir_Path}' )
    # If an exe was requested, append a line that forwards user arguments to it.
454     if src.architecture.is_windows():
455         cmd="\n\nrem Launch exe with user arguments\n%s " % exe_name + "%*"
457         cmd='\n\n# Launch exe with user arguments\n%s "$*"' % exe_name
458     with open(filepath, "a") as exe_launcher:
459         exe_launcher.write(cmd)
461     # change the rights in order to make the file executable for everybody
471     # restore modified setting by its initial value
472     config.APPLICATION.base=base_setting
# NOTE(review): generates install_bin.sh from INSTALL_BIN.template by building
# a shell "for f in $(grep -RIl ...)" + sed substitution loop over d_sub.
476 def produce_install_bin_file(config,
481     '''Create a bash shell script which does substitutions in the BINARIES dir
482     in order to use it for extra compilations.
484     :param config Config: The global configuration.
485     :param logger Logger: the logging instance
486     :param file_dir str: the directory where to put the files
487     :param d_sub, dict: the dictionary that contains the substitutions to be done
488     :param file_name str: the name of the install script file
489     :return: the produced file
493     filepath = os.path.join(file_dir, file_name)
494     # open the file and write into it
495     # use codec utf-8 as sat variables are in unicode
496     with codecs.open(filepath, "w", 'utf-8') as installbin_file:
497         installbin_template_path = os.path.join(config.VARS.internal_dir,
498                                         "INSTALL_BIN.template")
500         # build the name of the directory that will contain the binaries
501         binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
502         # build the substitution loop
503         loop_cmd = "for f in $(grep -RIl"
505             loop_cmd += " -e "+ key
506         loop_cmd += ' ' + config.INTERNAL.config.install_dir +\
509             loop_cmd += "    s?" + key + "?$(pwd)/" + d_sub[key] + "?g\n"
510         loop_cmd += '  " $f\ndone'
        # template placeholders filled below: BINARIES_DIR, SUBSTITUTION_LOOP, INSTALL_DIR
513         d["BINARIES_DIR"] = binaries_dir_name
514         d["SUBSTITUTION_LOOP"]=loop_cmd
515         d["INSTALL_DIR"]=config.INTERNAL.config.install_dir
517         # substitute the template and write it in file
518         content=src.template.substitute(installbin_template_path, d)
519         installbin_file.write(content)
520     # change the rights in order to make the file executable for everybody
# NOTE(review): fills create_appli.py.for_bin_packages.template: "TO BE FILLED 1"
# gets the binaries dir name, "TO BE FILLED 2" gets one <module .../> XML line
# per SALOME module (one per component for cpp-generated products).
532 def product_appli_creation_script(config,
536     '''Create a script that can produce an application (EDF style) in the binary
539     :param config Config: The global configuration.
540     :param logger Logger: the logging instance
541     :param file_dir str: the directory where to put the file
542     :param binaries_dir_name str: the name of the repository where the binaries
544     :return: the path of the produced script file
547     template_name = "create_appli.py.for_bin_packages.template"
548     template_path = os.path.join(config.VARS.internal_dir, template_name)
549     text_to_fill = open(template_path, "r").read()
550     text_to_fill = text_to_fill.replace("TO BE FILLED 1",
551                                         '"' + binaries_dir_name + '"')
554     for product_name in get_SALOME_modules(config):
555         product_info = src.product.get_product_config(config, product_name)
557         if src.product.product_is_smesh_plugin(product_info):
560         if 'install_dir' in product_info and bool(product_info.install_dir):
561             if src.product.product_is_cpp(product_info):
563                 for cpp_name in src.product.get_product_components(product_info):
564                     line_to_add = ("<module name=\"" +
566                                    "\" gui=\"yes\" path=\"''' + "
567                                    "os.path.join(dir_bin_name, \"" +
568                                    cpp_name + "\") + '''\"/>")
571                 line_to_add = ("<module name=\"" +
573                                "\" gui=\"yes\" path=\"''' + "
574                                "os.path.join(dir_bin_name, \"" +
575                                product_name + "\") + '''\"/>")
576             text_to_add += line_to_add + "\n"
578     filled_text = text_to_fill.replace("TO BE FILLED 2", text_to_add)
580     tmp_file_path = os.path.join(file_dir, "create_appli.py")
581     ff = open(tmp_file_path, "w")
582     ff.write(filled_text)
585     # change the rights in order to make the file executable for everybody
586     os.chmod(tmp_file_path,
# NOTE(review): builds one <name>-<version>-<dist>.tar.gz per installed product
# under PACKAGE/products, plus a sibling .md5 checksum file for each archive.
597 def bin_products_archives(config, logger, only_vcs):
598     '''Prepare binary packages for all products
599     :param config Config: The global configuration.
    # only_vcs: when true, restrict archive creation to VCS-managed products.
600     :return: the error status
604     logger.write("Make %s binary archives\n" % config.VARS.dist)
605     # Get the default directory where to put the packages
606     binpackage_path = os.path.join(config.APPLICATION.workdir, "PACKAGE", "products")
607     src.ensure_path_exists(binpackage_path)
608     # Get the list of product installation to add to the archive
609     l_products_name = sorted(config.APPLICATION.products.keys())
610     l_product_info = src.product.get_products_infos(l_products_name,
612     # first loop on products : filter products, analyse properties,
613     # and store the information that will be used to create the archive in the second loop
614     l_not_installed=[] # store not installed products for warning at the end
615     for prod_name, prod_info in l_product_info:
616         # ignore the native and fixed products for install directories
617         if (src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes"
618             or src.product.product_is_native(prod_info)
619             or src.product.product_is_fixed(prod_info)
620             or not src.product.product_compiles(prod_info)):
622         if only_vcs and not src.product.product_is_vcs(prod_info):
624         if not src.product.check_installation(config, prod_info):
625             l_not_installed.append(prod_name)
626             continue # product is not installed, we skip it
627         # prepare call to make_bin_archive
        # "/" in version is replaced to keep the archive name a single path component
628         path_targz_prod = os.path.join(binpackage_path, prod_name + '-' + prod_info.version.replace("/", "_") + "-" + config.VARS.dist + PACKAGE_EXT)
629         targz_prod = tarfile.open(path_targz_prod, mode='w:gz')
630         bin_path = prod_info.install_dir
631         targz_prod.add(bin_path)
633         # Python program to find MD5 hash value of a file
        # NOTE(review): reads the whole archive into memory to hash it — fine for
        # moderate sizes; chunked hashing would be needed for very large archives.
635         with open(path_targz_prod,"rb") as f:
636             bytes = f.read() # read file as bytes
637             readable_hash = hashlib.md5(bytes).hexdigest();
638             with open(path_targz_prod+".md5", "w") as md5sum:
639                md5sum.write("%s  %s" % (readable_hash, os.path.basename(path_targz_prod)))
640             logger.write("   archive : %s   (md5sum = %s)\n" % (path_targz_prod, readable_hash))
# NOTE(review): core collector for "sat package --binaries": returns the
# {label: (local_path, path_in_archive)} dictionary later consumed by add_files.
# Gathers product install dirs, optional sources, compressed LOGS, launchers
# (standard / mesa / copy), the EDF appli-creation script and env files.
644 def binary_package(config, logger, options, tmp_working_dir):
645     '''Prepare a dictionary that stores all the needed directories and files to
646     add in a binary package.
648     :param config Config: The global configuration.
649     :param logger Logger: the logging instance
650     :param options OptResult: the options of the launched command
651     :param tmp_working_dir str: The temporary local directory containing some
652                                 specific directories or files needed in the
654     :return: the dictionary that stores all the needed directories and files to
655              add in a binary package.
656              {label : (path_on_local_machine, path_in_archive)}
660     # Get the list of product installation to add to the archive
661     l_products_name = sorted(config.APPLICATION.products.keys())
662     l_product_info = src.product.get_products_infos(l_products_name,
665     # suppress compile time products for binaries-only archives
666     if not options.sources:
667         update_config(config, logger, "compile_time", "yes")
672     l_sources_not_present = []
673     generate_mesa_launcher = False  # a flag to know if we generate a mesa launcher
674     if ("APPLICATION" in config and
675         "properties" in config.APPLICATION and
676         "mesa_launcher_in_package" in config.APPLICATION.properties and
677         config.APPLICATION.properties.mesa_launcher_in_package == "yes") :
678             generate_mesa_launcher=True
680     # first loop on products : filter products, analyse properties,
681     # and store the information that will be used to create the archive in the second loop
682     for prod_name, prod_info in l_product_info:
683         # skip product with property not_in_package set to yes
684         if src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes":
687         # Add the sources of the products that have the property
688         # sources_in_package : "yes"
689         if src.get_property_in_product_cfg(prod_info,
690                                            "sources_in_package") == "yes":
691             if os.path.exists(prod_info.source_dir):
692                 l_source_dir.append((prod_name, prod_info.source_dir))
694                 l_sources_not_present.append(prod_name)
696         # ignore the native and fixed products for install directories
697         if (src.product.product_is_native(prod_info)
698             or src.product.product_is_fixed(prod_info)
699             or not src.product.product_compiles(prod_info)):
702         # products with single_dir property will be installed in the PRODUCTS directory of the archive
703         is_single_dir=(src.appli_test_property(config,"single_install_dir", "yes") and \
704                        src.product.product_test_property(prod_info,"single_install_dir", "yes"))
705         if src.product.check_installation(config, prod_info):
706             l_install_dir.append((prod_name, prod_info.name, prod_info.install_dir,
707                                   is_single_dir, prod_info.install_mode))
709             l_not_installed.append(prod_name)
711         # Add also the cpp generated modules (if any)
712         if src.product.product_is_cpp(prod_info):
714             for name_cpp in src.product.get_product_components(prod_info):
715                 install_dir = os.path.join(config.APPLICATION.workdir,
716                                            config.INTERNAL.config.install_dir,
718                 if os.path.exists(install_dir):
719                     l_install_dir.append((name_cpp, name_cpp, install_dir, False, "value"))
721                     l_not_installed.append(name_cpp)
723     # check the name of the directory that (could) contains the binaries
724     # from previous detar
725     binaries_from_detar = os.path.join(
726                             config.APPLICATION.workdir,
727                             config.INTERNAL.config.binary_dir + config.VARS.dist)
728     if os.path.exists(binaries_from_detar):
730 WARNING: existing binaries directory from previous detar installation:
732         To make new package from this, you have to:
733         1) install binaries in INSTALL directory with the script "install_bin.sh"
734            see README file for more details
735         2) or recompile everything in INSTALL with "sat compile" command
736            this step is long, and requires some linux packages to be installed
738 """ % binaries_from_detar)
740     # Print warning or error if there are some missing products
741     if len(l_not_installed) > 0:
742         text_missing_prods = ""
743         for p_name in l_not_installed:
744             text_missing_prods += " - " + p_name + "\n"
745         if not options.force_creation:
746             msg = _("ERROR: there are missing product installations:")
747             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
750             raise src.SatException(msg)
752             msg = _("WARNING: there are missing products installations:")
753             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
757     # Do the same for sources
758     if len(l_sources_not_present) > 0:
759         text_missing_prods = ""
760         for p_name in l_sources_not_present:
761             text_missing_prods += "-" + p_name + "\n"
762         if not options.force_creation:
763             msg = _("ERROR: there are missing product sources:")
764             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
767             raise src.SatException(msg)
769             msg = _("WARNING: there are missing products sources:")
770             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
774     # construct the name of the directory that will contain the binaries
775     if src.architecture.is_windows():
776         binaries_dir_name = config.INTERNAL.config.binary_dir
778         binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
779     # construct the correlation table between the product names, their
780     # actual install directories and their install directory in archive
782     for prod_name, prod_info_name, install_dir, is_single_dir, install_mode in l_install_dir:
783         prod_base_name=os.path.basename(install_dir)
784         if install_mode == "base":
785             # case of a products installed in base.
786             # because the archive is in base:no mode, the name of the install dir is different inside archive
787             # we set it to the product name or by PRODUCTS if single-dir
789                 prod_base_name=config.INTERNAL.config.single_install_dir
791                 prod_base_name=prod_info_name
792         path_in_archive = os.path.join(binaries_dir_name, prod_base_name)
793         d_products[prod_name + " (bin)"] = (install_dir, path_in_archive)
795     for prod_name, source_dir in l_source_dir:
796         path_in_archive = os.path.join("SOURCES", prod_name)
797         d_products[prod_name + " (sources)"] = (source_dir, path_in_archive)
799     # create an archives of compilation logs, and insert it into the tarball
800     logpath=os.path.join(config.APPLICATION.workdir, "LOGS")
801     path_targz_logs = os.path.join(tmp_working_dir, "logs.tgz")
802     tar_log = tarfile.open(path_targz_logs, mode='w:gz')
803     tar_log.add(logpath, arcname="LOGS")
805     d_products["LOGS"] = (path_targz_logs, "logs.tgz")
807     # for packages of SALOME applications including KERNEL,
808     # we produce a salome launcher or a virtual application (depending on salome version)
809     if 'KERNEL' in config.APPLICATION.products:
810         VersionSalome = src.get_salome_version(config)
811         # Case where SALOME has the launcher that uses the SalomeContext API
812         if VersionSalome >= 730:
813             # create the relative launcher and add it to the files to add
814             launcher_name = src.get_launcher_name(config)
815             launcher_package = produce_relative_launcher(config,
820             d_products["launcher"] = (launcher_package, launcher_name)
822             # if the application contains mesa products, we generate in addition to the
823             # classical salome launcher a launcher using mesa and called mesa_salome
824             # (the mesa launcher will be used for remote usage through ssh).
825             if generate_mesa_launcher:
826                 #if there is one : store the use_mesa property
827                 restore_use_mesa_option=None
828                 if ('properties' in config.APPLICATION and
829                     'use_mesa' in config.APPLICATION.properties):
830                     restore_use_mesa_option = config.APPLICATION.properties.use_mesa
832                 # activate mesa property, and generate a mesa launcher
833                 src.activate_mesa_property(config)  #activate use_mesa property
834                 launcher_mesa_name="mesa_"+launcher_name
835                 launcher_package_mesa = produce_relative_launcher(config,
840                 d_products["launcher (mesa)"] = (launcher_package_mesa, launcher_mesa_name)
842                 # if there was a use_mesa value, we restore it
843                 # else we set it to the default value "no"
844                 if restore_use_mesa_option != None:
845                     config.APPLICATION.properties.use_mesa=restore_use_mesa_option
847                     config.APPLICATION.properties.use_mesa="no"
850                 # if we mix binaries and sources, we add a copy of the launcher,
851                 # prefixed with "bin",in order to avoid clashes
852                 launcher_copy_name="bin"+launcher_name
853                 launcher_package_copy = produce_relative_launcher(config,
858                 d_products["launcher (copy)"] = (launcher_package_copy, launcher_copy_name)
860             # Provide a script for the creation of an application EDF style
861             appli_script = product_appli_creation_script(config,
866             d_products["appli script"] = (appli_script, "create_appli.py")
868     # Put also the environment file
869     env_file = produce_relative_env_files(config,
874     if src.architecture.is_windows():
875       filename  = "env_launch.bat"
877       filename  = "env_launch.sh"
878     d_products["environment file"] = (env_file, filename)
880     # If option exe, produce an extra launcher based on specified exe
882       exe_file = produce_relative_env_files(config,
888       if src.architecture.is_windows():
889         filename  = os.path.basename(options.exe) + ".bat"
891         filename  = os.path.basename(options.exe) + ".sh"
892       d_products["exe file"] = (exe_file, filename)
# NOTE(review): source-package counterpart of binary_package: collects product
# archives (or VCS-built archives), an embedded project, a salomeTools copy,
# and on non-Windows a relative "../ARCHIVES" symlink inside PROJECT.
897 def source_package(sat, config, logger, options, tmp_working_dir):
898     '''Prepare a dictionary that stores all the needed directories and files to
899     add in a source package.
901     :param config Config: The global configuration.
902     :param logger Logger: the logging instance
903     :param options OptResult: the options of the launched command
904     :param tmp_working_dir str: The temporary local directory containing some
905                                 specific directories or files needed in the
907     :return: the dictionary that stores all the needed directories and files to
908              add in a source package.
909              {label : (path_on_local_machine, path_in_archive)}
914     # Get all the products that are prepared using an archive
915     # unless ftp mode is specified (in this case the user of the
916     # archive will get the sources through the ftp mode of sat prepare
918         logger.write("Find archive products ... ")
919         d_archives, l_pinfo_vcs = get_archives(config, logger)
920         logger.write("Done\n")
923     if not options.with_vcs and len(l_pinfo_vcs) > 0:
924         # Make archives with the products that are not prepared using an archive
925         # (git, cvs, svn, etc)
926         logger.write("Construct archives for vcs products ... ")
927         d_archives_vcs = get_archives_vcs(l_pinfo_vcs,
932         logger.write("Done\n")
935         logger.write("Create the project ... ")
936         d_project = create_project_for_src_package(config,
940         logger.write("Done\n")
943         tmp_sat = add_salomeTools(config, tmp_working_dir)
944         d_sat = {"salomeTools" : (tmp_sat, "sat")}
946         # Add a sat symbolic link if not win
947         if not src.architecture.is_windows():
951                 # In the jobs, os.getcwd() can fail
952                 t = config.LOCAL.workdir
953             os.chdir(tmp_working_dir)
955             # create a symlink, to avoid reference with "salomeTool/.."
957             if os.path.lexists("ARCHIVES"):
958                 os.remove("ARCHIVES")
959             os.symlink("../ARCHIVES", "ARCHIVES")
962             d_sat["sat archive link"] = (os.path.join(tmp_working_dir,"PROJECT", "ARCHIVES"),
963                                          os.path.join("PROJECT", "ARCHIVES"))
    # Merge every partial dictionary into the final content table.
965     d_source = src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
# NOTE(review): splits the application's products into archive-managed ones
# (returned as {name: (local_archive_path, path_in_package)}) and VCS-managed
# ones (returned as a list of (name, info) pairs for get_archives_vcs).
# Pip-managed products additionally contribute their wheel from LOCAL.archive_dir.
968 def get_archives(config, logger):
969     '''Find all the products that are get using an archive and all the products
970     that are get using a vcs (git, cvs, svn) repository.
972     :param config Config: The global configuration.
973     :param logger Logger: the logging instance
974     :return: the dictionary {name_product :
975              (local path of its archive, path in the package of its archive )}
976              and the list of specific configuration corresponding to the vcs
980     # Get the list of product informations
981     l_products_name = config.APPLICATION.products.keys()
982     l_product_info = src.product.get_products_infos(l_products_name,
986     for p_name, p_info in l_product_info:
987         # skip product with property not_in_package set to yes
988         if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
990         # ignore the native and fixed products
991         if (src.product.product_is_native(p_info)
992             or src.product.product_is_fixed(p_info)):
994         if p_info.get_source == "archive":
995             archive_path = p_info.archive_info.archive_name
996             archive_name = os.path.basename(archive_path)
997             d_archives[p_name] = (archive_path,
998                                   os.path.join(ARCHIVE_DIR, archive_name))
999             if (src.appli_test_property(config,"pip", "yes") and
1000                 src.product.product_test_property(p_info,"pip", "yes")):
1001                 # if pip mode is activated, and product is managed by pip
1002                 pip_wheels_dir=os.path.join(config.LOCAL.archive_dir,"wheels")
1003                 pip_wheel_pattern=os.path.join(pip_wheels_dir,
1004                     "%s-%s*" % (p_info.name, p_info.version))
1005                 pip_wheel_path=glob.glob(pip_wheel_pattern)
1006                 msg_pip_not_found="Error in get_archive, pip wheel for "\
1007                                   "product %s-%s was not found in %s directory"
1008                 msg_pip_two_or_more="Error in get_archive, several pip wheels for "\
1009                                   "product %s-%s were found in %s directory"
                # exactly one wheel must match the <name>-<version>* pattern
1010                 if len(pip_wheel_path)==0:
1011                     raise src.SatException(msg_pip_not_found %\
1012                         (p_info.name, p_info.version, pip_wheels_dir))
1013                 if len(pip_wheel_path)>1:
1014                     raise src.SatException(msg_pip_two_or_more %\
1015                         (p_info.name, p_info.version, pip_wheels_dir))
1017                 pip_wheel_name=os.path.basename(pip_wheel_path[0])
1018                 d_archives[p_name+" (pip wheel)"]=(pip_wheel_path[0],
1019                     os.path.join(ARCHIVE_DIR, "wheels", pip_wheel_name))
1021             # this product is not managed by archive,
1022             # an archive of the vcs directory will be created by get_archive_vcs
1023             l_pinfo_vcs.append((p_name, p_info))
1025     return d_archives, l_pinfo_vcs
def add_salomeTools(config, tmp_working_dir):
    '''Prepare a version of salomeTools that has a specific local.pyconf file
       configured for a source package.

    :param config Config: The global configuration.
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
                                source package
    :return: The path to the local salomeTools directory to add in the package
    :rtype: str
    '''
    # Copy sat in the temporary working directory
    sat_tmp_path = src.Path(os.path.join(tmp_working_dir, "salomeTools"))
    sat_running_path = src.Path(config.VARS.salometoolsway)
    sat_running_path.copy(sat_tmp_path)

    # Update the local.pyconf file that contains the path to the project
    local_pyconf_name = "local.pyconf"
    local_pyconf_dir = os.path.join(tmp_working_dir, "salomeTools", "data")
    local_pyconf_file = os.path.join(local_pyconf_dir, local_pyconf_name)
    # Remove the .pyconf file in the root directory of salomeTools if there is
    # any. (For example when launching jobs, a pyconf file describing the jobs
    # can be here and is not useful)
    files_or_dir_SAT = os.listdir(os.path.join(tmp_working_dir, "salomeTools"))
    for file_or_dir in files_or_dir_SAT:
        # str.endswith accepts a tuple of suffixes: one test instead of two
        if file_or_dir.endswith((".pyconf", ".txt")):
            file_path = os.path.join(tmp_working_dir,
                                     "salomeTools",
                                     file_or_dir)
            os.remove(file_path)

    # write the clean local.pyconf; the context manager guarantees the file
    # is closed even if the write raises
    with open(local_pyconf_file, "w") as ff:
        ff.write(LOCAL_TEMPLATE)

    return sat_tmp_path.path
def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir):
    '''For sources package that require that all products are get using an
       archive, one has to create some archive for the vcs products.
       So this method calls the clean and source command of sat and then create
       the archives.

    :param l_pinfo_vcs List: The list of specific configuration corresponding to
                             each vcs product
    :param sat Sat: The Sat instance that can be called to clean and source the
                    products
    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
                                source package
    :return: the dictionary that stores all the archives to add in the source
             package. {label : (path_on_local_machine, path_in_archive)}
    :rtype: dict
    '''
    l_prod_names = [pn for pn, __ in l_pinfo_vcs]
    # NOTE(review): a former "sat clean --sources" step was disabled here
    # (guarded by "if False:") because clean is dangerous in user/SOURCES;
    # sources are fetched in a dedicated tmp_local_working_dir instead.
    # The dead block has been removed.

    logger.write(_("get sources\n"))
    args_source = config.VARS.application
    args_source += " --products "
    args_source += ",".join(l_prod_names)
    svgDir = sat.cfg.APPLICATION.workdir
    # to avoid too much big files in /tmp
    tmp_local_working_dir = os.path.join(sat.cfg.APPLICATION.workdir, "tmp_package")
    sat.cfg.APPLICATION.workdir = tmp_local_working_dir
    d_archives_vcs = {}
    try:
        # call source.run directly so that runner.cfg is used as reference
        # (sat.source() would not operate on the same config object)
        source.run(args_source, sat, logger)

        # make the new archives
        for pn, pinfo in l_pinfo_vcs:
            path_archive = make_archive(pn, pinfo, tmp_local_working_dir)
            logger.write("make archive vcs '%s'\n" % path_archive)
            d_archives_vcs[pn] = (path_archive,
                                  os.path.join(ARCHIVE_DIR, pn + ".tgz"))
    finally:
        # always restore the caller's workdir, even if source.run raises
        sat.cfg.APPLICATION.workdir = svgDir
    return d_archives_vcs
def make_bin_archive(prod_name, prod_info, where):
    '''Create an archive of a product from its install directory.

    :param prod_name str: The name of the product.
    :param prod_info Config: The specific configuration corresponding to the
                             product
    :param where str: The path of the repository where to put the resulting
                      archive
    :return: The path of the resulting archive
    :rtype: str
    '''
    path_targz_prod = os.path.join(where, prod_name + PACKAGE_EXT)
    tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
    try:
        bin_path = prod_info.install_dir
        # BUGFIX: arcname used to be path_targz_prod (the archive's own path),
        # which stored the tree under a bogus name inside the archive;
        # store it under the product name instead, like make_archive does.
        tar_prod.add(bin_path, arcname=prod_name)
    finally:
        # close in a finally so a failed add() does not leak the file handle
        tar_prod.close()
    return path_targz_prod
def make_archive(prod_name, prod_info, where):
    '''Create an archive of a product by searching its source directory.

    :param prod_name str: The name of the product.
    :param prod_info Config: The specific configuration corresponding to the
                             product
    :param where str: The path of the repository where to put the resulting
                      archive
    :return: The path of the resulting archive
    :rtype: str
    '''
    path_targz_prod = os.path.join(where, prod_name + PACKAGE_EXT)
    tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
    try:
        local_path = prod_info.source_dir
        if old_python:
            # python <= 2.6: tarfile.add only supports the 'exclude' callback
            tar_prod.add(local_path,
                         arcname=prod_name,
                         exclude=exclude_VCS_and_extensions_26)
        else:
            # modern python: 'exclude' is gone, use the 'filter' callback
            tar_prod.add(local_path,
                         arcname=prod_name,
                         filter=exclude_VCS_and_extensions)
    finally:
        # close in a finally so a failed add() does not leak the file handle
        tar_prod.close()
    return path_targz_prod
def create_project_for_src_package(config, tmp_working_dir, with_vcs, with_ftp):
    '''Create a specific project for a source package.

    :param config Config: The global configuration.
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
                                source package
    :param with_vcs boolean: True if the package is with vcs products (not
                             transformed into archive products)
    :param with_ftp boolean: True if the package use ftp servers to get archives
    :return: The dictionary
             {"project" : (produced project, project path in the archive)}
    :rtype: Dict
    '''
    # Create in the working temporary directory the full project tree
    project_tmp_dir = os.path.join(tmp_working_dir, PROJECT_DIR)
    products_pyconf_tmp_dir = os.path.join(project_tmp_dir,
                                           "products")
    compil_scripts_tmp_dir = os.path.join(project_tmp_dir,
                                          "products",
                                          "compil_scripts")
    env_scripts_tmp_dir = os.path.join(project_tmp_dir,
                                       "products",
                                       "env_scripts")
    patches_tmp_dir = os.path.join(project_tmp_dir,
                                   "products",
                                   "patches")
    application_tmp_dir = os.path.join(project_tmp_dir,
                                       "applications")
    # creating the leaf directories also creates "products" itself
    for directory in [project_tmp_dir,
                      compil_scripts_tmp_dir,
                      env_scripts_tmp_dir,
                      patches_tmp_dir,
                      application_tmp_dir]:
        src.ensure_path_exists(directory)

    # Create the pyconf that contains the information of the project;
    # the context manager guarantees the file is closed
    project_pyconf_name = "project.pyconf"
    project_pyconf_file = os.path.join(project_tmp_dir, project_pyconf_name)
    with open(project_pyconf_file, "w") as ff:
        ff.write(PROJECT_TEMPLATE)
        if with_ftp and len(config.PATHS.ARCHIVEFTP) > 0:
            # colon-separated list of ftp servers
            ftp_path = 'ARCHIVEFTP : "' + config.PATHS.ARCHIVEFTP[0]
            for ftpserver in config.PATHS.ARCHIVEFTP[1:]:
                ftp_path = ftp_path + ":" + ftpserver
            ftp_path += '"'
            ff.write("# ftp servers where to search for prerequisite archives\n")
            ff.write(ftp_path)
        # add licence paths if any
        if len(config.PATHS.LICENCEPATH) > 0:
            licence_path = 'LICENCEPATH : "' + config.PATHS.LICENCEPATH[0]
            for path in config.PATHS.LICENCEPATH[1:]:
                licence_path = licence_path + ":" + path
            licence_path += '"'
            ff.write("\n# Where to search for licences\n")
            ff.write(licence_path)

    # Loop over the products to get there pyconf and all the scripts
    # (compilation, environment, patches)
    # and create the pyconf file to add to the project
    lproducts_name = config.APPLICATION.products.keys()
    l_products = src.product.get_products_infos(lproducts_name, config)
    for p_name, p_info in l_products:
        # skip product with property not_in_package set to yes
        if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
            continue
        find_product_scripts_and_pyconf(p_name,
                                        p_info,
                                        config,
                                        with_vcs,
                                        compil_scripts_tmp_dir,
                                        env_scripts_tmp_dir,
                                        patches_tmp_dir,
                                        products_pyconf_tmp_dir)

    # for the application pyconf, we write directly the config
    # don't search for the original pyconf file
    # to avoid problems with overwrite sections and rm_products key
    write_application_pyconf(config, application_tmp_dir)

    d_project = {"project" : (project_tmp_dir, PROJECT_DIR)}
    return d_project
def find_product_scripts_and_pyconf(p_name,
                                    p_info,
                                    config,
                                    with_vcs,
                                    compil_scripts_tmp_dir,
                                    env_scripts_tmp_dir,
                                    patches_tmp_dir,
                                    products_pyconf_tmp_dir):
    '''Create a specific pyconf file for a given product. Get its environment
       script, its compilation script and patches and put it in the temporary
       working directory. This method is used in the source package in order to
       construct the specific project.

    :param p_name str: The name of the product.
    :param p_info Config: The specific configuration corresponding to the
                          product
    :param config Config: The global configuration.
    :param with_vcs boolean: True if the package is with vcs products (not
                             transformed into archive products)
    :param compil_scripts_tmp_dir str: The path to the temporary compilation
                                       scripts directory of the project.
    :param env_scripts_tmp_dir str: The path to the temporary environment script
                                    directory of the project.
    :param patches_tmp_dir str: The path to the temporary patch scripts
                                directory of the project.
    :param products_pyconf_tmp_dir str: The path to the temporary product
                                        scripts directory of the project.
    '''
    # read the pyconf of the product
    product_pyconf_cfg = src.pyconf.Config(p_info.from_file)

    # find the compilation script if any
    if src.product.product_has_script(p_info):
        compil_script_path = src.Path(p_info.compil_script)
        compil_script_path.copy(compil_scripts_tmp_dir)

    # find the environment script if any
    if src.product.product_has_env_script(p_info):
        env_script_path = src.Path(p_info.environ.env_script)
        env_script_path.copy(env_scripts_tmp_dir)

    # find the patches if any
    if src.product.product_has_patches(p_info):
        patches = src.pyconf.Sequence()
        for patch_path in p_info.patches:
            p_path = src.Path(patch_path)
            p_path.copy(patches_tmp_dir)
            patches.append(os.path.basename(patch_path), "")
        # NOTE(review): 'patches' is built but not visibly written back into
        # product_pyconf_cfg here — the copy to patches_tmp_dir is the useful
        # side effect; confirm against the full original file.

    if (not with_vcs) and src.product.product_is_vcs(p_info):
        # in non vcs mode, if the product is not archive, then make it become archive.

        # depending upon the incremental mode, select impacted sections
        if "properties" in p_info and "incremental" in p_info.properties and\
                p_info.properties.incremental == "yes":
            sections = ["default", "default_win", p_info.section, p_info.section + "_win"]
        else:
            sections = [p_info.section]
        for section in sections:
            if section in product_pyconf_cfg and "get_source" in product_pyconf_cfg[section]:
                DBG.write("sat package set archive mode to archive for product %s and section %s" %\
                          (p_name, section))
                product_pyconf_cfg[section].get_source = "archive"
                if not "archive_info" in product_pyconf_cfg[section]:
                    product_pyconf_cfg[section].addMapping("archive_info",
                                        src.pyconf.Mapping(product_pyconf_cfg),
                                        "")
                product_pyconf_cfg[section].archive_info.archive_name =\
                    p_info.name + ".tgz"

    # save git repositories for vcs products, even if archive is not in VCS mode
    # in this case the user will be able to change get_source flag and work with git
    if src.product.product_is_vcs(p_info):
        # in vcs mode we must replace explicitely the git server url
        # (or it will not be found later because project files are not exported in archives)
        for section in product_pyconf_cfg:
            # replace in all sections of the product pyconf the git repo
            # definition by its substitued value (found in p_info)
            if "git_info" in product_pyconf_cfg[section]:
                for repo in product_pyconf_cfg[section].git_info:
                    if repo in p_info.git_info:
                        product_pyconf_cfg[section].git_info[repo] = p_info.git_info[repo]

    # write the pyconf file to the temporary project location;
    # the context manager guarantees the file is closed
    product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir,
                                           p_name + ".pyconf")
    with open(product_tmp_pyconf_path, 'w') as ff:
        ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
        product_pyconf_cfg.__save__(ff, 1)
def write_application_pyconf(config, application_tmp_dir):
    '''Write the application pyconf file in the specific temporary
       directory containing the specific project of a source package.

    :param config Config: The global configuration.
    :param application_tmp_dir str: The path to the temporary application
                                    scripts directory of the project.
    '''
    application_name = config.VARS.application
    # write the pyconf file to the temporary application location
    application_tmp_pyconf_path = os.path.join(application_tmp_dir,
                                               application_name + ".pyconf")
    with open(application_tmp_pyconf_path, 'w') as f:
        f.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
        res = src.pyconf.Config()
        app = src.pyconf.deepCopyMapping(config.APPLICATION)

        # set base mode to "no" for the archive
        app.base = "no"

        # Change the workdir so the unpacked application resolves it locally
        # NOTE(review): the Reference target below is reconstructed from a
        # truncated original — confirm "LOCAL.workdir" against the repository.
        app.workdir = src.pyconf.Reference(app,
                                           src.pyconf.DOLLAR,
                                           "LOCAL.workdir")
        res.addMapping("APPLICATION", app, "")
        # keep references unevaluated so they resolve on the target machine
        res.__save__(f, evaluated=False)
def sat_package(config, tmp_working_dir, options, logger):
    '''Prepare a dictionary that stores all the needed directories and files to
       add in a salomeTool package.

    :param config Config: The global configuration.
    :param tmp_working_dir str: The temporary local working directory
    :param options OptResult: the options of the launched command
    :param logger Logger: the logging instance
    :return: the dictionary that stores all the needed directories and files to
             add in a salomeTool package.
             {label : (path_on_local_machine, path_in_archive)}
    :rtype: dict
    '''
    d_project = {}

    # we include sat himself
    d_project["all_sat"] = (config.VARS.salometoolsway, "")

    # and we overwrite local.pyconf with a clean version.
    local_pyconf_tmp_path = os.path.join(tmp_working_dir, "local.pyconf")
    local_file_path = os.path.join(config.VARS.datadir, "local.pyconf")
    local_cfg = src.pyconf.Config(local_file_path)
    local_cfg.PROJECTS.project_file_paths = src.pyconf.Sequence(local_cfg.PROJECTS)
    # reset the user-specific entries so the packaged sat starts from defaults
    local_cfg.LOCAL["base"] = "default"
    local_cfg.LOCAL["workdir"] = "default"
    local_cfg.LOCAL["log_dir"] = "default"
    local_cfg.LOCAL["archive_dir"] = "default"
    local_cfg.LOCAL["VCS"] = "None"
    local_cfg.LOCAL["tag"] = src.get_salometool_version(config)

    # if the archive contains a project, we write its relative path in local.pyconf
    if options.project:
        project_arch_path = os.path.join("projects", options.project,
                                         os.path.basename(options.project_file_path))
        local_cfg.PROJECTS.project_file_paths.append(project_arch_path, "")

    # the context manager guarantees the file is closed even if __save__ raises
    with open(local_pyconf_tmp_path, 'w') as ff:
        local_cfg.__save__(ff, 1)
    d_project["local.pyconf"] = (local_pyconf_tmp_path, "data/local.pyconf")
    return d_project
def project_package(config, name_project, project_file_path, ftp_mode, tmp_working_dir, embedded_in_sat, logger):
    '''Prepare a dictionary that stores all the needed directories and files to
       add in a project package.

    :param config Config: The global configuration.
    :param name_project str: The name of the project.
    :param project_file_path str: The path to the local project.
    :param ftp_mode boolean: Do not embed archives, the archive will rely on ftp mode to retrieve them.
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
                                project package
    :param embedded_in_sat boolean : the project package is embedded in a sat package
    :param logger Logger: the logging instance
    :return: the dictionary that stores all the needed directories and files to
             add in a project package.
             {label : (path_on_local_machine, path_in_archive)}
    :rtype: dict
    '''
    d_project = {}
    # Read the project file and get the directories to add to the package
    try:
        project_pyconf_cfg = config.PROJECTS.projects.__getattr__(name_project)
    except Exception:
        # narrow-ish fallback: the project is not registered in the config,
        # read it directly from its file (bare "except:" replaced)
        logger.write("""
WARNING: inexisting config.PROJECTS.projects.%s, try to read now from:\n%s\n""" % (name_project, project_file_path))
        project_pyconf_cfg = src.pyconf.Config(project_file_path)
        project_pyconf_cfg.PWD = os.path.dirname(project_file_path)

    paths = {"APPLICATIONPATH" : "applications",
             "PRODUCTPATH" : "products",
             "JOBPATH" : "jobs",
             "MACHINEPATH" : "machines"}
    if not ftp_mode:
        paths["ARCHIVEPATH"] = "archives"

    # Loop over the project paths and add it
    project_file_name = os.path.basename(project_file_path)
    # ROBUSTNESS: default destination so the final dict insert cannot hit an
    # unbound name when no project path matches
    project_file_dest = project_file_name
    for path in paths:
        if path not in project_pyconf_cfg:
            continue
        if embedded_in_sat:
            dest_path = os.path.join("projects", name_project, paths[path])
            project_file_dest = os.path.join("projects", name_project, project_file_name)
        else:
            dest_path = paths[path]
            project_file_dest = project_file_name

        # Add the directory to the files to add in the package
        d_project[path] = (project_pyconf_cfg[path], dest_path)

        # Modify the value of the path in the package
        project_pyconf_cfg[path] = src.pyconf.Reference(
                                        project_pyconf_cfg,
                                        src.pyconf.DOLLAR,
                                        'project_path + "/' + paths[path] + '"')

    # Modify some values
    if "project_path" not in project_pyconf_cfg:
        project_pyconf_cfg.addMapping("project_path",
                                      src.pyconf.Mapping(project_pyconf_cfg),
                                      "")
    project_pyconf_cfg.project_path = src.pyconf.Reference(project_pyconf_cfg,
                                                           src.pyconf.DOLLAR,
                                                           'PWD')

    # we don't want to export these two fields
    project_pyconf_cfg.__delitem__("file_path")
    project_pyconf_cfg.__delitem__("PWD")
    if ftp_mode:
        project_pyconf_cfg.__delitem__("ARCHIVEPATH")

    # Write the project pyconf file; the context manager closes it
    project_pyconf_tmp_path = os.path.join(tmp_working_dir, project_file_name)
    with open(project_pyconf_tmp_path, 'w') as ff:
        ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
        project_pyconf_cfg.__save__(ff, 1)
    d_project["Project hat file"] = (project_pyconf_tmp_path, project_file_dest)

    return d_project
def add_readme(config, options, where):
    '''Create the README file of the package and return its path.

    :param config Config: The global configuration.
    :param options OptResult: the options of the launched command
    :param where str: the directory where to create the README
    :return: the path of the produced README file
    :rtype: str
    '''
    readme_path = os.path.join(where, "README")
    with codecs.open(readme_path, "w", 'utf-8') as f:

        # templates for building the header
        # NOTE(review): some header lines were truncated in the reviewed
        # extract and are reconstructed — confirm exact wording.
        readme_header = """
# This package was generated with sat $version
# Date: $date
# User: $user
# Distribution : $dist

In the following, $$ROOT represents the directory where you have installed
SALOME (the directory where this file is located).
"""
        if src.architecture.is_windows():
            readme_header = readme_header.replace('$$ROOT', '%ROOT%')
        readme_compilation_with_binaries = """

compilation based on the binaries used as prerequisites
=======================================================

If you fail to compile the complete application (for example because
you are not root on your system and cannot install missing packages), you
may try a partial compilation based on the binaries.
For that it is necessary to copy the binaries from BINARIES to INSTALL,
and do some substitutions on cmake and .la files (replace the build directories
with local paths).
The procedure to do it is:
 1) Remove or rename INSTALL directory if it exists
 2) Execute the shell script install_bin.sh:
 > cd $ROOT
 > ./install_bin.sh
 3) Use SalomeTool (as explained in Sources section) and compile only the
    modules you need to (with -p option)
"""
        readme_header_tpl = string.Template(readme_header)
        readme_template_path_bin = os.path.join(config.VARS.internal_dir,
                                                "README_BIN.template")
        readme_template_path_bin_launcher = os.path.join(config.VARS.internal_dir,
                                                "README_LAUNCHER.template")
        readme_template_path_bin_virtapp = os.path.join(config.VARS.internal_dir,
                                                "README_BIN_VIRTUAL_APP.template")
        readme_template_path_src = os.path.join(config.VARS.internal_dir,
                                                "README_SRC.template")
        readme_template_path_pro = os.path.join(config.VARS.internal_dir,
                                                "README_PROJECT.template")
        readme_template_path_sat = os.path.join(config.VARS.internal_dir,
                                                "README_SAT.template")

        # prepare substitution dictionary
        d = dict()
        d['user'] = config.VARS.user
        d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
        d['version'] = src.get_salometool_version(config)
        d['dist'] = config.VARS.dist
        f.write(readme_header_tpl.substitute(d))  # write the general header (common)

        if options.binaries or options.sources:
            d['application'] = config.VARS.application
            d['BINARIES'] = config.INTERNAL.config.binary_dir
            d['SEPARATOR'] = config.VARS.sep
            if src.architecture.is_windows():
                d['operatingSystem'] = 'Windows'
                d['PYTHON3'] = 'python3'
                d['ROOT'] = '%ROOT%'
            else:
                d['operatingSystem'] = 'Linux'
                d['PYTHON3'] = 'python3'
                d['ROOT'] = '$ROOT'
            f.write("# Application: " + d['application'] + "\n")
            if 'KERNEL' in config.APPLICATION.products:
                VersionSalome = src.get_salome_version(config)
                # Case where SALOME has the launcher that uses the SalomeContext API
                if VersionSalome >= 730:
                    d['launcher'] = config.APPLICATION.profile.launcher_name
                else:
                    d['virtual_app'] = 'runAppli' # this info is not used now)

        # write the specific sections
        if options.binaries:
            f.write(src.template.substitute(readme_template_path_bin, d))
            if "virtual_app" in d:
                f.write(src.template.substitute(readme_template_path_bin_virtapp, d))
            if "launcher" in d:
                f.write(src.template.substitute(readme_template_path_bin_launcher, d))

        if options.sources:
            f.write(src.template.substitute(readme_template_path_src, d))

        if options.binaries and options.sources and not src.architecture.is_windows():
            f.write(readme_compilation_with_binaries)

        if options.project:
            f.write(src.template.substitute(readme_template_path_pro, d))

        if options.sat:
            f.write(src.template.substitute(readme_template_path_sat, d))

    return readme_path
def update_config(config, logger, prop, value):
    '''Remove from config.APPLICATION.products the products that have the property given as input.

    :param config Config: The global config.
    :param logger Logger: the logging instance
    :param prop str: The property to filter
    :param value str: The value of the property to filter
    '''
    # if there is no APPLICATION (ex sat package -t) : nothing to do
    if "APPLICATION" not in config:
        return
    # collect first, then delete: never mutate the mapping while iterating it
    doomed = [name
              for name in config.APPLICATION.products.keys()
              if src.get_property_in_product_cfg(
                     src.product.get_product_config(config, name), prop) == value]
    for name in doomed:
        config.APPLICATION.products.__delitem__(name)
        logger.write("Remove product %s with property %s\n" % (name, prop), 5)
def description():
    '''method that is called when salomeTools is called with --help option.

    :return: The text to display for the package command description.
    :rtype: str
    '''
    # NOTE(review): the "def" line and the final example line were truncated
    # in the reviewed extract and are reconstructed.
    return _("""
The package command creates a tar file archive of a product.
There are four kinds of archive, which can be mixed:

 1 - The binary archive.
     It contains the product installation directories plus a launcher.
 2 - The sources archive.
     It contains the product archives, a project (the application plus salomeTools).
 3 - The project archive.
     It contains a project (give the project file path as argument).
 4 - The salomeTools archive.
     It contains code utility salomeTools.

example:
 >> sat package SALOME-master --binaries --sources""")
1632 def run(args, runner, logger):
1633 '''method that is called when salomeTools is called with package parameter.
1637 (options, args) = parser.parse_args(args)
1640 # Check that a type of package is called, and only one
1641 all_option_types = (options.binaries,
1643 options.project not in ["", None],
1645 options.bin_products)
1647 # Check if no option for package type
1648 if all_option_types.count(True) == 0:
1649 msg = _("Error: Precise a type for the package\nUse one of the "
1650 "following options: --binaries, --sources, --project or"
1651 " --salometools, --bin_products")
1652 logger.write(src.printcolors.printcError(msg), 1)
1653 logger.write("\n", 1)
1655 do_create_package = options.binaries or options.sources or options.project or options.sat
1657 if options.bin_products:
1658 ret = bin_products_archives(runner.cfg, logger, options.with_vcs)
1661 if not do_create_package:
1664 # continue to create a tar.gz package
1666 # The repository where to put the package if not Binary or Source
1667 package_default_path = runner.cfg.LOCAL.workdir
1668 # if the package contains binaries or sources:
1669 if options.binaries or options.sources or options.bin_products:
1670 # Check that the command has been called with an application
1671 src.check_config_has_application(runner.cfg)
1673 # Display information
1674 logger.write(_("Packaging application %s\n") % src.printcolors.printcLabel(
1675 runner.cfg.VARS.application), 1)
1677 # Get the default directory where to put the packages
1678 package_default_path = os.path.join(runner.cfg.APPLICATION.workdir, "PACKAGE")
1679 src.ensure_path_exists(package_default_path)
1681 # if the package contains a project:
1683 # check that the project is visible by SAT
1684 projectNameFile = options.project + ".pyconf"
1686 for i in runner.cfg.PROJECTS.project_file_paths:
1687 baseName = os.path.basename(i)
1688 if baseName == projectNameFile:
1692 if foundProject is None:
1693 local_path = os.path.join(runner.cfg.VARS.salometoolsway, "data", "local.pyconf")
1694 msg = _("""ERROR: the project %(1)s is not visible by salomeTools.
1698 Please add it in file:
1700 {"1": options.project, "2": "\n ".join(runner.cfg.PROJECTS.project_file_paths), "3": local_path})
1701 logger.write(src.printcolors.printcError(msg), 1)
1702 logger.write("\n", 1)
1705 options.project_file_path = foundProject
1706 src.printcolors.print_value(logger, "Project path", options.project_file_path, 2)
1708 # Remove the products that are filtered by the --without_properties option
1709 if options.without_properties:
1710 prop, value = options.without_properties
1711 update_config(runner.cfg, logger, prop, value)
1713 # Remove from config the products that have the not_in_package property
1714 update_config(runner.cfg, logger, "not_in_package", "yes")
1716 # get the name of the archive or build it
1718 if os.path.basename(options.name) == options.name:
1719 # only a name (not a path)
1720 archive_name = options.name
1721 dir_name = package_default_path
1723 archive_name = os.path.basename(options.name)
1724 dir_name = os.path.dirname(options.name)
1726 # suppress extension
1727 if archive_name[-len(".tgz"):] == ".tgz":
1728 archive_name = archive_name[:-len(".tgz")]
1729 if archive_name[-len(".tar.gz"):] == ".tar.gz":
1730 archive_name = archive_name[:-len(".tar.gz")]
1734 dir_name = package_default_path
1735 if options.binaries or options.sources:
1736 archive_name = runner.cfg.APPLICATION.name
1738 if options.binaries:
1739 archive_name += "-"+runner.cfg.VARS.dist
1742 archive_name += "-SRC"
1743 if options.with_vcs:
1744 archive_name += "-VCS"
1747 archive_name += ("salomeTools_" + src.get_salometool_version(runner.cfg))
1752 archive_name += ("satproject_" + options.project)
1754 if len(archive_name)==0: # no option worked
1755 msg = _("Error: Cannot name the archive\n"
1756 " check if at least one of the following options was "
1757 "selected : --binaries, --sources, --project or"
1759 logger.write(src.printcolors.printcError(msg), 1)
1760 logger.write("\n", 1)
1763 path_targz = os.path.join(dir_name, archive_name + PACKAGE_EXT)
1765 src.printcolors.print_value(logger, "Package path", path_targz, 2)
1767 # Create a working directory for all files that are produced during the
1768 # package creation and that will be removed at the end of the command
1769 tmp_working_dir = os.path.join(runner.cfg.VARS.tmp_root, runner.cfg.VARS.datehour)
1770 src.ensure_path_exists(tmp_working_dir)
1771 logger.write("\n", 5)
1772 logger.write(_("The temporary working directory: %s\n" % tmp_working_dir),5)
1774 logger.write("\n", 3)
1776 msg = _("Preparation of files to add to the archive")
1777 logger.write(src.printcolors.printcLabel(msg), 2)
1778 logger.write("\n", 2)
1780 d_files_to_add={} # content of the archive
1782 # a dict to hold paths that will need to be substitute for users recompilations
1783 d_paths_to_substitute={}
1785 if options.binaries:
1786 d_bin_files_to_add = binary_package(runner.cfg,
1790 # for all binaries dir, store the substitution that will be required
1791 # for extra compilations
1792 for key in d_bin_files_to_add:
1793 if key.endswith("(bin)"):
1794 source_dir = d_bin_files_to_add[key][0]
1795 path_in_archive = d_bin_files_to_add[key][1].replace(
1796 runner.cfg.INTERNAL.config.binary_dir + runner.cfg.VARS.dist,
1797 runner.cfg.INTERNAL.config.install_dir)
1798 if os.path.basename(source_dir)==os.path.basename(path_in_archive):
1799 # if basename is the same we will just substitute the dirname
1800 d_paths_to_substitute[os.path.dirname(source_dir)]=\
1801 os.path.dirname(path_in_archive)
1803 d_paths_to_substitute[source_dir]=path_in_archive
1805 d_files_to_add.update(d_bin_files_to_add)
1807 d_files_to_add.update(source_package(runner,
1812 if options.binaries:
1813 # for archives with bin and sources we provide a shell script able to
1814 # install binaries for compilation
1815 file_install_bin=produce_install_bin_file(runner.cfg,logger,
1817 d_paths_to_substitute,
1819 d_files_to_add.update({"install_bin" : (file_install_bin, "install_bin.sh")})
1820 logger.write("substitutions that need to be done later : \n", 5)
1821 logger.write(str(d_paths_to_substitute), 5)
1822 logger.write("\n", 5)
1824 # --salomeTool option is not considered when --sources is selected, as this option
1825 # already brings salomeTool!
1827 d_files_to_add.update(sat_package(runner.cfg, tmp_working_dir,
1831 DBG.write("config for package %s" % options.project, runner.cfg)
1832 d_files_to_add.update(project_package(runner.cfg, options.project, options.project_file_path, options.ftp, tmp_working_dir, options.sat, logger))
1834 if not(d_files_to_add):
1835 msg = _("Error: Empty dictionnary to build the archive!\n")
1836 logger.write(src.printcolors.printcError(msg), 1)
1837 logger.write("\n", 1)
1840 # Add the README file in the package
1841 local_readme_tmp_path = add_readme(runner.cfg, options, tmp_working_dir)
1842 d_files_to_add["README"] = (local_readme_tmp_path, "README")
1844 # Add the additional files of option add_files
1845 if options.add_files:
1846 for file_path in options.add_files:
1847 if not os.path.exists(file_path):
1848 msg = _("WARNING: the file %s is not accessible.\n" % file_path)
1850 file_name = os.path.basename(file_path)
1851 d_files_to_add[file_name] = (file_path, file_name)
1853 logger.write("\n", 2)
1854 logger.write(src.printcolors.printcLabel(_("Actually do the package")), 2)
1855 logger.write("\n", 2)
1856 logger.write("\nfiles and directories to add:\n%s\n\n" % PP.pformat(d_files_to_add), 5)
1860 # Creating the object tarfile
1861 tar = tarfile.open(path_targz, mode='w:gz')
1863 # get the filtering function if needed
1865 filter_function = exclude_VCS_and_extensions_26
1867 filter_function = exclude_VCS_and_extensions
1869 # Add the files to the tarfile object
1870 res = add_files(tar, archive_name, d_files_to_add, logger, f_exclude=filter_function)
1872 except KeyboardInterrupt:
1873 logger.write(src.printcolors.printcError("\nERROR: forced interruption\n"), 1)
1874 logger.write(_("Removing the temporary working directory '%s'... ") % tmp_working_dir, 1)
1875 # remove the working directory
1876 shutil.rmtree(tmp_working_dir)
1877 logger.write(_("OK"), 1)
1878 logger.write(_("\n"), 1)
1881 # case if no application, only package sat as 'sat package -t'
1883 app = runner.cfg.APPLICATION
1887 # unconditionaly remove the tmp_local_working_dir
1889 tmp_local_working_dir = os.path.join(app.workdir, "tmp_package")
1890 if os.path.isdir(tmp_local_working_dir):
1891 shutil.rmtree(tmp_local_working_dir)
1893 # remove the tmp directory, unless user has registered as developer
1894 if os.path.isdir(tmp_working_dir) and (not DBG.isDeveloper()):
1895 shutil.rmtree(tmp_working_dir)
1897 # Print again the path of the package
1898 logger.write("\n", 2)
1899 src.printcolors.print_value(logger, "Package path", path_targz, 2)