[tools/sat.git] / commands / package.py
1 #!/usr/bin/env python
2 # -*- coding:utf-8 -*-
3 #  Copyright (C) 2010-2012  CEA/DEN
4 #
5 #  This library is free software; you can redistribute it and/or
6 #  modify it under the terms of the GNU Lesser General Public
7 #  License as published by the Free Software Foundation; either
8 #  version 2.1 of the License.
9 #
10 #  This library is distributed in the hope that it will be useful,
11 #  but WITHOUT ANY WARRANTY; without even the implied warranty of
12 #  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13 #  Lesser General Public License for more details.
14 #
15 #  You should have received a copy of the GNU Lesser General Public
16 #  License along with this library; if not, write to the Free Software
17 #  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
18
19 import os
20 import stat
21 import shutil
22 import datetime
23 import tarfile
24 import codecs
25 import string
26 import glob
27 import pprint as PP
28 import sys
29 import src
30
31 from application import get_SALOME_modules
32 from src.versionMinorMajorPatch import MinorMajorPatch as MMP
33 import src.debug as DBG
34
35 old_python = sys.version_info[0] == 2 and sys.version_info[1] <= 6
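# On python <= 2.6, tarfile.TarFile.add() does not accept the "filter" callback,
# so the deprecated "exclude" callback is used instead (see add_files and make_archive below).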
36
37 BINARY = "binary"
38 SOURCE = "Source"
39 PROJECT = "Project"
40 SAT = "Sat"
41
42 ARCHIVE_DIR = "ARCHIVES"
43 PROJECT_DIR = "PROJECT"
44
45 IGNORED_DIRS = [".git", ".svn"]
46 IGNORED_EXTENSIONS = []
47
48 PACKAGE_EXT = ".tar.gz"  # the extension we use for the packages
49
50 if src.architecture.is_windows():
51     PROJECT_TEMPLATE = """#!/usr/bin/env python
52 #-*- coding:utf-8 -*-
53
54 # The path to the archive root directory
55 root_path : $PWD + "/../"
56 # path to the PROJECT
57 project_path : $PWD + "/"
58
59 # Where to search the archives of the products
60 ARCHIVEPATH : $root_path + "ARCHIVES"
61 # Where to search the pyconf of the applications
62 APPLICATIONPATH : $project_path + "applications/"
63 # Where to search the pyconf of the products
64 PRODUCTPATH : $project_path + "products/"
65 # Where to search the pyconf of the jobs of the project
66 JOBPATH : $project_path + "jobs/"
67 # Where to search the pyconf of the machines of the project
68 MACHINEPATH : $project_path + "machines/"
69 """
70 else:
71     PROJECT_TEMPLATE = """#!/usr/bin/env python
72 #-*- coding:utf-8 -*-
73
74 # path to the PROJECT
75 project_path : $PWD + "/"
76
77 # Where to search the archives of the products
78 ARCHIVEPATH : $project_path + "ARCHIVES"
79 # Where to search the pyconf of the applications
80 APPLICATIONPATH : $project_path + "applications/"
81 # Where to search the pyconf of the products
82 PRODUCTPATH : $project_path + "products/"
83 # Where to search the pyconf of the jobs of the project
84 JOBPATH : $project_path + "jobs/"
85 # Where to search the pyconf of the machines of the project
86 MACHINEPATH : $project_path + "machines/"
87 """
88
89
90 LOCAL_TEMPLATE = (
91     """#!/usr/bin/env python
92 #-*- coding:utf-8 -*-
93
94   LOCAL :
95   {
96     base : 'default'
97     workdir : 'default'
98     log_dir : 'default'
99     archive_dir : 'default'
100     VCS : 'unknown'
101     tag : 'unknown'
102   }
103
104 PROJECTS :
105 {
106   project_file_paths :
107   [
108 $LOCAL.workdir + $VARS.sep + \""""
109     + PROJECT_DIR
110     + """\" + $VARS.sep + "project.pyconf"
111   ]
112 }
113 """
114 )
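# LOCAL_TEMPLATE is written as data/local.pyconf in the copy of salomeTools embedded in
# source packages (see add_salomeTools below), so that the packaged sat points at the
# PROJECT directory shipped in the archive.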
115
116 # Define all possible options for the package command: sat package <options>
117 parser = src.options.Options()
118 parser.add_option(
119     "b",
120     "binaries",
121     "boolean",
122     "binaries",
123     _("Optional: Produce a binary package."),
124     False,
125 )
126 parser.add_option(
127     "f",
128     "force_creation",
129     "boolean",
130     "force_creation",
131     _(
132         "Optional: Only binary package: produce the archive even if "
133         "there are some missing products."
134     ),
135     False,
136 )
137 parser.add_option(
138     "s",
139     "sources",
140     "boolean",
141     "sources",
142     _("Optional: Produce a compilable archive of the sources of the " "application."),
143     False,
144 )
145 parser.add_option(
146     "",
147     "bin_products",
148     "boolean",
149     "bin_products",
150     _("Optional: Create binary archives for all products."),
151     False,
152 )
153 parser.add_option(
154     "",
155     "with_vcs",
156     "boolean",
157     "with_vcs",
158     _(
159         "Optional: Do not make archives for products in VCS mode (git, cvs, svn). "
160         "Sat prepare will use VCS mode instead to retrieve them."
161         '\n          Also, when combined with "--bin_products" option, restrict the building of product archives to VCS products.'
162     ),
163     False,
164 )
165 parser.add_option(
166     "",
167     "ftp",
168     "boolean",
169     "ftp",
170     _(
171         "Optional: Do not embed archives for products in archive mode. "
172         "Sat prepare will use ftp instead to retrieve them."
173     ),
174     False,
175 )
176 parser.add_option(
177     "e",
178     "exe",
179     "string",
180     "exe",
181     _("Optional: Produce an extra launcher based upon the exe given as argument."),
182     "",
183 )
184 parser.add_option(
185     "p",
186     "project",
187     "string",
188     "project",
189     _("Optional: Produce an archive that contains a project."),
190     "",
191 )
192 parser.add_option(
193     "t",
194     "salometools",
195     "boolean",
196     "sat",
197     _("Optional: Produce an archive that contains salomeTools."),
198     False,
199 )
200 parser.add_option(
201     "n",
202     "name",
203     "string",
204     "name",
205     _("Optional: The name or full path of the archive."),
206     None,
207 )
208 parser.add_option(
209     "",
210     "add_files",
211     "list2",
212     "add_files",
213     _("Optional: The list of additional files to add to the archive."),
214     [],
215 )
216 parser.add_option(
217     "",
218     "without_properties",
219     "properties",
220     "without_properties",
221     _(
222         "Optional: Filter the products by their properties.\n\tSyntax: "
223         "--without_properties <property>:<value>"
224     ),
225 )
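# Typical invocations (illustrative examples, with <application> standing for an
# application name):
#   sat package <application> --binaries --name /tmp/package-bin
#   sat package <application> --sources --with_vcs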
226
227
228 def add_files(tar, name_archive, d_content, logger, f_exclude=None):
229     """Create an archive containing all directories and files that are given in
230        the d_content argument.
231
232     :param tar tarfile: The tarfile instance used to make the archive.
233     :param name_archive str: The name of the archive to make.
234     :param d_content dict: The dictionary that contains all directories and files
235                            to add to the archive.
236                            d_content[label] =
237                                         (path_on_local_machine, path_in_archive)
238     :param logger Logger: the logging instance
239     :param f_exclude Function: the function used to filter out files or directories
240     :return: 0 on success, 1 otherwise.
241     :rtype: int
242     """
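    # Illustrative d_content entry (hypothetical paths):
    #   d_content["KERNEL (bin)"] = ("/local/INSTALL/KERNEL", "BINARIES-CO7/KERNEL")
    # i.e. label -> (path on the local machine, path inside the archive)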
243     # get the max length of the labels in order to align the display
244     max_len = len(max(d_content.keys(), key=len))
245
246     success = 0
247     # loop over each directory or file stored in the d_content dictionary
248     names = sorted(d_content.keys())
249     DBG.write("add tar names", names)
250
251     # used to avoid duplications (for pip install in python, or single_install_dir cases)
252     already_added = set()
253     for name in names:
254         # display information
255         len_points = max_len - len(name) + 3
256         local_path, archive_path = d_content[name]
257         in_archive = os.path.join(name_archive, archive_path)
258         logger.write(name + " " + len_points * "." + " " + in_archive + " ", 3)
259         # Get the local path and the path in archive
260         # of the directory or file to add
261         # Add it in the archive
262         try:
263             key = local_path + "->" + in_archive
264             if key not in already_added:
265                 if old_python:
266                     tar.add(
267                         local_path,
268                         arcname=in_archive,
269                         exclude=exclude_VCS_and_extensions_26,
270                     )
271                 else:
272                     tar.add(
273                         local_path,
274                         arcname=in_archive,
275                         filter=exclude_VCS_and_extensions,
276                     )
277                 already_added.add(key)
278             logger.write(src.printcolors.printcSuccess(_("OK")), 3)
279         except Exception as e:
280             logger.write(src.printcolors.printcError(_("KO ")), 3)
281             logger.write(str(e), 3)
282             success = 1
283         logger.write("\n", 3)
284     return success
285
286
287 def exclude_VCS_and_extensions_26(filename):
288     """The function used to exclude the VCS repository links (like .git) from the
289         package (only for python 2.6)
290
291     :param filename Str: The filename to exclude (or not).
292     :return: True if the file has to be excluded
293     :rtype: Boolean
294     """
295     for dir_name in IGNORED_DIRS:
296         if dir_name in filename:
297             return True
298     for extension in IGNORED_EXTENSIONS:
299         if filename.endswith(extension):
300             return True
301     return False
302
303
304 def exclude_VCS_and_extensions(tarinfo):
305     """The function used to exclude the VCS repository links (like .git) from the
306         package
307
308     :param tarinfo TarInfo: The tar member to exclude (or not).
309     :return: None if the member has to be excluded, the tarinfo otherwise
310     :rtype: tarinfo or None
311     """
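    # tarfile calls this once per member: returning None drops the member from the
    # archive, returning the tarinfo keeps it.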
312     filename = tarinfo.name
313     for dir_name in IGNORED_DIRS:
314         if dir_name in filename:
315             return None
316     for extension in IGNORED_EXTENSIONS:
317         if filename.endswith(extension):
318             return None
319     return tarinfo
320
321
322 def produce_relative_launcher(config, logger, file_dir, file_name, binaries_dir_name):
323     """Create a specific SALOME launcher for the binary package. This launcher
324        uses relative paths.
325
326     :param config Config: The global configuration.
327     :param logger Logger: the logging instance
328     :param file_dir str: the directory where to put the launcher
329     :param file_name str: The launcher name
330     :param binaries_dir_name str: the name of the directory where the binaries
331                                   are, in the archive.
332     :return: the path of the produced launcher
333     :rtype: str
334     """
335
336     # set base mode to "no" for the archive - save current mode to restore it at the end
337     if "base" in config.APPLICATION:
338         base_setting = config.APPLICATION.base
339     else:
340         base_setting = "maybe"
341     config.APPLICATION.base = "no"
342
343     # get KERNEL installation path
344     kernel_info = src.product.get_product_config(config, "KERNEL")
345     kernel_base_name = os.path.basename(kernel_info.install_dir)
346     if kernel_info.install_mode == "base":
347         # case of kernel installed in base. the kernel install dir name is different in the archive
348         kernel_base_name = os.path.basename(os.path.dirname(kernel_info.install_dir))
349
350     kernel_root_dir = os.path.join(binaries_dir_name, kernel_base_name)
351
352     # set kernel bin dir (considering fhs property)
353     kernel_cfg = src.product.get_product_config(config, "KERNEL")
354     if src.get_property_in_product_cfg(kernel_cfg, "fhs"):
355         bin_kernel_install_dir = os.path.join(kernel_root_dir, "bin")
356     else:
357         bin_kernel_install_dir = os.path.join(kernel_root_dir, "bin", "salome")
358
359     # check if the application contains an application module
360     # check also if the application has a distene product,
361     # in this case get its licence file name
362     l_product_info = src.product.get_products_infos(
363         config.APPLICATION.products.keys(), config
364     )
365     salome_application_name = "Not defined"
366     distene_licence_file_name = False
367     for prod_name, prod_info in l_product_info:
368         # look for a "salome application" and a distene product
369         if src.get_property_in_product_cfg(prod_info, "is_distene") == "yes":
370             distene_licence_file_name = src.product.product_has_licence(
371                 prod_info, config.PATHS.LICENCEPATH
372             )
373         if src.get_property_in_product_cfg(prod_info, "is_salome_application") == "yes":
374             salome_application_name = prod_info.name
375
376     # if the application contains an application module, we set ABSOLUTE_APPLI_PATH to it
377     # if not we set it to KERNEL_INSTALL_DIR, which is sufficient, except for salome test
378     if salome_application_name == "Not defined":
379         app_root_dir = kernel_root_dir
380     else:
381         app_root_dir = os.path.join(binaries_dir_name, salome_application_name)
382
383     additional_env = {}
384     additional_env["sat_bin_kernel_install_dir"] = (
385         "out_dir_Path + " + config.VARS.sep + bin_kernel_install_dir
386     )
387     if "python3" in config.APPLICATION and config.APPLICATION.python3 == "yes":
388         additional_env["sat_python_version"] = 3
389     else:
390         additional_env["sat_python_version"] = 2
391
392     additional_env["ABSOLUTE_APPLI_PATH"] = (
393         "out_dir_Path" + config.VARS.sep + app_root_dir
394     )
395     launcher_name = src.get_launcher_name(config)
396     additional_env["APPLI"] = "out_dir_Path" + config.VARS.sep + file_name
397
398     # create an environment file writer
399     writer = src.environment.FileEnvWriter(
400         config, logger, file_dir, src_root=None, env_info=None
401     )
402
403     filepath = os.path.join(file_dir, file_name)
404     # Write
405     writer.write_env_file(
406         filepath,
407         False,  # for launch
408         "cfgForPy",
409         additional_env=additional_env,
410         no_path_init=False,
411         for_package=binaries_dir_name,
412     )
413
414     # Little hack to put out_dir_Path outside the strings
415     src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"')
416     src.replace_in_file(filepath, "r'out_dir_Path + ", "out_dir_Path + r'")
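    # e.g. r"out_dir_Path/bin/salome" becomes out_dir_Path + r"/bin/salome" in the
    # generated launcher, so the prefix is concatenated at run time (illustrative example)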
417
418     # A hack to put a call to a file for distene licence.
419     # It does nothing to an application that has no distene product
420     if distene_licence_file_name:
421         logger.write(
422             "Application has a distene licence file! We use it in package launcher", 5
423         )
424         hack_for_distene_licence(filepath, distene_licence_file_name)
425
426     # change the rights in order to make the file executable for everybody
427     os.chmod(
428         filepath,
429         stat.S_IRUSR
430         | stat.S_IRGRP
431         | stat.S_IROTH
432         | stat.S_IWUSR
433         | stat.S_IXUSR
434         | stat.S_IXGRP
435         | stat.S_IXOTH,
436     )
437
438     # restore modified setting by its initial value
439     config.APPLICATION.base = base_setting
440
441     return filepath
442
443
444 def hack_for_distene_licence(filepath, licence_file):
445     """Replace the distene licence env variable by a call to a file.
446
447     :param filepath Str: The path to the launcher to modify.
448     """
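    # The two lines that follow the "# Set DISTENE License" marker in the launcher are
    # replaced by a try block that loads licence_file and calls
    # distene.set_distene_variables(context).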
449     shutil.move(filepath, filepath + "_old")
450     fileout = filepath
451     filein = filepath + "_old"
452     fin = open(filein, "r")
453     fout = open(fileout, "w")
454     text = fin.readlines()
455     # Find the Distene section
456     num_line = -1
457     for i, line in enumerate(text):
458         if "# Set DISTENE License" in line:
459             num_line = i
460             break
461     if num_line == -1:
462         # No distene product, there is nothing to do
463         fin.close()
464         for line in text:
465             fout.write(line)
466         fout.close()
467         return
468     del text[num_line + 1]
469     del text[num_line + 1]
470     text_to_insert = (
471         """    try:
472         distene_licence_file=r"%s"
473         if sys.version_info[0] >= 3 and sys.version_info[1] >= 5:
474             import importlib.util
475             spec_dist = importlib.util.spec_from_file_location("distene_licence", distene_licence_file)
476             distene=importlib.util.module_from_spec(spec_dist)
477             spec_dist.loader.exec_module(distene)
478         else:
479             import imp
480             distene = imp.load_source('distene_licence', distene_licence_file)
481         distene.set_distene_variables(context)
482     except:
483         pass\n"""
484         % licence_file
485     )
486     text.insert(num_line + 1, text_to_insert)
487     for line in text:
488         fout.write(line)
489     fin.close()
490     fout.close()
491     return
492
493
494 def produce_relative_env_files(
495     config, logger, file_dir, binaries_dir_name, exe_name=None
496 ):
497     """Create some specific environment files for the binary package. These
498        files use relative paths.
499
500     :param config Config: The global configuration.
501     :param logger Logger: the logging instance
502     :param file_dir str: the directory where to put the files
503     :param binaries_dir_name str: the name of the directory where the binaries
504                                   are, in the archive.
505     :param exe_name str: if given, generate a launcher executing exe_name
506     :return: the path of the produced environment file
507     :rtype: str
508     """
509
510     # set base mode to "no" for the archive - save current mode to restore it at the end
511     if "base" in config.APPLICATION:
512         base_setting = config.APPLICATION.base
513     else:
514         base_setting = "maybe"
515     config.APPLICATION.base = "no"
516
517     # create an environment file writer
518     writer = src.environment.FileEnvWriter(config, logger, file_dir, src_root=None)
519
520     if src.architecture.is_windows():
521         shell = "bat"
522         filename = "env_launch.bat"
523     else:
524         shell = "bash"
525         filename = "env_launch.sh"
526
527     if exe_name:
528         filename = os.path.basename(exe_name)
529
530     # Write
531     filepath = writer.write_env_file(
532         filename, False, shell, for_package=binaries_dir_name  # for launch
533     )
534
535     # Little hack to put out_dir_Path as environment variable
536     if src.architecture.is_windows():
537         src.replace_in_file(filepath, '"out_dir_Path', '"%out_dir_Path%')
538         src.replace_in_file(filepath, "=out_dir_Path", "=%out_dir_Path%")
539         src.replace_in_file(filepath, ";out_dir_Path", ";%out_dir_Path%")
540     else:
541         src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}')
542         src.replace_in_file(filepath, ":out_dir_Path", ":${out_dir_Path}")
543         src.replace_in_file(filepath, ";out_dir_Path", ";${out_dir_Path}")
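        # e.g. PATH="out_dir_Path/bin:${PATH}" becomes PATH="${out_dir_Path}/bin:${PATH}",
        # so the prefix is expanded by the shell at run time (illustrative example)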
544
545     if exe_name:
546         if src.architecture.is_windows():
547             cmd = "\n\nrem Launch exe with user arguments\n%s " % exe_name + "%*"
548         else:
549             cmd = '\n\n# Launch exe with user arguments\n%s "$*"' % exe_name
550         with open(filepath, "a") as exe_launcher:
551             exe_launcher.write(cmd)
552
553     # change the rights in order to make the file executable for everybody
554     os.chmod(
555         filepath,
556         stat.S_IRUSR
557         | stat.S_IRGRP
558         | stat.S_IROTH
559         | stat.S_IWUSR
560         | stat.S_IXUSR
561         | stat.S_IXGRP
562         | stat.S_IXOTH,
563     )
564
565     # restore modified setting by its initial value
566     config.APPLICATION.base = base_setting
567
568     return filepath
569
570
571 def produce_install_bin_file(config, logger, file_dir, d_sub, file_name):
572     """Create a bash shell script which does substitutions in the BINARIES dir
573        in order to use it for extra compilations.
574
575     :param config Config: The global configuration.
576     :param logger Logger: the logging instance
577     :param file_dir str: the directory where to put the files
578     :param d_sub dict: the dictionary that contains the substitutions to be done
579     :param file_name str: the name of the install script file
580     :return: the produced file
581     :rtype: str
582     """
583     # Write
584     filepath = os.path.join(file_dir, file_name)
585     # open the file and write into it
586     # use codec utf-8 as sat variables are in unicode
587     with codecs.open(filepath, "w", "utf-8") as installbin_file:
588         installbin_template_path = os.path.join(
589             config.VARS.internal_dir, "INSTALL_BIN.template"
590         )
591
592         # build the name of the directory that will contain the binaries
593         binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
594         # build the substitution loop
595         loop_cmd = "for f in $(grep -RIl"
596         for key in d_sub:
597             loop_cmd += " -e " + key
598         loop_cmd += " " + config.INTERNAL.config.install_dir + '); do\n     sed -i "\n'
599         for key in d_sub:
600             loop_cmd += "        s?" + key + "?$(pwd)/" + d_sub[key] + "?g\n"
601         loop_cmd += '            " $f\ndone'
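        # With a hypothetical key "OLD_PREFIX" -> "BINARIES-CO7/KERNEL" and assuming the
        # install_dir is "INSTALL", the generated snippet is roughly:
        #   for f in $(grep -RIl -e OLD_PREFIX INSTALL); do
        #        sed -i "
        #           s?OLD_PREFIX?$(pwd)/BINARIES-CO7/KERNEL?g
        #               " $f
        #   done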
602
603         d = {}
604         d["BINARIES_DIR"] = binaries_dir_name
605         d["SUBSTITUTION_LOOP"] = loop_cmd
606         d["INSTALL_DIR"] = config.INTERNAL.config.install_dir
607
608         # substitute the template and write it in file
609         content = src.template.substitute(installbin_template_path, d)
610         installbin_file.write(content)
611         # change the rights in order to make the file executable for everybody
612         os.chmod(
613             filepath,
614             stat.S_IRUSR
615             | stat.S_IRGRP
616             | stat.S_IROTH
617             | stat.S_IWUSR
618             | stat.S_IXUSR
619             | stat.S_IXGRP
620             | stat.S_IXOTH,
621         )
622
623     return filepath
624
625
626 def product_appli_creation_script(config, logger, file_dir, binaries_dir_name):
627     """Create a script that can produce an application (EDF style) in the binary
628        package.
629
630     :param config Config: The global configuration.
631     :param logger Logger: the logging instance
632     :param file_dir str: the directory where to put the file
633     :param binaries_dir_name str: the name of the directory where the binaries
634                                   are, in the archive.
635     :return: the path of the produced script file
636     :rtype: Str
637     """
638     template_name = "create_appli.py.for_bin_packages.template"
639     template_path = os.path.join(config.VARS.internal_dir, template_name)
640     text_to_fill = open(template_path, "r").read()
641     text_to_fill = text_to_fill.replace("TO BE FILLED 1", '"' + binaries_dir_name + '"')
642
643     text_to_add = ""
644     for product_name in get_SALOME_modules(config):
645         product_info = src.product.get_product_config(config, product_name)
646
647         if src.product.product_is_smesh_plugin(product_info):
648             continue
649
650         if "install_dir" in product_info and bool(product_info.install_dir):
651             if src.product.product_is_cpp(product_info):
652                 # cpp module
653                 for cpp_name in src.product.get_product_components(product_info):
654                     line_to_add = (
655                         '<module name="' + cpp_name + '" gui="yes" path="\'\'\' + '
656                         'os.path.join(dir_bin_name, "' + cpp_name + "\") + '''\"/>"
657                     )
658             else:
659                 # regular module
660                 line_to_add = (
661                     '<module name="' + product_name + '" gui="yes" path="\'\'\' + '
662                     'os.path.join(dir_bin_name, "' + product_name + "\") + '''\"/>"
663                 )
664             text_to_add += line_to_add + "\n"
665
666     filled_text = text_to_fill.replace("TO BE FILLED 2", text_to_add)
667
668     tmp_file_path = os.path.join(file_dir, "create_appli.py")
669     ff = open(tmp_file_path, "w")
670     ff.write(filled_text)
671     ff.close()
672
673     # change the rights in order to make the file executable for everybody
674     os.chmod(
675         tmp_file_path,
676         stat.S_IRUSR
677         | stat.S_IRGRP
678         | stat.S_IROTH
679         | stat.S_IWUSR
680         | stat.S_IXUSR
681         | stat.S_IXGRP
682         | stat.S_IXOTH,
683     )
684
685     return tmp_file_path
686
687
688 def bin_products_archives(config, logger, only_vcs):
689     """Prepare binary packages for all products.
690     :param config Config: The global configuration.
691     :return: the error status
692     :rtype: int
693     """
694
695     logger.write("Make %s binary archives\n" % config.VARS.dist)
696     # Get the default directory where to put the packages
697     binpackage_path = os.path.join(config.APPLICATION.workdir, "PACKAGE", "products")
698     src.ensure_path_exists(binpackage_path)
699     # Get the list of product installation to add to the archive
700     l_products_name = sorted(config.APPLICATION.products.keys())
701     l_product_info = src.product.get_products_infos(l_products_name, config)
702     # first loop on products : filter products, analyse properties,
703     # and store the information that will be used to create the archive in the second loop
704     l_not_installed = []  # store not installed products for warning at the end
705     for prod_name, prod_info in l_product_info:
706         # ignore the native and fixed products for install directories
707         if (
708             src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes"
709             or src.product.product_is_native(prod_info)
710             or src.product.product_is_fixed(prod_info)
711             or not src.product.product_compiles(prod_info)
712         ):
713             continue
714         if only_vcs and not src.product.product_is_vcs(prod_info):
715             continue
716         if not src.product.check_installation(config, prod_info):
717             l_not_installed.append(prod_name)
718             continue  # product is not installed, we skip it
719         # prepare call to make_bin_archive
720         path_targz_prod = os.path.join(
721             binpackage_path,
722             prod_name
723             + "-"
724             + prod_info.version.replace("/", "_")
725             + "-"
726             + config.VARS.dist
727             + PACKAGE_EXT,
728         )
729         targz_prod = tarfile.open(path_targz_prod, mode="w:gz")
730         bin_path = prod_info.install_dir
731         targz_prod.add(bin_path)
732         targz_prod.close()
733         # compute the MD5 checksum of the product archive
734         import hashlib
735
736         with open(path_targz_prod, "rb") as f:
737             data = f.read()  # read the archive as bytes
738             readable_hash = hashlib.md5(data).hexdigest()
739             with open(path_targz_prod + ".md5", "w") as md5sum:
740                 md5sum.write(
741                     "%s  %s" % (readable_hash, os.path.basename(path_targz_prod))
742                 )
743             logger.write(
744                 "   archive : %s   (md5sum = %s)\n" % (path_targz_prod, readable_hash)
745             )
746
747     return 0
748
749
750 def binary_package(config, logger, options, tmp_working_dir):
751     """Prepare a dictionary that stores all the needed directories and files to
752        add in a binary package.
753
754     :param config Config: The global configuration.
755     :param logger Logger: the logging instance
756     :param options OptResult: the options of the launched command
757     :param tmp_working_dir str: The temporary local directory containing some
758                                 specific directories or files needed in the
759                                 binary package
760     :return: the dictionary that stores all the needed directories and files to
761              add in a binary package.
762              {label : (path_on_local_machine, path_in_archive)}
763     :rtype: dict
764     """
765
766     # Get the list of product installation to add to the archive
767     l_products_name = sorted(config.APPLICATION.products.keys())
768     l_product_info = src.product.get_products_infos(l_products_name, config)
769
770     # exclude compile-time products from binaries-only archives
771     if not options.sources:
772         update_config(config, logger, "compile_time", "yes")
773
774     l_install_dir = []
775     l_source_dir = []
776     l_not_installed = []
777     l_sources_not_present = []
778     generate_mesa_launcher = False  # a flag to know if we generate a mesa launcher
779     if (
780         "APPLICATION" in config
781         and "properties" in config.APPLICATION
782         and "mesa_launcher_in_package" in config.APPLICATION.properties
783         and config.APPLICATION.properties.mesa_launcher_in_package == "yes"
784     ):
785         generate_mesa_launcher = True
786
787     # first loop on products : filter products, analyse properties,
788     # and store the information that will be used to create the archive in the second loop
789     for prod_name, prod_info in l_product_info:
790         # skip product with property not_in_package set to yes
791         if src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes":
792             continue
793
794         # Add the sources of the products that have the property
795         # sources_in_package : "yes"
796         if src.get_property_in_product_cfg(prod_info, "sources_in_package") == "yes":
797             if os.path.exists(prod_info.source_dir):
798                 l_source_dir.append((prod_name, prod_info.source_dir))
799             else:
800                 l_sources_not_present.append(prod_name)
801
802         # ignore the native and fixed products for install directories
803         if (
804             src.product.product_is_native(prod_info)
805             or src.product.product_is_fixed(prod_info)
806             or not src.product.product_compiles(prod_info)
807         ):
808             continue
809         #
810         # products with single_dir property will be installed in the PRODUCTS directory of the archive
811         is_single_dir = src.appli_test_property(
812             config, "single_install_dir", "yes"
813         ) and src.product.product_test_property(prod_info, "single_install_dir", "yes")
814         if src.product.check_installation(config, prod_info):
815             l_install_dir.append(
816                 (
817                     prod_name,
818                     prod_info.name,
819                     prod_info.install_dir,
820                     is_single_dir,
821                     prod_info.install_mode,
822                 )
823             )
824         else:
825             l_not_installed.append(prod_name)
826
827         # Add also the cpp generated modules (if any)
828         if src.product.product_is_cpp(prod_info):
829             # cpp module
830             for name_cpp in src.product.get_product_components(prod_info):
831                 install_dir = os.path.join(
832                     config.APPLICATION.workdir,
833                     config.INTERNAL.config.install_dir,
834                     name_cpp,
835                 )
836                 if os.path.exists(install_dir):
837                     l_install_dir.append(
838                         (name_cpp, name_cpp, install_dir, False, "value")
839                     )
840                 else:
841                     l_not_installed.append(name_cpp)
842
843     # check the name of the directory that could contain the binaries
844     # from a previous archive extraction (detar)
845     binaries_from_detar = os.path.join(
846         config.APPLICATION.workdir, config.INTERNAL.config.binary_dir + config.VARS.dist
847     )
848     if os.path.exists(binaries_from_detar):
849         logger.write(
850             """
851 WARNING: existing binaries directory from previous detar installation:
852          %s
853          To make new package from this, you have to:
854          1) install binaries in INSTALL directory with the script "install_bin.sh"
855             see README file for more details
856          2) or recompile everything in INSTALL with "sat compile" command
857             this step is long, and requires some linux packages to be installed
858             on your system\n
859 """
860             % binaries_from_detar
861         )
862
863     # Print warning or error if there are some missing products
864     if len(l_not_installed) > 0:
865         text_missing_prods = ""
866         for p_name in l_not_installed:
867             text_missing_prods += " - " + p_name + "\n"
868         if not options.force_creation:
869             msg = _("ERROR: there are missing product installations:")
870             logger.write(
871                 "%s\n%s" % (src.printcolors.printcError(msg), text_missing_prods), 1
872             )
873             raise src.SatException(msg)
874         else:
875             msg = _("WARNING: there are missing product installations:")
876             logger.write(
877                 "%s\n%s" % (src.printcolors.printcWarning(msg), text_missing_prods), 1
878             )
879
880     # Do the same for sources
881     if len(l_sources_not_present) > 0:
882         text_missing_prods = ""
883         for p_name in l_sources_not_present:
884             text_missing_prods += " - " + p_name + "\n"
885         if not options.force_creation:
886             msg = _("ERROR: there are missing product sources:")
887             logger.write(
888                 "%s\n%s" % (src.printcolors.printcError(msg), text_missing_prods), 1
889             )
890             raise src.SatException(msg)
891         else:
892             msg = _("WARNING: there are missing product sources:")
893             logger.write(
894                 "%s\n%s" % (src.printcolors.printcWarning(msg), text_missing_prods), 1
895             )
896
897     # construct the name of the directory that will contain the binaries
898     if src.architecture.is_windows():
899         binaries_dir_name = config.INTERNAL.config.binary_dir
900     else:
901         binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
902     # construct the correlation table between the product names, their
903     # actual install directories and their install directories in the archive
904     d_products = {}
905     for (
906         prod_name,
907         prod_info_name,
908         install_dir,
909         is_single_dir,
910         install_mode,
911     ) in l_install_dir:
912         prod_base_name = os.path.basename(install_dir)
913         if install_mode == "base":
914             # case of a product installed in base.
915             # because the archive is in base:no mode, the name of the install dir is different inside the archive
916             # we set it to the product name, or to PRODUCTS in the single_install_dir case
917             if is_single_dir:
918                 prod_base_name = config.INTERNAL.config.single_install_dir
919             else:
920                 prod_base_name = prod_info_name
921         path_in_archive = os.path.join(binaries_dir_name, prod_base_name)
922         d_products[prod_name + " (bin)"] = (install_dir, path_in_archive)
923
924     for prod_name, source_dir in l_source_dir:
925         path_in_archive = os.path.join("SOURCES", prod_name)
926         d_products[prod_name + " (sources)"] = (source_dir, path_in_archive)
927
928     # create an archive of the compilation logs, and insert it into the tarball
929     logpath = os.path.join(config.APPLICATION.workdir, "LOGS")
930     path_targz_logs = os.path.join(tmp_working_dir, "logs.tgz")
931     tar_log = tarfile.open(path_targz_logs, mode="w:gz")
932     tar_log.add(logpath, arcname="LOGS")
933     tar_log.close()
934     d_products["LOGS"] = (path_targz_logs, "logs.tgz")
935
936     # for packages of SALOME applications including KERNEL,
937     # we produce a salome launcher or a virtual application (depending on salome version)
938     if "KERNEL" in config.APPLICATION.products:
939         VersionSalome = src.get_salome_version(config)
940         # Case where SALOME has the launcher that uses the SalomeContext API
941         if VersionSalome >= MMP([7, 3, 0]):
942             # create the relative launcher and add it to the files to add
943             launcher_name = src.get_launcher_name(config)
944             launcher_package = produce_relative_launcher(
945                 config, logger, tmp_working_dir, launcher_name, binaries_dir_name
946             )
947             d_products["launcher"] = (launcher_package, launcher_name)
948
949             # if the application contains mesa products, we generate in addition to the
950             # classical salome launcher a launcher using mesa and called mesa_salome
951             # (the mesa launcher will be used for remote usage through ssh).
952             if generate_mesa_launcher:
953                 # if there is one : store the use_mesa property
954                 restore_use_mesa_option = None
955                 if (
956                     "properties" in config.APPLICATION
957                     and "use_mesa" in config.APPLICATION.properties
958                 ):
959                     restore_use_mesa_option = config.APPLICATION.properties.use_mesa
960
961                 # activate mesa property, and generate a mesa launcher
962                 src.activate_mesa_property(config)  # activate use_mesa property
963                 launcher_mesa_name = "mesa_" + launcher_name
964                 launcher_package_mesa = produce_relative_launcher(
965                     config,
966                     logger,
967                     tmp_working_dir,
968                     launcher_mesa_name,
969                     binaries_dir_name,
970                 )
971                 d_products["launcher (mesa)"] = (
972                     launcher_package_mesa,
973                     launcher_mesa_name,
974                 )
975
976                 # if there was a use_mesa value, we restore it
977                 # else we set it to the default value "no"
978                 if restore_use_mesa_option is not None:
979                     config.APPLICATION.properties.use_mesa = restore_use_mesa_option
980                 else:
981                     config.APPLICATION.properties.use_mesa = "no"
982
983             if options.sources:
984                 # if we mix binaries and sources, we add a copy of the launcher,
985                 # prefixed with "bin", in order to avoid clashes
986                 launcher_copy_name = "bin" + launcher_name
987                 launcher_package_copy = produce_relative_launcher(
988                     config,
989                     logger,
990                     tmp_working_dir,
991                     launcher_copy_name,
992                     binaries_dir_name,
993                 )
994                 d_products["launcher (copy)"] = (
995                     launcher_package_copy,
996                     launcher_copy_name,
997                 )
998         else:
999             # Provide a script for the creation of an application EDF style
1000             appli_script = product_appli_creation_script(
1001                 config, logger, tmp_working_dir, binaries_dir_name
1002             )
1003
1004             d_products["appli script"] = (appli_script, "create_appli.py")
1005
1006     # Put also the environment file
1007     env_file = produce_relative_env_files(
1008         config, logger, tmp_working_dir, binaries_dir_name
1009     )
1010
1011     if src.architecture.is_windows():
1012         filename = "env_launch.bat"
1013     else:
1014         filename = "env_launch.sh"
1015     d_products["environment file"] = (env_file, filename)
1016
1017     # If option exe, produce an extra launcher based on specified exe
1018     if options.exe:
1019         exe_file = produce_relative_env_files(
1020             config, logger, tmp_working_dir, binaries_dir_name, options.exe
1021         )
1022
1023         if src.architecture.is_windows():
1024             filename = os.path.basename(options.exe) + ".bat"
1025         else:
1026             filename = os.path.basename(options.exe) + ".sh"
1027         d_products["exe file"] = (exe_file, filename)
1028
1029     return d_products
1030
1031
1032 def source_package(sat, config, logger, options, tmp_working_dir):
1033     """Prepare a dictionary that stores all the needed directories and files to
1034        add in a source package.
1035
1036     :param config Config: The global configuration.
1037     :param logger Logger: the logging instance
1038     :param options OptResult: the options of the launched command
1039     :param tmp_working_dir str: The temporary local directory containing some
1040                                 specific directories or files needed in the
1041                                 source package
1042     :return: the dictionary that stores all the needed directories and files to
1043              add in a source package.
1044              {label : (path_on_local_machine, path_in_archive)}
1045     :rtype: dict
1046     """
1047
1048     d_archives = {}
     l_pinfo_vcs = []  # stays empty when ftp mode skips get_archives below
1049     # Get all the products that are prepared using an archive,
1050     # unless ftp mode is specified (in this case the user of the
1051     # archive will get the sources through the ftp mode of sat prepare)
1052     if not options.ftp:
1053         logger.write("Find archive products ... ")
1054         d_archives, l_pinfo_vcs = get_archives(config, logger)
1055         logger.write("Done\n")
1056
1057     d_archives_vcs = {}
1058     if not options.with_vcs and len(l_pinfo_vcs) > 0:
1059         # Make archives with the products that are not prepared using an archive
1060         # (git, cvs, svn, etc)
1061         logger.write("Construct archives for vcs products ... ")
1062         d_archives_vcs = get_archives_vcs(
1063             l_pinfo_vcs, sat, config, logger, tmp_working_dir
1064         )
1065         logger.write("Done\n")
1066
1067     # Create a project
1068     logger.write("Create the project ... ")
1069     d_project = create_project_for_src_package(
1070         config, tmp_working_dir, options.with_vcs, options.ftp
1071     )
1072     logger.write("Done\n")
1073
1074     # Add salomeTools
1075     tmp_sat = add_salomeTools(config, tmp_working_dir)
1076     d_sat = {"salomeTools": (tmp_sat, "sat")}
1077
1078     # Add a sat symbolic link if not win
1079     if not src.architecture.is_windows():
1080         try:
1081             t = os.getcwd()
1082         except:
1083             # In the jobs, os.getcwd() can fail
1084             t = config.LOCAL.workdir
1085         os.chdir(tmp_working_dir)
1086
1087         # create a symlink, to avoid references through "salomeTools/.."
1088         os.chdir("PROJECT")
1089         if os.path.lexists("ARCHIVES"):
1090             os.remove("ARCHIVES")
1091         os.symlink("../ARCHIVES", "ARCHIVES")
1092         os.chdir(t)
1093
1094         d_sat["sat archive link"] = (
1095             os.path.join(tmp_working_dir, "PROJECT", "ARCHIVES"),
1096             os.path.join("PROJECT", "ARCHIVES"),
1097         )
1098
1099     d_source = src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
1100     return d_source
1101
1102
1103 def get_archives(config, logger):
1104     """Find all the products that are retrieved using an archive and all the
1105        products that are retrieved from a vcs (git, cvs, svn) repository.
1106
1107     :param config Config: The global configuration.
1108     :param logger Logger: the logging instance
1109     :return: the dictionary {name_product :
1110              (local path of its archive, path in the package of its archive )}
1111              and the list of specific configuration corresponding to the vcs
1112              products
1113     :rtype: (Dict, List)
1114     """
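    # Illustrative d_archives entry (hypothetical product and archive names):
    #   d_archives["boost"] = ("/data/ARCHIVES/boost_1_71_0.tar.gz",
    #                          "ARCHIVES/boost_1_71_0.tar.gz")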
1115     # Get the list of product information
1116     l_products_name = config.APPLICATION.products.keys()
1117     l_product_info = src.product.get_products_infos(l_products_name, config)
1118     d_archives = {}
1119     l_pinfo_vcs = []
1120     for p_name, p_info in l_product_info:
1121         # skip product with property not_in_package set to yes
1122         if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
1123             continue
1124         # ignore the native and fixed products
1125         if src.product.product_is_native(p_info) or src.product.product_is_fixed(
1126             p_info
1127         ):
1128             continue
1129         if p_info.get_source == "archive":
1130             archive_path = p_info.archive_info.archive_name
1131             archive_name = os.path.basename(archive_path)
1132             d_archives[p_name] = (archive_path, os.path.join(ARCHIVE_DIR, archive_name))
1133             if src.appli_test_property(
1134                 config, "pip", "yes"
1135             ) and src.product.product_test_property(p_info, "pip", "yes"):
1136                 # if pip mode is activated, and product is managed by pip
1137                 pip_wheels_dir = os.path.join(config.LOCAL.archive_dir, "wheels")
1138                 if (
1139                     "archive_prefix" in p_info.archive_info
1140                     and p_info.archive_info.archive_prefix
1141                 ):
1142                     pip_wheel_pattern = os.path.join(
1143                         pip_wheels_dir,
1144                         "%s-%s*" % (p_info.archive_info.archive_prefix, p_info.version),
1145                     )
1146                 else:
1147                     pip_wheel_pattern = os.path.join(
1148                         pip_wheels_dir, "%s-%s*" % (p_info.name, p_info.version)
1149                     )
1150                 pip_wheel_path = glob.glob(pip_wheel_pattern)
1151                 msg_pip_not_found = (
1152                     "Error in get_archive, pip wheel for "
1153                     "product %s-%s was not found in %s directory"
1154                 )
1155                 msg_pip_two_or_more = (
1156                     "Error in get_archive, several pip wheels for "
1157                     "product %s-%s were found in %s directory"
1158                 )
1159                 if len(pip_wheel_path) == 0:
1160                     raise src.SatException(
1161                         msg_pip_not_found
1162                         % (p_info.name, p_info.version, pip_wheels_dir)
1163                     )
1164                 if len(pip_wheel_path) > 1:
1165                     raise src.SatException(
1166                         msg_pip_two_or_more
1167                         % (p_info.name, p_info.version, pip_wheels_dir)
1168                     )
1169
1170                 pip_wheel_name = os.path.basename(pip_wheel_path[0])
1171                 d_archives[p_name + " (pip wheel)"] = (
1172                     pip_wheel_path[0],
1173                     os.path.join(ARCHIVE_DIR, "wheels", pip_wheel_name),
1174                 )
1175         else:
1176             # this product is not managed by archive,
1177             # an archive of the vcs directory will be created by get_archive_vcs
1178             l_pinfo_vcs.append((p_name, p_info))
1179
1180     return d_archives, l_pinfo_vcs
1181
1182
1183 def add_salomeTools(config, tmp_working_dir):
1184     """Prepare a version of salomeTools that has a specific local.pyconf file
1185        configured for a source package.
1186
1187     :param config Config: The global configuration.
1188     :param tmp_working_dir str: The temporary local directory containing some
1189                                 specific directories or files needed in the
1190                                 source package
1191     :return: The path to the local salomeTools directory to add in the package
1192     :rtype: str
1193     """
1194     # Copy sat in the temporary working directory
1195     sat_tmp_path = src.Path(os.path.join(tmp_working_dir, "salomeTools"))
1196     sat_running_path = src.Path(config.VARS.salometoolsway)
1197     sat_running_path.copy(sat_tmp_path)
1198
1199     # Update the local.pyconf file that contains the path to the project
1200     local_pyconf_name = "local.pyconf"
1201     local_pyconf_dir = os.path.join(tmp_working_dir, "salomeTools", "data")
1202     local_pyconf_file = os.path.join(local_pyconf_dir, local_pyconf_name)
1203     # Remove the .pyconf file in the root directory of salomeTools if there is
1204     # any. (For example when launching jobs, a pyconf file describing the jobs
1205     # can be here and is not useful)
1206     files_or_dir_SAT = os.listdir(os.path.join(tmp_working_dir, "salomeTools"))
1207     for file_or_dir in files_or_dir_SAT:
1208         if file_or_dir.endswith(".pyconf") or file_or_dir.endswith(".txt"):
1209             file_path = os.path.join(tmp_working_dir, "salomeTools", file_or_dir)
1210             os.remove(file_path)
1211
1212     ff = open(local_pyconf_file, "w")
1213     ff.write(LOCAL_TEMPLATE)
1214     ff.close()
1215
1216     return sat_tmp_path.path
1217
1218
1219 def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir):
1220     """For source packages that require all products to be retrieved as archives,
1221        one has to create archives for the vcs products.
1222        So this method calls the clean and source commands of sat and then creates
1223        the archives.
1224
1225     :param l_pinfo_vcs List: The list of specific configuration corresponding to
1226                              each vcs product
1227     :param sat Sat: The Sat instance that can be called to clean and source the
1228                     products
1229     :param config Config: The global configuration.
1230     :param logger Logger: the logging instance
1231     :param tmp_working_dir str: The temporary local directory containing some
1232                                 specific directories or files needed in the
1233                                 source package
1234     :return: the dictionary that stores all the archives to add in the source
1235              package. {label : (path_on_local_machine, path_in_archive)}
1236     :rtype: dict
1237     """
1238     # clean the source directory of all the vcs products, then use the source
1239     # command and thus construct an archive that will not contain the patches
1240     l_prod_names = [pn for pn, __ in l_pinfo_vcs]
1241     if False:  # clean is dangerous in user/SOURCES, fixed in tmp_local_working_dir
1242         logger.write(_("\nclean sources\n"))
1243         args_clean = config.VARS.application
1244         args_clean += " --sources --products "
1245         args_clean += ",".join(l_prod_names)
1246         logger.write("WARNING: get_archives_vcs clean\n         '%s'\n" % args_clean, 1)
1247         sat.clean(args_clean, batch=True, verbose=0, logger_add_link=logger)
1248     if True:
1249         # source
1250         logger.write(_("get sources\n"))
1251         args_source = config.VARS.application
1252         args_source += " --products "
1253         args_source += ",".join(l_prod_names)
1254         svgDir = sat.cfg.APPLICATION.workdir
1255         tmp_local_working_dir = os.path.join(
1256             sat.cfg.APPLICATION.workdir, "tmp_package"
1257         )  # to avoid too much big files in /tmp
1258         sat.cfg.APPLICATION.workdir = tmp_local_working_dir
1259         # DBG.write("SSS sat config.APPLICATION.workdir", sat.cfg.APPLICATION, True)
1260         # DBG.write("sat config id", id(sat.cfg), True)
1261         # note: config is not the same id() as for sat.source()
1262         # sat.source(args_source, batch=True, verbose=5, logger_add_link = logger)
1263         import source
1264
1265         source.run(args_source, sat, logger)  # use this mode as runner.cfg reference
1266
1267         # make the new archives
1268         d_archives_vcs = {}
1269         for pn, pinfo in l_pinfo_vcs:
1270             path_archive = make_archive(pn, pinfo, tmp_local_working_dir)
1271             logger.write("make archive vcs '%s'\n" % path_archive)
1272             d_archives_vcs[pn] = (path_archive, os.path.join(ARCHIVE_DIR, pn + ".tgz"))
1273         sat.cfg.APPLICATION.workdir = svgDir
1274         # DBG.write("END sat config", sat.cfg.APPLICATION, True)
1275     return d_archives_vcs
1276
1277
1278 def make_bin_archive(prod_name, prod_info, where):
1279     """Create a binary archive of a product from its install directory.
1280
1281     :param prod_name str: The name of the product.
1282     :param prod_info Config: The specific configuration corresponding to the
1283                              product
1284     :param where str: The path of the directory where to put the resulting
1285                       archive
1286     :return: The path of the resulting archive
1287     :rtype: str
1288     """
1289     path_targz_prod = os.path.join(where, prod_name + PACKAGE_EXT)
1290     tar_prod = tarfile.open(path_targz_prod, mode="w:gz")
1291     bin_path = prod_info.install_dir
1292     tar_prod.add(bin_path, arcname=prod_name)
1293     tar_prod.close()
1294     return path_targz_prod
1295
1296
1297 def make_archive(prod_name, prod_info, where):
1298     """Create an archive of a product from its source directory.
1299
1300     :param prod_name str: The name of the product.
1301     :param prod_info Config: The specific configuration corresponding to the
1302                              product
1303     :param where str: The path of the directory where to put the resulting
1304                       archive
1305     :return: The path of the resulting archive
1306     :rtype: str
1307     """
1308     path_targz_prod = os.path.join(where, prod_name + PACKAGE_EXT)
1309     tar_prod = tarfile.open(path_targz_prod, mode="w:gz")
1310     local_path = prod_info.source_dir
1311     if old_python:
1312         tar_prod.add(
1313             local_path, arcname=prod_name, exclude=exclude_VCS_and_extensions_26
1314         )
1315     else:
1316         tar_prod.add(local_path, arcname=prod_name, filter=exclude_VCS_and_extensions)
1317     tar_prod.close()
1318     return path_targz_prod
1319
1320
1321 def create_project_for_src_package(config, tmp_working_dir, with_vcs, with_ftp):
1322     """Create a specific project for a source package.
1323
1324     :param config Config: The global configuration.
1325     :param tmp_working_dir str: The temporary local directory containing some
1326                                 specific directories or files needed in the
1327                                 source package
1328     :param with_vcs boolean: True if the package is with vcs products (not
1329                              transformed into archive products)
1330     :param with_ftp boolean: True if the package use ftp servers to get archives
1331     :return: The dictionary
1332              {"project" : (produced project, project path in the archive)}
1333     :rtype: Dict
1334     """
1335
1336     # Create in the working temporary directory the full project tree
1337     project_tmp_dir = os.path.join(tmp_working_dir, PROJECT_DIR)
1338     products_pyconf_tmp_dir = os.path.join(project_tmp_dir, "products")
1339     compil_scripts_tmp_dir = os.path.join(project_tmp_dir, "products", "compil_scripts")
1340     post_scripts_tmp_dir = os.path.join(project_tmp_dir, "products", "post_scripts")
1341     env_scripts_tmp_dir = os.path.join(project_tmp_dir, "products", "env_scripts")
1342     patches_tmp_dir = os.path.join(project_tmp_dir, "products", "patches")
1343     application_tmp_dir = os.path.join(project_tmp_dir, "applications")
1344     for directory in [
1345         project_tmp_dir,
1346         compil_scripts_tmp_dir,
1347         env_scripts_tmp_dir,
1348         post_scripts_tmp_dir,
1349         patches_tmp_dir,
1350         application_tmp_dir,
1351     ]:
1352         src.ensure_path_exists(directory)
1353
1354     # Create the pyconf that contains the information of the project
1355     project_pyconf_name = "project.pyconf"
1356     project_pyconf_file = os.path.join(project_tmp_dir, project_pyconf_name)
1357     ff = open(project_pyconf_file, "w")
1358     ff.write(PROJECT_TEMPLATE)
1359     if with_ftp and len(config.PATHS.ARCHIVEFTP) > 0:
1360         ftp_path = 'ARCHIVEFTP : "' + config.PATHS.ARCHIVEFTP[0]
1361         for ftpserver in config.PATHS.ARCHIVEFTP[1:]:
1362             ftp_path = ftp_path + ":" + ftpserver
1363         ftp_path += '"'
1364         ff.write("# ftp servers where to search for prerequisite archives\n")
1365         ff.write(ftp_path)
1366     # add licence paths if any
1367     if len(config.PATHS.LICENCEPATH) > 0:
1368         licence_path = 'LICENCEPATH : "' + config.PATHS.LICENCEPATH[0]
1369         for path in config.PATHS.LICENCEPATH[1:]:
1370             licence_path = licence_path + ":" + path
1371         licence_path += '"'
1372         ff.write("\n# Where to search for licences\n")
1373         ff.write(licence_path)
1374
1375     ff.close()
1376
1377     # Loop over the products to get their pyconf and all the scripts
1378     # (compilation, environment, patches)
1379     # and create the pyconf file to add to the project
1380     lproducts_name = config.APPLICATION.products.keys()
1381     l_products = src.product.get_products_infos(lproducts_name, config)
1382     for p_name, p_info in l_products:
1383         # skip product with property not_in_package set to yes
1384         if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
1385             continue
1386         find_product_scripts_and_pyconf(
1387             p_name,
1388             p_info,
1389             config,
1390             with_vcs,
1391             compil_scripts_tmp_dir,
1392             env_scripts_tmp_dir,
1393             post_scripts_tmp_dir,
1394             patches_tmp_dir,
1395             products_pyconf_tmp_dir,
1396         )
1397
1398     # for the application pyconf, we write the in-memory config directly
1399     # instead of searching for the original pyconf file,
1400     # to avoid problems with overwritten sections and the rm_products key
1401     write_application_pyconf(config, application_tmp_dir)
1402
1403     d_project = {"project": (project_tmp_dir, PROJECT_DIR)}
1404     return d_project
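# For reference, the temporary project tree produced above looks roughly like
# this (directory names come from the code, contents depend on the application):
#
#     PROJECT/
#         project.pyconf
#         applications/<application>.pyconf
#         products/<product>.pyconf ...
#         products/compil_scripts/   products/env_scripts/
#         products/post_scripts/     products/patches/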
1405
1406
1407 def find_product_scripts_and_pyconf(
1408     p_name,
1409     p_info,
1410     config,
1411     with_vcs,
1412     compil_scripts_tmp_dir,
1413     env_scripts_tmp_dir,
1414     post_scripts_tmp_dir,
1415     patches_tmp_dir,
1416     products_pyconf_tmp_dir,
1417 ):
1418     """Create a specific pyconf file for a given product. Get its environment
1419        script, its compilation script and its patches, and put them in the temporary
1420        working directory. This method is used by the source package in order to
1421        construct the specific project.
1422
1423     :param p_name str: The name of the product.
1424     :param p_info Config: The specific configuration corresponding to the
1425                              product
1426     :param config Config: The global configuration.
1427     :param with_vcs boolean: True if the package is with vcs products (not
1428                              transformed into archive products)
1429     :param compil_scripts_tmp_dir str: The path to the temporary compilation
1430                                        scripts directory of the project.
1431     :param env_scripts_tmp_dir str: The path to the temporary environment script
1432                                     directory of the project.
1433     :param post_scripts_tmp_dir str: The path to the temporary post-processing script
1434                                     directory of the project.
1435     :param patches_tmp_dir str: The path to the temporary patch scripts
1436                                 directory of the project.
1437     :param products_pyconf_tmp_dir str: The path to the temporary product
1438                                         scripts directory of the project.
1439     """
1440
1441     # read the pyconf of the product
1442     product_pyconf_cfg = src.pyconf.Config(p_info.from_file)
1443
1444     # find the compilation script if any
1445     if src.product.product_has_script(p_info):
1446         compil_script_path = src.Path(p_info.compil_script)
1447         compil_script_path.copy(compil_scripts_tmp_dir)
1448
1449     # find the environment script if any
1450     if src.product.product_has_env_script(p_info):
1451         env_script_path = src.Path(p_info.environ.env_script)
1452         env_script_path.copy(env_scripts_tmp_dir)
1453
1454     # find the post script if any
1455     if src.product.product_has_post_script(p_info):
1456         post_script_path = src.Path(p_info.post_script)
1457         post_script_path.copy(post_scripts_tmp_dir)
1458
1459     # find the patches if any
1460     if src.product.product_has_patches(p_info):
1461         patches = src.pyconf.Sequence()
1462         for patch_path in p_info.patches:
1463             p_path = src.Path(patch_path)
1464             p_path.copy(patches_tmp_dir)
1465             patches.append(os.path.basename(patch_path), "")
1466
1467     if (not with_vcs) and src.product.product_is_vcs(p_info):
1468         # in non-vcs mode, if the product is not an archive product, turn it into one.
1469
1470         # depending upon the incremental mode, select impacted sections
1471         if (
1472             "properties" in p_info
1473             and "incremental" in p_info.properties
1474             and p_info.properties.incremental == "yes"
1475         ):
1476             sections = [
1477                 "default",
1478                 "default_win",
1479                 p_info.section,
1480                 p_info.section + "_win",
1481             ]
1482         else:
1483             sections = [p_info.section]
1484         for section in sections:
1485             if (
1486                 section in product_pyconf_cfg
1487                 and "get_source" in product_pyconf_cfg[section]
1488             ):
1489                 DBG.write(
1490                     "sat package set archive mode to archive for product %s and section %s"
1491                     % (p_name, section)
1492                 )
1493                 product_pyconf_cfg[section].get_source = "archive"
1494                 if "archive_info" not in product_pyconf_cfg[section]:
1495                     product_pyconf_cfg[section].addMapping(
1496                         "archive_info", src.pyconf.Mapping(product_pyconf_cfg), ""
1497                     )
1498                     product_pyconf_cfg[section].archive_info.archive_name = (
1499                         p_info.name + ".tgz"
1500                     )
1501
1502     # save git repositories for vcs products, even if the archive is not in VCS mode;
1503     # in this case the user will still be able to switch the get_source flag back and work with git
1504     if src.product.product_is_vcs(p_info):
1505         # in vcs mode we must replace the git server url explicitly
1506         # (or it will not be found later, because project files are not exported into archives)
1507         for section in product_pyconf_cfg:
1508             # in every section of the product pyconf, replace the git repo definition by its substituted value (found in p_info)
1509             if "git_info" in product_pyconf_cfg[section]:
1510                 for repo in product_pyconf_cfg[section].git_info:
1511                     if repo in p_info.git_info:
1512                         product_pyconf_cfg[section].git_info[repo] = p_info.git_info[
1513                             repo
1514                         ]
1515
1516     # write the pyconf file to the temporary project location
1517     product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir, p_name + ".pyconf")
1518     ff = open(product_tmp_pyconf_path, "w")
1519     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1520     product_pyconf_cfg.__save__(ff, 1)
1521     ff.close()
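# When with_vcs is False, a git-based product section typically ends up with
# something like the following in its temporary pyconf (sketch, actual values
# depend on the product):
#
#     get_source : "archive"
#     archive_info :
#     {
#         archive_name : "<product>.tgz"
#     }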
1522
1523
1524 def write_application_pyconf(config, application_tmp_dir):
1525     """Write the application pyconf file in the specific temporary
1526        directory containing the specific project of a source package.
1527
1528     :param config Config: The global configuration.
1529     :param application_tmp_dir str: The path to the temporary application
1530                                     scripts directory of the project.
1531     """
1532     application_name = config.VARS.application
1533     # write the pyconf file to the temporary application location
1534     application_tmp_pyconf_path = os.path.join(
1535         application_tmp_dir, application_name + ".pyconf"
1536     )
1537     with open(application_tmp_pyconf_path, "w") as f:
1538         f.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1539         res = src.pyconf.Config()
1540         app = src.pyconf.deepCopyMapping(config.APPLICATION)
1541
1542         # set base mode to "no" for the archive
1543         app.base = "no"
1544
1545         # Change the workdir
1546         app.workdir = src.pyconf.Reference(app, src.pyconf.DOLLAR, "LOCAL.workdir")
1547         res.addMapping("APPLICATION", app, "")
1548         res.__save__(f, evaluated=False)
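# The resulting <application>.pyconf contains only the APPLICATION mapping,
# with base forced to "no" and workdir redirected to $LOCAL.workdir of the
# machine where the archive is eventually unpacked.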
1549
1550
1551 def sat_package(config, tmp_working_dir, options, logger):
1552     """Prepare a dictionary that stores all the needed directories and files to
1553        add in a salomeTools package.
1554
1555     :param tmp_working_dir str: The temporary local working directory
1556     :param options OptResult: the options of the launched command
1557     :return: the dictionary that stores all the needed directories and files to
1558              add in a salomeTools package.
1559              {label : (path_on_local_machine, path_in_archive)}
1560     :rtype: dict
1561     """
1562     d_project = {}
1563
1564     # we include sat itself
1565     d_project["all_sat"] = (config.VARS.salometoolsway, "")
1566
1567     # and we overwrite local.pyconf with a clean version.
1568     local_pyconf_tmp_path = os.path.join(tmp_working_dir, "local.pyconf")
1569     local_file_path = os.path.join(config.VARS.datadir, "local.pyconf")
1570     local_cfg = src.pyconf.Config(local_file_path)
1571     local_cfg.PROJECTS.project_file_paths = src.pyconf.Sequence(local_cfg.PROJECTS)
1572     local_cfg.LOCAL["base"] = "default"
1573     local_cfg.LOCAL["workdir"] = "default"
1574     local_cfg.LOCAL["log_dir"] = "default"
1575     local_cfg.LOCAL["archive_dir"] = "default"
1576     local_cfg.LOCAL["VCS"] = "None"
1577     local_cfg.LOCAL["tag"] = src.get_salometool_version(config)
1578
1579     # if the archive contains a project, we write its relative path in local.pyconf
1580     if options.project:
1581         project_arch_path = os.path.join(
1582             "projects", options.project, os.path.basename(options.project_file_path)
1583         )
1584         local_cfg.PROJECTS.project_file_paths.append(project_arch_path, "")
1585
1586     ff = open(local_pyconf_tmp_path, "w")
1587     local_cfg.__save__(ff, 1)
1588     ff.close()
1589     d_project["local.pyconf"] = (local_pyconf_tmp_path, "data/local.pyconf")
1590     return d_project
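# Illustrative content of the returned dictionary (local paths are hypothetical):
#
#     {
#         "all_sat":      ("/path/to/salomeTools", ""),
#         "local.pyconf": ("/tmp/.../local.pyconf", "data/local.pyconf"),
#     }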
1591
1592
1593 def project_package(
1594     config,
1595     name_project,
1596     project_file_path,
1597     ftp_mode,
1598     tmp_working_dir,
1599     embedded_in_sat,
1600     logger,
1601 ):
1602     """Prepare a dictionary that stores all the needed directories and files to
1603        add in a project package.
1604
1605     :param project_file_path str: The path to the local project.
1606     :param ftp_mode boolean: If True, do not embed product archives; the package will rely on ftp servers to retrieve them.
1607     :param tmp_working_dir str: The temporary local directory containing some
1608                                 specific directories or files needed in the
1609                                 project package
1610     :param embedded_in_sat boolean: True if the project package is embedded in a sat package
1611     :return: the dictionary that stores all the needed directories and files to
1612              add in a project package.
1613              {label : (path_on_local_machine, path_in_archive)}
1614     :rtype: dict
1615     """
1616     d_project = {}
1617     # Read the project file and get the directories to add to the package
1618
1619     try:
1620         project_pyconf_cfg = config.PROJECTS.projects.__getattr__(name_project)
1621     except Exception:
1622         logger.write(
1623             """
1624 WARNING: nonexistent config.PROJECTS.projects.%s, trying to read it now from:\n%s\n"""
1625             % (name_project, project_file_path)
1626         )
1627         project_pyconf_cfg = src.pyconf.Config(project_file_path)
1628         project_pyconf_cfg.PWD = os.path.dirname(project_file_path)
1629
1630     paths = {
1631         "APPLICATIONPATH": "applications",
1632         "PRODUCTPATH": "products",
1633         "JOBPATH": "jobs",
1634         "MACHINEPATH": "machines",
1635     }
1636     if not ftp_mode:
1637         paths["ARCHIVEPATH"] = "archives"
1638
1639     # Loop over the project paths and add them
1640     project_file_name = os.path.basename(project_file_path)
1641     for path in paths:
1642         if path not in project_pyconf_cfg:
1643             continue
1644         if embedded_in_sat:
1645             dest_path = os.path.join("projects", name_project, paths[path])
1646             project_file_dest = os.path.join(
1647                 "projects", name_project, project_file_name
1648             )
1649         else:
1650             dest_path = paths[path]
1651             project_file_dest = project_file_name
1652
1653         # Add the directory to the files to add in the package
1654         d_project[path] = (project_pyconf_cfg[path], dest_path)
1655
1656         # Modify the value of the path in the package
1657         project_pyconf_cfg[path] = src.pyconf.Reference(
1658             project_pyconf_cfg,
1659             src.pyconf.DOLLAR,
1660             'project_path + "/' + paths[path] + '"',
1661         )
1662
1663     # Modify some values
1664     if "project_path" not in project_pyconf_cfg:
1665         project_pyconf_cfg.addMapping(
1666             "project_path", src.pyconf.Mapping(project_pyconf_cfg), ""
1667         )
1668     project_pyconf_cfg.project_path = src.pyconf.Reference(
1669         project_pyconf_cfg, src.pyconf.DOLLAR, "PWD"
1670     )
1671     # we don't want to export these two fields
1672     project_pyconf_cfg.__delitem__("file_path")
1673     project_pyconf_cfg.__delitem__("PWD")
1674     if ftp_mode:
1675         project_pyconf_cfg.__delitem__("ARCHIVEPATH")
1676
1677     # Write the project pyconf file
1678     project_pyconf_tmp_path = os.path.join(tmp_working_dir, project_file_name)
1679     ff = open(project_pyconf_tmp_path, "w")
1680     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1681     project_pyconf_cfg.__save__(ff, 1)
1682     ff.close()
1683     d_project["Project hat file"] = (project_pyconf_tmp_path, project_file_dest)
1684
1685     return d_project
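# Illustrative result for a project named "myproject" embedded in a sat package
# (local paths are hypothetical):
#
#     {
#         "APPLICATIONPATH":  ("/path/to/project/applications",
#                              "projects/myproject/applications"),
#         "Project hat file": ("/tmp/.../myproject.pyconf",
#                              "projects/myproject/myproject.pyconf"),
#         ...
#     }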
1686
1687
1688 def add_readme(config, options, where):
1689     readme_path = os.path.join(where, "README")
1690     with codecs.open(readme_path, "w", "utf-8") as f:
1691
1692         # templates for building the header
1693         readme_header = """
1694 # This package was generated with sat $version
1695 # Date: $date
1696 # User: $user
1697 # Distribution : $dist
1698
1699 In the following, $$ROOT represents the directory where you have installed
1700 SALOME (the directory where this file is located).
1701
1702 """
1703         if src.architecture.is_windows():
1704             readme_header = readme_header.replace("$$ROOT", "%ROOT%")
1705         readme_compilation_with_binaries = """
1706
1707 compilation based on the binaries used as prerequisites
1708 =======================================================
1709
1710 If you fail to compile the complete application (for example because
1711 you are not root on your system and cannot install missing packages), you
1712 may try a partial compilation based on the binaries.
1713 To do so, copy the binaries from BINARIES to INSTALL and perform some
1714 substitutions in the cmake and .la files (replace the build directories
1715 with local paths).
1716 The procedure to do it is:
1717  1) Remove or rename the INSTALL directory if it exists
1718  2) Execute the shell script install_bin.sh:
1719  > cd $ROOT
1720  > ./install_bin.sh
1721  3) Use salomeTools (as explained in the Sources section) and compile only the
1722     modules you need (with the -p option)
1723
1724 """
1725         readme_header_tpl = string.Template(readme_header)
1726         readme_template_path_bin = os.path.join(
1727             config.VARS.internal_dir, "README_BIN.template"
1728         )
1729         readme_template_path_bin_launcher = os.path.join(
1730             config.VARS.internal_dir, "README_LAUNCHER.template"
1731         )
1732         readme_template_path_bin_virtapp = os.path.join(
1733             config.VARS.internal_dir, "README_BIN_VIRTUAL_APP.template"
1734         )
1735         readme_template_path_src = os.path.join(
1736             config.VARS.internal_dir, "README_SRC.template"
1737         )
1738         readme_template_path_pro = os.path.join(
1739             config.VARS.internal_dir, "README_PROJECT.template"
1740         )
1741         readme_template_path_sat = os.path.join(
1742             config.VARS.internal_dir, "README_SAT.template"
1743         )
1744
1745         # prepare substitution dictionary
1746         d = dict()
1747         d["user"] = config.VARS.user
1748         d["date"] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
1749         d["version"] = src.get_salometool_version(config)
1750         d["dist"] = config.VARS.dist
1751         f.write(readme_header_tpl.substitute(d))  # write the general header (common)
1752
1753         if options.binaries or options.sources:
1754             d["application"] = config.VARS.application
1755             d["BINARIES"] = config.INTERNAL.config.binary_dir
1756             d["SEPARATOR"] = config.VARS.sep
1757             if src.architecture.is_windows():
1758                 d["operatingSystem"] = "Windows"
1759                 d["PYTHON3"] = "python3"
1760                 d["ROOT"] = "%ROOT%"
1761             else:
1762                 d["operatingSystem"] = "Linux"
1763                 d["PYTHON3"] = ""
1764                 d["ROOT"] = "$ROOT"
1765             f.write("# Application: " + d["application"] + "\n")
1766             if "KERNEL" in config.APPLICATION.products:
1767                 VersionSalome = src.get_salome_version(config)
1768                 # Case where SALOME has the launcher that uses the SalomeContext API
1769                 if VersionSalome >= MMP([7, 3, 0]):
1770                     d["launcher"] = config.APPLICATION.profile.launcher_name
1771                 else:
1772                     d["virtual_app"] = "runAppli"  # this info is not used now)
1773
1774         # write the specific sections
1775         if options.binaries:
1776             f.write(src.template.substitute(readme_template_path_bin, d))
1777             if "virtual_app" in d:
1778                 f.write(src.template.substitute(readme_template_path_bin_virtapp, d))
1779             if "launcher" in d:
1780                 f.write(src.template.substitute(readme_template_path_bin_launcher, d))
1781
1782         if options.sources:
1783             f.write(src.template.substitute(readme_template_path_src, d))
1784
1785         if options.binaries and options.sources and not src.architecture.is_windows():
1786             f.write(readme_compilation_with_binaries)
1787
1788         if options.project:
1789             f.write(src.template.substitute(readme_template_path_pro, d))
1790
1791         if options.sat:
1792             f.write(src.template.substitute(readme_template_path_sat, d))
1793
1794     return readme_path
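# The README is assembled from the header template above plus the
# README_*.template files matching the selected options; for a package built
# with both --binaries and --sources on Linux it also embeds the
# partial-recompilation instructions (install_bin.sh procedure).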
1795
1796
1797 def update_config(config, logger, prop, value):
1798     """Remove from config.APPLICATION.products the products that have the property given as input.
1799
1800     :param config Config: The global config.
1801     :param prop str: The property to filter
1802     :param value str: The value of the property to filter
1803     """
1804     # if there is no APPLICATION (e.g. sat package -t): nothing to do
1805     if "APPLICATION" in config:
1806         l_product_to_remove = []
1807         for product_name in config.APPLICATION.products.keys():
1808             prod_cfg = src.product.get_product_config(config, product_name)
1809             if src.get_property_in_product_cfg(prod_cfg, prop) == value:
1810                 l_product_to_remove.append(product_name)
1811         for product_name in l_product_to_remove:
1812             config.APPLICATION.products.__delitem__(product_name)
1813             logger.write(
1814                 "Remove product %s with property %s\n" % (product_name, prop), 5
1815             )
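# Typical use (see run() below): update_config(cfg, logger, "not_in_package", "yes")
# drops every product flagged with the not_in_package property before the
# archive is built.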
1816
1817
1818 def description():
1819     """method that is called when salomeTools is called with --help option.
1820
1821     :return: The text to display for the package command description.
1822     :rtype: str
1823     """
1824     return _(
1825         """
1826 The package command creates a tar file archive of a product.
1827 There are four kinds of archive, which can be mixed:
1828
1829  1 - The binary archive.
1830      It contains the product installation directories plus a launcher.
1831  2 - The sources archive.
1832      It contains the product source archives and a project (the application plus salomeTools).
1833  3 - The project archive.
1834      It contains a project (give the project file path as argument).
1835  4 - The salomeTools archive.
1836      It contains the salomeTools utility itself.
1837
1838 example:
1839  >> sat package SALOME-master --binaries --sources"""
1840     )
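# Example invocations (the application and project names are illustrative):
#
#     sat package SALOME-master --binaries --sources
#     sat package --salometools --project myproject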
1841
1842
1843 def run(args, runner, logger):
1844     """method that is called when salomeTools is called with package parameter."""
1845
1846     # Parse the options
1847     (options, args) = parser.parse_args(args)
1848
1849     # Check that at least one type of package has been requested
1850     all_option_types = (
1851         options.binaries,
1852         options.sources,
1853         options.project not in ["", None],
1854         options.sat,
1855         options.bin_products,
1856     )
1857
1858     # Check if no option for package type
1859     if all_option_types.count(True) == 0:
1860         msg = _(
1861             "Error: Precise a type for the package\nUse one of the "
1862             "following options: --binaries, --sources, --project or"
1863             " --salometools, --bin_products"
1864         )
1865         logger.write(src.printcolors.printcError(msg), 1)
1866         logger.write("\n", 1)
1867         return 1
1868     do_create_package = (
1869         options.binaries or options.sources or options.project or options.sat
1870     )
1871
1872     if options.bin_products:
1873         ret = bin_products_archives(runner.cfg, logger, options.with_vcs)
1874         if ret != 0:
1875             return ret
1876     if not do_create_package:
1877         return 0
1878
1879     # continue to create a tar.gz package
1880
1881     # The repository where to put the package if not Binary or Source
1882     package_default_path = runner.cfg.LOCAL.workdir
1883     # if the package contains binaries or sources:
1884     if options.binaries or options.sources or options.bin_products:
1885         # Check that the command has been called with an application
1886         src.check_config_has_application(runner.cfg)
1887
1888         # Display information
1889         logger.write(
1890             _("Packaging application %s\n")
1891             % src.printcolors.printcLabel(runner.cfg.VARS.application),
1892             1,
1893         )
1894
1895         # Get the default directory where to put the packages
1896         package_default_path = os.path.join(runner.cfg.APPLICATION.workdir, "PACKAGE")
1897         src.ensure_path_exists(package_default_path)
1898
1899     # if the package contains a project:
1900     if options.project:
1901         # check that the project is visible by SAT
1902         projectNameFile = options.project + ".pyconf"
1903         foundProject = None
1904         for i in runner.cfg.PROJECTS.project_file_paths:
1905             baseName = os.path.basename(i)
1906             if baseName == projectNameFile:
1907                 foundProject = i
1908                 break
1909
1910         if foundProject is None:
1911             local_path = os.path.join(
1912                 runner.cfg.VARS.salometoolsway, "data", "local.pyconf"
1913             )
1914             msg = _(
1915                 """ERROR: the project %(1)s is not visible by salomeTools.
1916 known projects are:
1917 %(2)s
1918
1919 Please add it in file:
1920 %(3)s"""
1921                 % {
1922                     "1": options.project,
1923                     "2": "\n  ".join(runner.cfg.PROJECTS.project_file_paths),
1924                     "3": local_path,
1925                 }
1926             )
1927             logger.write(src.printcolors.printcError(msg), 1)
1928             logger.write("\n", 1)
1929             return 1
1930         else:
1931             options.project_file_path = foundProject
1932             src.printcolors.print_value(
1933                 logger, "Project path", options.project_file_path, 2
1934             )
1935
1936     # Remove the products that are filtered by the --without_properties option
1937     if options.without_properties:
1938         prop, value = options.without_properties
1939         update_config(runner.cfg, logger, prop, value)
1940
1941     # Remove from config the products that have the not_in_package property
1942     update_config(runner.cfg, logger, "not_in_package", "yes")
1943
1944     # get the name of the archive or build it
1945     if options.name:
1946         if os.path.basename(options.name) == options.name:
1947             # only a name (not a path)
1948             archive_name = options.name
1949             dir_name = package_default_path
1950         else:
1951             archive_name = os.path.basename(options.name)
1952             dir_name = os.path.dirname(options.name)
1953
1954         # strip a trailing extension, if any
1955         if archive_name.endswith(".tgz"):
1956             archive_name = archive_name[: -len(".tgz")]
1957         if archive_name.endswith(".tar.gz"):
1958             archive_name = archive_name[: -len(".tar.gz")]
1959
1960     else:
1961         archive_name = ""
1962         dir_name = package_default_path
1963         if options.binaries or options.sources:
1964             archive_name = runner.cfg.APPLICATION.name
1965
1966         if options.binaries:
1967             archive_name += "-" + runner.cfg.VARS.dist
1968
1969         if options.sources:
1970             archive_name += "-SRC"
1971             if options.with_vcs:
1972                 archive_name += "-VCS"
1973
1974         if options.sat:
1975             archive_name += "salomeTools_" + src.get_salometool_version(runner.cfg)
1976
1977         if options.project:
1978             if options.sat:
1979                 archive_name += "_"
1980             archive_name += "satproject_" + options.project
1981
1982         if len(archive_name) == 0:  # no option worked
1983             msg = _(
1984                 "Error: Cannot name the archive\n"
1985                 " check if at least one of the following options was "
1986                 "selected : --binaries, --sources, --project or"
1987                 " --salometools"
1988             )
1989             logger.write(src.printcolors.printcError(msg), 1)
1990             logger.write("\n", 1)
1991             return 1
1992
1993     path_targz = os.path.join(dir_name, archive_name + PACKAGE_EXT)
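    # e.g. a --binaries --sources package of an application named SALOME-master
    # built on a distribution whose code is CO7 (illustrative value) would be
    # named SALOME-master-CO7-SRC.tar.gz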
1994
1995     src.printcolors.print_value(logger, "Package path", path_targz, 2)
1996
1997     # Create a working directory for all files that are produced during the
1998     # package creation and that will be removed at the end of the command
1999     tmp_working_dir = os.path.join(runner.cfg.VARS.tmp_root, runner.cfg.VARS.datehour)
2000     src.ensure_path_exists(tmp_working_dir)
2001     logger.write("\n", 5)
2002     logger.write(_("The temporary working directory: %s\n" % tmp_working_dir), 5)
2003
2004     logger.write("\n", 3)
2005
2006     msg = _("Preparation of files to add to the archive")
2007     logger.write(src.printcolors.printcLabel(msg), 2)
2008     logger.write("\n", 2)
2009
2010     d_files_to_add = {}  # content of the archive
2011
2012     # a dict to hold paths that will need to be substituted for the user's recompilations
2013     d_paths_to_substitute = {}
2014
2015     if options.binaries:
2016         d_bin_files_to_add = binary_package(
2017             runner.cfg, logger, options, tmp_working_dir
2018         )
2019         # for all binaries directories, store the substitutions that will be required
2020         # for extra compilations
2021         for key in d_bin_files_to_add:
2022             if key.endswith("(bin)"):
2023                 source_dir = d_bin_files_to_add[key][0]
2024                 path_in_archive = d_bin_files_to_add[key][1].replace(
2025                     runner.cfg.INTERNAL.config.binary_dir + runner.cfg.VARS.dist,
2026                     runner.cfg.INTERNAL.config.install_dir,
2027                 )
2028                 if os.path.basename(source_dir) == os.path.basename(path_in_archive):
2029                     # if basename is the same we will just substitute the dirname
2030                     d_paths_to_substitute[
2031                         os.path.dirname(source_dir)
2032                     ] = os.path.dirname(path_in_archive)
2033                 else:
2034                     d_paths_to_substitute[source_dir] = path_in_archive
2035
2036         d_files_to_add.update(d_bin_files_to_add)
2037     if options.sources:
2038         d_files_to_add.update(
2039             source_package(runner, runner.cfg, logger, options, tmp_working_dir)
2040         )
2041         if options.binaries:
2042             # for archives with bin and sources we provide a shell script able to
2043             # install binaries for compilation
2044             file_install_bin = produce_install_bin_file(
2045                 runner.cfg,
2046                 logger,
2047                 tmp_working_dir,
2048                 d_paths_to_substitute,
2049                 "install_bin.sh",
2050             )
2051             d_files_to_add.update({"install_bin": (file_install_bin, "install_bin.sh")})
2052             logger.write("substitutions that need to be done later : \n", 5)
2053             logger.write(str(d_paths_to_substitute), 5)
2054             logger.write("\n", 5)
2055     else:
2056         # the --salometools option is not considered when --sources is selected, as that option
2057         # already brings salomeTools in
2058         if options.sat:
2059             d_files_to_add.update(
2060                 sat_package(runner.cfg, tmp_working_dir, options, logger)
2061             )
2062
2063     if options.project:
2064         DBG.write("config for package %s" % options.project, runner.cfg)
2065         d_files_to_add.update(
2066             project_package(
2067                 runner.cfg,
2068                 options.project,
2069                 options.project_file_path,
2070                 options.ftp,
2071                 tmp_working_dir,
2072                 options.sat,
2073                 logger,
2074             )
2075         )
2076
2077     if not d_files_to_add:
2078         msg = _("Error: Empty dictionary to build the archive!\n")
2079         logger.write(src.printcolors.printcError(msg), 1)
2080         logger.write("\n", 1)
2081         return 1
2082
2083     # Add the README file in the package
2084     local_readme_tmp_path = add_readme(runner.cfg, options, tmp_working_dir)
2085     d_files_to_add["README"] = (local_readme_tmp_path, "README")
2086
2087     # Add the additional files of option add_files
2088     if options.add_files:
2089         for file_path in options.add_files:
2090             if not os.path.exists(file_path):
2091                 msg = _("WARNING: the file %s is not accessible.\n" % file_path)
                logger.write(msg, 1)  # report the warning instead of silently skipping the file
2092                 continue
2093             file_name = os.path.basename(file_path)
2094             d_files_to_add[file_name] = (file_path, file_name)
2095
2096     logger.write("\n", 2)
2097     logger.write(src.printcolors.printcLabel(_("Actually do the package")), 2)
2098     logger.write("\n", 2)
2099     logger.write(
2100         "\nfiles and directories to add:\n%s\n\n" % PP.pformat(d_files_to_add), 5
2101     )
2102
2103     res = 0
2104     try:
2105         # Creating the object tarfile
2106         tar = tarfile.open(path_targz, mode="w:gz")
2107
2108         # get the filtering function if needed
2109         if old_python:
2110             filter_function = exclude_VCS_and_extensions_26
2111         else:
2112             filter_function = exclude_VCS_and_extensions
2113
2114         # Add the files to the tarfile object
2115         res = add_files(
2116             tar, archive_name, d_files_to_add, logger, f_exclude=filter_function
2117         )
2118         tar.close()
2119     except KeyboardInterrupt:
2120         logger.write(src.printcolors.printcError("\nERROR: forced interruption\n"), 1)
2121         logger.write(
2122             _("Removing the temporary working directory '%s'... ") % tmp_working_dir, 1
2123         )
2124         # remove the working directory
2125         shutil.rmtree(tmp_working_dir)
2126         logger.write(_("OK"), 1)
2127         logger.write(_("\n"), 1)
2128         return 1
2129
2130     # case with no application: only sat itself is packaged, as with 'sat package -t'
2131     try:
2132         app = runner.cfg.APPLICATION
2133     except Exception:
2134         app = None
2135
2136     # unconditionally remove the tmp_local_working_dir
2137     if app is not None:
2138         tmp_local_working_dir = os.path.join(app.workdir, "tmp_package")
2139         if os.path.isdir(tmp_local_working_dir):
2140             shutil.rmtree(tmp_local_working_dir)
2141
2142     # remove the tmp directory, unless user has registered as developer
2143     if os.path.isdir(tmp_working_dir) and (not DBG.isDeveloper()):
2144         shutil.rmtree(tmp_working_dir)
2145
2146     # Print again the path of the package
2147     logger.write("\n", 2)
2148     src.printcolors.print_value(logger, "Package path", path_targz, 2)
2149
2150     return res