documentation: sat package option --with_vcs combined with --bin_products
[tools/sat.git] / commands / package.py
1 #!/usr/bin/env python
2 #-*- coding:utf-8 -*-
3 #  Copyright (C) 2010-2012  CEA/DEN
4 #
5 #  This library is free software; you can redistribute it and/or
6 #  modify it under the terms of the GNU Lesser General Public
7 #  License as published by the Free Software Foundation; either
8 #  version 2.1 of the License.
9 #
10 #  This library is distributed in the hope that it will be useful,
11 #  but WITHOUT ANY WARRANTY; without even the implied warranty of
12 #  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13 #  Lesser General Public License for more details.
14 #
15 #  You should have received a copy of the GNU Lesser General Public
16 #  License along with this library; if not, write to the Free Software
17 #  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
18
19 import os
20 import stat
21 import shutil
22 import datetime
23 import tarfile
24 import codecs
25 import string
26 import glob
27 import pprint as PP
28 import sys
29 import src
30
31 from application import get_SALOME_modules
32 import src.debug as DBG
33
34 old_python = sys.version_info[0] == 2 and sys.version_info[1] <= 6
35
36 BINARY = "binary"
37 SOURCE = "Source"
38 PROJECT = "Project"
39 SAT = "Sat"
40
41 ARCHIVE_DIR = "ARCHIVES"
42 PROJECT_DIR = "PROJECT"
43
44 IGNORED_DIRS = [".git", ".svn"]
45 IGNORED_EXTENSIONS = []
46
47 PACKAGE_EXT=".tar.gz" # the extension we use for the packages
48
49 if src.architecture.is_windows():
50     PROJECT_TEMPLATE = """#!/usr/bin/env python
51 #-*- coding:utf-8 -*-
52
53 # The path to the archive root directory
54 root_path : $PWD + "/../"
55 # path to the PROJECT
56 project_path : $PWD + "/"
57
58 # Where to search the archives of the products
59 ARCHIVEPATH : $root_path + "ARCHIVES"
60 # Where to search the pyconf of the applications
61 APPLICATIONPATH : $project_path + "applications/"
62 # Where to search the pyconf of the products
63 PRODUCTPATH : $project_path + "products/"
64 # Where to search the pyconf of the jobs of the project
65 JOBPATH : $project_path + "jobs/"
66 # Where to search the pyconf of the machines of the project
67 MACHINEPATH : $project_path + "machines/"
68 """
69 else:
70     PROJECT_TEMPLATE = """#!/usr/bin/env python
71 #-*- coding:utf-8 -*-
72
73 # path to the PROJECT
74 project_path : $PWD + "/"
75
76 # Where to search the archives of the products
77 ARCHIVEPATH : $project_path + "ARCHIVES"
78 # Where to search the pyconf of the applications
79 APPLICATIONPATH : $project_path + "applications/"
80 # Where to search the pyconf of the products
81 PRODUCTPATH : $project_path + "products/"
82 # Where to search the pyconf of the jobs of the project
83 JOBPATH : $project_path + "jobs/"
84 # Where to search the pyconf of the machines of the project
85 MACHINEPATH : $project_path + "machines/"
86 """
87
88
89 LOCAL_TEMPLATE = ("""#!/usr/bin/env python
90 #-*- coding:utf-8 -*-
91
92   LOCAL :
93   {
94     base : 'default'
95     workdir : 'default'
96     log_dir : 'default'
97     archive_dir : 'default'
98     VCS : 'unknown'
99     tag : 'unknown'
100   }
101
102 PROJECTS :
103 {
104   project_file_paths : 
105   [
106   ]
107 }
108 """)
109
110 # Define all possible options for the package command :  sat package <options>
111 parser = src.options.Options()
112 parser.add_option('b', 'binaries', 'boolean', 'binaries',
113     _('Optional: Produce a binary package.'), False)
114 parser.add_option('f', 'force_creation', 'boolean', 'force_creation',
115     _('Optional: Binary packages only: produce the archive even if '
116       'some products are missing.'), False)
117 parser.add_option('s', 'sources', 'boolean', 'sources',
118     _('Optional: Produce a compilable archive of the sources of the '
119       'application.'), False)
120 parser.add_option('', 'bin_products', 'boolean', 'bin_products',
121     _('Optional: Create binary archives for all products.'), False)
122 parser.add_option('', 'with_vcs', 'boolean', 'with_vcs',
123     _('Optional: Do not make archives for products in VCS mode (git, cvs, svn). ' 
124       'Sat prepare will use the VCS mode instead to retrieve them.'
125       '\n          Also, when combined with the "--bin_products" option, restrict the building of product archives to VCS products.'),
126     False)
127 parser.add_option('', 'ftp', 'boolean', 'ftp',
128     _('Optional: Do not embed archives for products in archive mode. ' 
129       'Sat prepare will use ftp instead to retrieve them.'),
130     False)
131 parser.add_option('e', 'exe', 'string', 'exe',
132     _('Optional: Produce an extra launcher based upon the exe given as argument.'), "")
133 parser.add_option('p', 'project', 'string', 'project',
134     _('Optional: Produce an archive that contains a project.'), "")
135 parser.add_option('t', 'salometools', 'boolean', 'sat',
136     _('Optional: Produce an archive that contains salomeTools.'), False)
137 parser.add_option('n', 'name', 'string', 'name',
138     _('Optional: The name or full path of the archive.'), None)
139 parser.add_option('', 'add_files', 'list2', 'add_files',
140     _('Optional: The list of additional files to add to the archive.'), [])
141 parser.add_option('', 'without_properties', 'properties', 'without_properties',
142     _('Optional: Filter the products by their properties.\n\tSyntax: '
143       '--without_properties <property>:<value>'))
144
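# Editor's note: the block below is an illustration added for documentation
# purposes, not part of the original sources. Typical invocations combining the
# options above might look like this (the application name "SALOME-master" is
# only a placeholder):
#
#   ./sat package SALOME-master --binaries --name /tmp/SALOME-binaries
#   # produce one binary archive per product, restricted to VCS products:
#   ./sat package SALOME-master --bin_products --with_vcs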
145
146 def add_files(tar, name_archive, d_content, logger, f_exclude=None):
147     '''Create an archive containing all directories and files that are given in
148        the d_content argument.
149     
150     :param tar tarfile: The tarfile instance used to make the archive.
151     :param name_archive str: The name of the archive to make.
152     :param d_content dict: The dictionary that contains all directories and files
153                            to add in the archive.
154                            d_content[label] = 
155                                         (path_on_local_machine, path_in_archive)
156     :param logger Logger: the logging instance
157     :param f_exclude Function: the function that filters
158     :return: 0 if success, 1 if not.
159     :rtype: int
160     '''
161     # get the max length of the messages in order to make the display
162     max_len = len(max(d_content.keys(), key=len))
163     
164     success = 0
165     # loop over each directory or file stored in the d_content dictionary
166     names = sorted(d_content.keys())
167     DBG.write("add tar names", names)
168
169     # used to avoid duplications (for pip install in python, or single_install_dir cases)
170     already_added=set() 
171     for name in names:
172         # display information
173         len_points = max_len - len(name) + 3
174         local_path, archive_path = d_content[name]
175         in_archive = os.path.join(name_archive, archive_path)
176         logger.write(name + " " + len_points * "." + " "+ in_archive + " ", 3)
177         # Get the local path and the path in archive 
178         # of the directory or file to add
179         # Add it in the archive
180         try:
181             key=local_path+"->"+in_archive
182             if key not in already_added:
183                 if old_python:
184                     tar.add(local_path,
185                                  arcname=in_archive,
186                                  exclude=exclude_VCS_and_extensions_26)
187                 else:
188                     tar.add(local_path,
189                                  arcname=in_archive,
190                                  filter=exclude_VCS_and_extensions)
191                 already_added.add(key)
192             logger.write(src.printcolors.printcSuccess(_("OK")), 3)
193         except Exception as e:
194             logger.write(src.printcolors.printcError(_("KO ")), 3)
195             logger.write(str(e), 3)
196             success = 1
197         logger.write("\n", 3)
198     return success
199
200
201 def exclude_VCS_and_extensions_26(filename):
202     ''' The function that is used to exclude the links to the VCS repositories
203         (like .git) from the package (only for python 2.6)
204
205     :param filename Str: The filename to exclude (or not).
206     :return: True if the file has to be excluded
207     :rtype: Boolean
208     '''
209     for dir_name in IGNORED_DIRS:
210         if dir_name in filename:
211             return True
212     for extension in IGNORED_EXTENSIONS:
213         if filename.endswith(extension):
214             return True
215     return False
216
217 def exclude_VCS_and_extensions(tarinfo):
218     ''' The function that is used to exclude the links to the VCS repositories
219         (like .git) from the package
220
221     :param tarinfo TarInfo: The tar entry to exclude (or not).
222     :return: None if the entry has to be excluded, the entry otherwise
223     :rtype: tarinfo or None
224     '''
225     filename = tarinfo.name
226     for dir_name in IGNORED_DIRS:
227         if dir_name in filename:
228             return None
229     for extension in IGNORED_EXTENSIONS:
230         if filename.endswith(extension):
231             return None
232     return tarinfo
233
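# Editor's note (illustration added for documentation, not part of the original
# sources): tarfile changed its filtering API after python 2.6.  The old "exclude"
# callback receives a file name and returns True to drop it, whereas the newer
# "filter" callback receives a TarInfo object and returns None to drop it.
# A minimal sketch of both call styles, assuming a local directory "some_dir":
#
#   tar = tarfile.open("example.tar.gz", mode='w:gz')
#   if old_python:
#       tar.add("some_dir", arcname="some_dir", exclude=exclude_VCS_and_extensions_26)
#   else:
#       tar.add("some_dir", arcname="some_dir", filter=exclude_VCS_and_extensions)
#   tar.close()
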
234 def produce_relative_launcher(config,
235                               logger,
236                               file_dir,
237                               file_name,
238                               binaries_dir_name):
239     '''Create a specific SALOME launcher for the binary package. This launcher 
240        uses relative paths.
241     
242     :param config Config: The global configuration.
243     :param logger Logger: the logging instance
244     :param file_dir str: the directory where to put the launcher
245     :param file_name str: The launcher name
246     :param binaries_dir_name str: the name of the repository where the binaries
247                                   are, in the archive.
248     :return: the path of the produced launcher
249     :rtype: str
250     '''
251     
252     # set base mode to "no" for the archive - save current mode to restore it at the end
253     if "base" in config.APPLICATION:
254         base_setting=config.APPLICATION.base 
255     else:
256         base_setting="maybe"
257     config.APPLICATION.base="no"
258
259     # get KERNEL installation path 
260     kernel_info = src.product.get_product_config(config, "KERNEL")
261     kernel_base_name=os.path.basename(kernel_info.install_dir)
262     if kernel_info.install_mode == "base":
263         # case of kernel installed in base. the kernel install dir name is different in the archive
264         kernel_base_name=os.path.basename(os.path.dirname(kernel_info.install_dir))
265     
266     kernel_root_dir = os.path.join(binaries_dir_name, kernel_base_name)
267
268     # set kernel bin dir (considering fhs property)
269     kernel_cfg = src.product.get_product_config(config, "KERNEL")
270     if src.get_property_in_product_cfg(kernel_cfg, "fhs"):
271         bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin") 
272     else:
273         bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin","salome") 
274
275     # check if the application contains an application module
276     # check also if the application has a distene product, 
277     # in this case get its licence file name
278     l_product_info = src.product.get_products_infos(config.APPLICATION.products.keys(), config)
279     salome_application_name="Not defined" 
280     distene_licence_file_name=False
281     for prod_name, prod_info in l_product_info:
282         # look for a "salome application" and a distene product
283         if src.get_property_in_product_cfg(prod_info, "is_distene") == "yes":
284             distene_licence_file_name = src.product.product_has_licence(prod_info, 
285                                             config.PATHS.LICENCEPATH) 
286         if src.get_property_in_product_cfg(prod_info, "is_salome_application") == "yes":
287             salome_application_name=prod_info.name
288
289     # if the application contains an application module, we set ABSOLUTE_APPLI_PATH to it
290     # if not we set it to KERNEL_INSTALL_DIR, which is sufficient, except for salome test
291     if salome_application_name == "Not defined":
292         app_root_dir=kernel_root_dir
293     else:
294         app_root_dir=os.path.join(binaries_dir_name, salome_application_name)
295
296     additional_env={}
297     additional_env['sat_bin_kernel_install_dir'] = "out_dir_Path + " +\
298                                                    config.VARS.sep + bin_kernel_install_dir
299     if "python3" in config.APPLICATION and config.APPLICATION.python3 == "yes":
300         additional_env['sat_python_version'] = 3
301     else:
302         additional_env['sat_python_version'] = 2
303
304     additional_env['ABSOLUTE_APPLI_PATH'] = "out_dir_Path" + config.VARS.sep + app_root_dir
305     launcher_name = src.get_launcher_name(config)
306     additional_env['APPLI'] = "out_dir_Path" + config.VARS.sep + file_name
307
308     # create an environment file writer
309     writer = src.environment.FileEnvWriter(config,
310                                            logger,
311                                            file_dir,
312                                            src_root=None,
313                                            env_info=None)
314     
315     filepath = os.path.join(file_dir, file_name)
316     # Write
317     writer.write_env_file(filepath,
318                           False,  # for launch
319                           "cfgForPy",
320                           additional_env=additional_env,
321                           no_path_init="False",
322                           for_package = binaries_dir_name)
323     
324     # Little hack to put out_dir_Path outside the strings
325     src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' )
326     src.replace_in_file(filepath, "r'out_dir_Path + ", "out_dir_Path + r'" )
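    # Editor's illustration (assumed launcher content, not taken from the sources):
    # a value written in the generated launcher as
    #     r"out_dir_Path/BINARIES-XX/FOO"
    # becomes, after the two substitutions above,
    #     out_dir_Path + r"/BINARIES-XX/FOO"
    # so that out_dir_Path is evaluated as a python variable when the launcher runs.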
327     
328     # A hack to put a call to a file for distene licence.
329     # It does nothing to an application that has no distene product
330     if distene_licence_file_name:
331         logger.write("Application has a distene licence file! We use it in package launcher", 5)
332         hack_for_distene_licence(filepath, distene_licence_file_name)
333        
334     # change the rights in order to make the file executable for everybody
335     os.chmod(filepath,
336              stat.S_IRUSR |
337              stat.S_IRGRP |
338              stat.S_IROTH |
339              stat.S_IWUSR |
340              stat.S_IXUSR |
341              stat.S_IXGRP |
342              stat.S_IXOTH)
343
344     # restore modified setting by its initial value
345     config.APPLICATION.base=base_setting
346
347     return filepath
348
349 def hack_for_distene_licence(filepath, licence_file):
350     '''Replace the distene licence env variable by a call to a file.
351     
352     :param filepath Str: The path to the launcher to modify.
353     '''  
354     shutil.move(filepath, filepath + "_old")
355     fileout= filepath
356     filein = filepath + "_old"
357     fin = open(filein, "r")
358     fout = open(fileout, "w")
359     text = fin.readlines()
360     # Find the Distene section
361     num_line = -1
362     for i,line in enumerate(text):
363         if "# Set DISTENE License" in line:
364             num_line = i
365             break
366     if num_line == -1:
367         # No distene product, there is nothing to do
368         fin.close()
369         for line in text:
370             fout.write(line)
371         fout.close()
372         return
373     del text[num_line +1]
374     del text[num_line +1]
375     text_to_insert ="""    try:
376         distene_licence_file=r"%s"
377         if sys.version_info[0] >= 3 and sys.version_info[1] >= 5:
378             import importlib.util
379             spec_dist = importlib.util.spec_from_file_location("distene_licence", distene_licence_file)
380             distene=importlib.util.module_from_spec(spec_dist)
381             spec_dist.loader.exec_module(distene)
382         else:
383             import imp
384             distene = imp.load_source('distene_licence', distene_licence_file)
385         distene.set_distene_variables(context)
386     except:
387         pass\n"""  % licence_file
388     text.insert(num_line + 1, text_to_insert)
389     for line in text:
390         fout.write(line)
391     fin.close()    
392     fout.close()
393     return
394     
395 def produce_relative_env_files(config,
396                               logger,
397                               file_dir,
398                               binaries_dir_name,
399                               exe_name=None):
400     '''Create some specific environment files for the binary package. These 
401        files use relative paths.
402     
403     :param config Config: The global configuration.
404     :param logger Logger: the logging instance
405     :param file_dir str: the directory where to put the files
406     :param binaries_dir_name str: the name of the repository where the binaries
407                                   are, in the archive.
408     :param exe_name str: if given generate a launcher executing exe_name
409     :return: the list of path of the produced environment files
410     :rtype: List
411     '''  
412
413     # set base mode to "no" for the archive - save current mode to restore it at the end
414     if "base" in config.APPLICATION:
415         base_setting=config.APPLICATION.base 
416     else:
417         base_setting="maybe"
418     config.APPLICATION.base="no"
419
420     # create an environment file writer
421     writer = src.environment.FileEnvWriter(config,
422                                            logger,
423                                            file_dir,
424                                            src_root=None)
425     
426     if src.architecture.is_windows():
427       shell = "bat"
428       filename  = "env_launch.bat"
429     else:
430       shell = "bash"
431       filename  = "env_launch.sh"
432
433     if exe_name:
434         filename=os.path.basename(exe_name)
435
436     # Write
437     filepath = writer.write_env_file(filename,
438                           False, # for launch
439                           shell,
440                           for_package = binaries_dir_name)
441
442     # Little hack to put out_dir_Path as environment variable
443     if src.architecture.is_windows() :
444       src.replace_in_file(filepath, '"out_dir_Path', '"%out_dir_Path%' )
445       src.replace_in_file(filepath, '=out_dir_Path', '=%out_dir_Path%' )
446       src.replace_in_file(filepath, ';out_dir_Path', ';%out_dir_Path%' )
447     else:
448       src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )
449       src.replace_in_file(filepath, ':out_dir_Path', ':${out_dir_Path}' )
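    # Editor's illustration (assumed file content, not taken from the sources):
    # on linux, a line written as
    #     export FOO_ROOT_DIR="out_dir_Path/BINARIES-XX/FOO"
    # becomes, after the substitutions above,
    #     export FOO_ROOT_DIR="${out_dir_Path}/BINARIES-XX/FOO"
    # so the user only has to define out_dir_Path before sourcing the file.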
450
451     if exe_name:
452         if src.architecture.is_windows():
453             cmd="\n\nrem Launch exe with user arguments\n%s " % exe_name + "%*"
454         else:
455             cmd='\n\n# Launch exe with user arguments\n%s "$*"' % exe_name
456         with open(filepath, "a") as exe_launcher:
457             exe_launcher.write(cmd)
458
459     # change the rights in order to make the file executable for everybody
460     os.chmod(filepath,
461              stat.S_IRUSR |
462              stat.S_IRGRP |
463              stat.S_IROTH |
464              stat.S_IWUSR |
465              stat.S_IXUSR |
466              stat.S_IXGRP |
467              stat.S_IXOTH)
468     
469     # restore modified setting by its initial value
470     config.APPLICATION.base=base_setting
471
472     return filepath
473
474 def produce_install_bin_file(config,
475                              logger,
476                              file_dir,
477                              d_sub,
478                              file_name):
479     '''Create a bash shell script which does substitutions in the BINARIES dir 
480        in order to use it for extra compilations.
481     
482     :param config Config: The global configuration.
483     :param logger Logger: the logging instance
484     :param file_dir str: the directory where to put the files
485     :param d_sub dict: the dictionary that contains the substitutions to be done
486     :param file_name str: the name of the install script file
487     :return: the produced file
488     :rtype: str
489     '''  
490     # Write
491     filepath = os.path.join(file_dir, file_name)
492     # open the file and write into it
493     # use codec utf-8 as sat variables are in unicode
494     with codecs.open(filepath, "w", 'utf-8') as installbin_file:
495         installbin_template_path = os.path.join(config.VARS.internal_dir,
496                                         "INSTALL_BIN.template")
497         
498         # build the name of the directory that will contain the binaries
499         binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
500         # build the substitution loop
501         loop_cmd = "for f in $(grep -RIl"
502         for key in d_sub:
503             loop_cmd += " -e "+ key
504         loop_cmd += ' ' + config.INTERNAL.config.install_dir +\
505                     '); do\n     sed -i "\n'
506         for key in d_sub:
507             loop_cmd += "        s?" + key + "?$(pwd)/" + d_sub[key] + "?g\n"
508         loop_cmd += '            " $f\ndone'
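        # Editor's illustration (assumed substitutions, not part of the original
        # sources): with d_sub = {"/old/prefix": "BINARIES-XX"} the loop built above
        # expands to a bash snippet of roughly this shape, which is then substituted
        # into the INSTALL_BIN.template file:
        #
        #   for f in $(grep -RIl -e /old/prefix <INSTALL_DIR>); do
        #        sed -i "
        #           s?/old/prefix?$(pwd)/BINARIES-XX?g
        #               " $f
        #   done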
509
510         d={}
511         d["BINARIES_DIR"] = binaries_dir_name
512         d["SUBSTITUTION_LOOP"]=loop_cmd
513         d["INSTALL_DIR"]=config.INTERNAL.config.install_dir
514         
515         # substitute the template and write it in file
516         content=src.template.substitute(installbin_template_path, d)
517         installbin_file.write(content)
518         # change the rights in order to make the file executable for everybody
519         os.chmod(filepath,
520                  stat.S_IRUSR |
521                  stat.S_IRGRP |
522                  stat.S_IROTH |
523                  stat.S_IWUSR |
524                  stat.S_IXUSR |
525                  stat.S_IXGRP |
526                  stat.S_IXOTH)
527     
528     return filepath
529
530 def product_appli_creation_script(config,
531                                   logger,
532                                   file_dir,
533                                   binaries_dir_name):
534     '''Create a script that can produce an application (EDF style) in the binary
535        package.
536     
537     :param config Config: The global configuration.
538     :param logger Logger: the logging instance
539     :param file_dir str: the directory where to put the file
540     :param binaries_dir_name str: the name of the repository where the binaries
541                                   are, in the archive.
542     :return: the path of the produced script file
543     :rtype: Str
544     '''
545     template_name = "create_appli.py.for_bin_packages.template"
546     template_path = os.path.join(config.VARS.internal_dir, template_name)
547     text_to_fill = open(template_path, "r").read()
548     text_to_fill = text_to_fill.replace("TO BE FILLED 1",
549                                         '"' + binaries_dir_name + '"')
550     
551     text_to_add = ""
552     for product_name in get_SALOME_modules(config):
553         product_info = src.product.get_product_config(config, product_name)
554        
555         if src.product.product_is_smesh_plugin(product_info):
556             continue
557
558         if 'install_dir' in product_info and bool(product_info.install_dir):
559             if src.product.product_is_cpp(product_info):
560                 # cpp module
561                 for cpp_name in src.product.get_product_components(product_info):
562                     line_to_add = ("<module name=\"" + 
563                                    cpp_name + 
564                                    "\" gui=\"yes\" path=\"''' + "
565                                    "os.path.join(dir_bin_name, \"" + 
566                                    cpp_name + "\") + '''\"/>")
567             else:
568                 # regular module
569                 line_to_add = ("<module name=\"" + 
570                                product_name + 
571                                "\" gui=\"yes\" path=\"''' + "
572                                "os.path.join(dir_bin_name, \"" + 
573                                product_name + "\") + '''\"/>")
574             text_to_add += line_to_add + "\n"
575     
576     filled_text = text_to_fill.replace("TO BE FILLED 2", text_to_add)
577     
578     tmp_file_path = os.path.join(file_dir, "create_appli.py")
579     ff = open(tmp_file_path, "w")
580     ff.write(filled_text)
581     ff.close()
582     
583     # change the rights in order to make the file executable for everybody
584     os.chmod(tmp_file_path,
585              stat.S_IRUSR |
586              stat.S_IRGRP |
587              stat.S_IROTH |
588              stat.S_IWUSR |
589              stat.S_IXUSR |
590              stat.S_IXGRP |
591              stat.S_IXOTH)
592     
593     return tmp_file_path
594
595 def bin_products_archives(config, logger, only_vcs):
596     '''Prepare binary packages for all products
597     :param config Config: The global configuration.
598     :return: the error status (0 on success)
599     :rtype: int
600     '''
601
602     logger.write("Make %s binary archives\n" % config.VARS.dist)
603     # Get the default directory where to put the packages
604     binpackage_path = os.path.join(config.APPLICATION.workdir, "PACKAGE", "products")
605     src.ensure_path_exists(binpackage_path)
606     # Get the list of product installation to add to the archive
607     l_products_name = sorted(config.APPLICATION.products.keys())
608     l_product_info = src.product.get_products_infos(l_products_name,
609                                                     config)
610     # loop on products : filter products, analyse properties,
611     # and create a binary archive for each installed product
612     l_not_installed=[] # store not installed products for warning at the end
613     for prod_name, prod_info in l_product_info:
614         # ignore the native and fixed products for install directories
615         if (src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes"
616                 or src.product.product_is_native(prod_info) 
617                 or src.product.product_is_fixed(prod_info)
618                 or not src.product.product_compiles(prod_info)):
619             continue
620         if only_vcs and not src.product.product_is_vcs(prod_info):
621             continue
622         if not src.product.check_installation(config, prod_info):
623             l_not_installed.append(prod_name)
624             continue  # product is not installed, we skip it
625         # prepare call to make_bin_archive
626         path_targz_prod = os.path.join(binpackage_path, prod_name + '-' + prod_info.version + "-" + config.VARS.dist + PACKAGE_EXT) 
627         targz_prod = tarfile.open(path_targz_prod, mode='w:gz')
628         bin_path = prod_info.install_dir
629         targz_prod.add(bin_path)
630         targz_prod.close()
631         # compute the md5 checksum of the product archive and store it alongside
632         import hashlib
633         with open(path_targz_prod,"rb") as f:
634             bytes = f.read() # read file as bytes
635             readable_hash = hashlib.md5(bytes).hexdigest()
636             with open(path_targz_prod+".md5", "w") as md5sum:
637                md5sum.write("%s  %s" % (readable_hash, os.path.basename(path_targz_prod))) 
638             logger.write("   archive : %s   (md5sum = %s)\n" % (path_targz_prod, readable_hash))
639
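    # Editor's note (illustrative, not from the original sources): the ".md5" files
    # written above use the standard "md5sum" layout (hash, two spaces, file name),
    # so an archive can be checked afterwards with e.g.:
    #
    #   md5sum -c <product>-<version>-<dist>.tar.gz.md5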
640     return 0
641
642 def binary_package(config, logger, options, tmp_working_dir):
643     '''Prepare a dictionary that stores all the needed directories and files to
644        add in a binary package.
645     
646     :param config Config: The global configuration.
647     :param logger Logger: the logging instance
648     :param options OptResult: the options of the launched command
649     :param tmp_working_dir str: The temporary local directory containing some 
650                                 specific directories or files needed in the 
651                                 binary package
652     :return: the dictionary that stores all the needed directories and files to
653              add in a binary package.
654              {label : (path_on_local_machine, path_in_archive)}
655     :rtype: dict
656     '''
657
658     # Get the list of product installation to add to the archive
659     l_products_name = sorted(config.APPLICATION.products.keys())
660     l_product_info = src.product.get_products_infos(l_products_name,
661                                                     config)
662
663     # suppress compile time products for binaries-only archives
664     if not options.sources:
665         update_config(config, logger, "compile_time", "yes")
666
667     l_install_dir = []
668     l_source_dir = []
669     l_not_installed = []
670     l_sources_not_present = []
671     generate_mesa_launcher = False  # a flag to know if we generate a mesa launcher
672     if ("APPLICATION" in config  and
673         "properties"  in config.APPLICATION  and
674         "mesa_launcher_in_package"    in config.APPLICATION.properties  and
675         config.APPLICATION.properties.mesa_launcher_in_package == "yes") :
676             generate_mesa_launcher=True
677
678     # first loop on products : filter products, analyse properties,
679     # and store the information that will be used to create the archive in the second loop 
680     for prod_name, prod_info in l_product_info:
681         # skip product with property not_in_package set to yes
682         if src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes":
683             continue  
684
685         # Add the sources of the products that have the property 
686         # sources_in_package : "yes"
687         if src.get_property_in_product_cfg(prod_info,
688                                            "sources_in_package") == "yes":
689             if os.path.exists(prod_info.source_dir):
690                 l_source_dir.append((prod_name, prod_info.source_dir))
691             else:
692                 l_sources_not_present.append(prod_name)
693
694         # ignore the native and fixed products for install directories
695         if (src.product.product_is_native(prod_info) 
696                 or src.product.product_is_fixed(prod_info)
697                 or not src.product.product_compiles(prod_info)):
698             continue
699         # 
700         # products with single_dir property will be installed in the PRODUCTS directory of the archive
701         is_single_dir=(src.appli_test_property(config,"single_install_dir", "yes") and \
702                        src.product.product_test_property(prod_info,"single_install_dir", "yes"))
703         if src.product.check_installation(config, prod_info):
704             l_install_dir.append((prod_name, prod_info.name, prod_info.install_dir,
705                                   is_single_dir, prod_info.install_mode))
706         else:
707             l_not_installed.append(prod_name)
708         
709         # Add also the cpp generated modules (if any)
710         if src.product.product_is_cpp(prod_info):
711             # cpp module
712             for name_cpp in src.product.get_product_components(prod_info):
713                 install_dir = os.path.join(config.APPLICATION.workdir,
714                                            config.INTERNAL.config.install_dir,
715                                            name_cpp) 
716                 if os.path.exists(install_dir):
717                     l_install_dir.append((name_cpp, name_cpp, install_dir, False, "value"))
718                 else:
719                     l_not_installed.append(name_cpp)
720         
721     # check the name of the directory that could contain the binaries 
722     # from a previous detar
723     binaries_from_detar = os.path.join(
724                               config.APPLICATION.workdir,
725                               config.INTERNAL.config.binary_dir + config.VARS.dist)
726     if os.path.exists(binaries_from_detar):
727          logger.write("""
728 WARNING: existing binaries directory from previous detar installation:
729          %s
730          To make new package from this, you have to: 
731          1) install binaries in INSTALL directory with the script "install_bin.sh" 
732             see README file for more details
733          2) or recompile everything in INSTALL with "sat compile" command 
734             this step is long, and requires some linux packages to be installed 
735             on your system\n
736 """ % binaries_from_detar)
737     
738     # Print warning or error if there are some missing products
739     if len(l_not_installed) > 0:
740         text_missing_prods = ""
741         for p_name in l_not_installed:
742             text_missing_prods += " - " + p_name + "\n"
743         if not options.force_creation:
744             msg = _("ERROR: there are missing product installations:")
745             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
746                                      text_missing_prods),
747                          1)
748             raise src.SatException(msg)
749         else:
750             msg = _("WARNING: there are missing product installations:")
751             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
752                                      text_missing_prods),
753                          1)
754
755     # Do the same for sources
756     if len(l_sources_not_present) > 0:
757         text_missing_prods = ""
758         for p_name in l_sources_not_present:
759             text_missing_prods += "-" + p_name + "\n"
760         if not options.force_creation:
761             msg = _("ERROR: there are missing product sources:")
762             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
763                                      text_missing_prods),
764                          1)
765             raise src.SatException(msg)
766         else:
767             msg = _("WARNING: there are missing product sources:")
768             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
769                                      text_missing_prods),
770                          1)
771  
772     # construct the name of the directory that will contain the binaries
773     if src.architecture.is_windows():
774         binaries_dir_name = config.INTERNAL.config.binary_dir
775     else:
776         binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
777     # construct the correlation table between the product names, their 
778     # actual install directories and their install directory in the archive
779     d_products = {}
780     for prod_name, prod_info_name, install_dir, is_single_dir, install_mode in l_install_dir:
781         prod_base_name=os.path.basename(install_dir)
782         if install_mode == "base":
783             # case of a product installed in base. 
784             # because the archive is in base:no mode, the name of the install dir is different inside the archive:
785             # we set it to the product name, or to PRODUCTS in single-dir mode
786             if is_single_dir:
787                 prod_base_name=config.INTERNAL.config.single_install_dir
788             else:
789                 prod_base_name=prod_info_name
790         path_in_archive = os.path.join(binaries_dir_name, prod_base_name)
791         d_products[prod_name + " (bin)"] = (install_dir, path_in_archive)
792         
793     for prod_name, source_dir in l_source_dir:
794         path_in_archive = os.path.join("SOURCES", prod_name)
795         d_products[prod_name + " (sources)"] = (source_dir, path_in_archive)
796
797     # create an archive of the compilation logs, and insert it into the tarball
798     logpath=os.path.join(config.APPLICATION.workdir, "LOGS")
799     path_targz_logs = os.path.join(tmp_working_dir, "logs.tgz")
800     tar_log = tarfile.open(path_targz_logs, mode='w:gz')
801     tar_log.add(logpath, arcname="LOGS")
802     tar_log.close()
803     d_products["LOGS"] = (path_targz_logs, "logs.tgz")
804  
805     # for packages of SALOME applications including KERNEL, 
806     # we produce a salome launcher or a virtual application (depending on salome version)
807     if 'KERNEL' in config.APPLICATION.products:
808         VersionSalome = src.get_salome_version(config)
809         # Case where SALOME has the launcher that uses the SalomeContext API
810         if VersionSalome >= 730:
811             # create the relative launcher and add it to the files to add
812             launcher_name = src.get_launcher_name(config)
813             launcher_package = produce_relative_launcher(config,
814                                                  logger,
815                                                  tmp_working_dir,
816                                                  launcher_name,
817                                                  binaries_dir_name)
818             d_products["launcher"] = (launcher_package, launcher_name)
819
820             # if the application contains mesa products, we generate in addition to the 
821             # classical salome launcher a launcher using mesa and called mesa_salome 
822             # (the mesa launcher will be used for remote usage through ssh).
823             if generate_mesa_launcher:
824                 #if there is one : store the use_mesa property
825                 restore_use_mesa_option=None
826                 if ('properties' in config.APPLICATION and 
827                     'use_mesa' in config.APPLICATION.properties):
828                     restore_use_mesa_option = config.APPLICATION.properties.use_mesa
829
830                 # activate mesa property, and generate a mesa launcher
831                 src.activate_mesa_property(config)  #activate use_mesa property
832                 launcher_mesa_name="mesa_"+launcher_name
833                 launcher_package_mesa = produce_relative_launcher(config,
834                                                      logger,
835                                                      tmp_working_dir,
836                                                      launcher_mesa_name,
837                                                      binaries_dir_name)
838                 d_products["launcher (mesa)"] = (launcher_package_mesa, launcher_mesa_name)
839
840                 # if there was a use_mesa value, we restore it
841                 # else we set it to the default value "no"
842                 if restore_use_mesa_option != None:
843                     config.APPLICATION.properties.use_mesa=restore_use_mesa_option
844                 else:
845                     config.APPLICATION.properties.use_mesa="no"
846
847             if options.sources:
848                 # if we mix binaries and sources, we add a copy of the launcher, 
849                 # prefixed with "bin", in order to avoid clashes
850                 launcher_copy_name="bin"+launcher_name
851                 launcher_package_copy = produce_relative_launcher(config,
852                                                      logger,
853                                                      tmp_working_dir,
854                                                      launcher_copy_name,
855                                                      binaries_dir_name)
856                 d_products["launcher (copy)"] = (launcher_package_copy, launcher_copy_name)
857         else:
858             # Provide a script for the creation of an application EDF style
859             appli_script = product_appli_creation_script(config,
860                                                         logger,
861                                                         tmp_working_dir,
862                                                         binaries_dir_name)
863             
864             d_products["appli script"] = (appli_script, "create_appli.py")
865
866     # Put also the environment file
867     env_file = produce_relative_env_files(config,
868                                            logger,
869                                            tmp_working_dir,
870                                            binaries_dir_name)
871
872     if src.architecture.is_windows():
873       filename  = "env_launch.bat"
874     else:
875       filename  = "env_launch.sh"
876     d_products["environment file"] = (env_file, filename)      
877
878     # If option exe, produce an extra launcher based on specified exe
879     if options.exe:
880         exe_file = produce_relative_env_files(config,
881                                               logger,
882                                               tmp_working_dir,
883                                               binaries_dir_name,
884                                               options.exe)
885             
886         if src.architecture.is_windows():
887           filename  = os.path.basename(options.exe) + ".bat"
888         else:
889           filename  = os.path.basename(options.exe) + ".sh"
890         d_products["exe file"] = (exe_file, filename)      
891     
892
893     return d_products
894
895 def source_package(sat, config, logger, options, tmp_working_dir):
896     '''Prepare a dictionary that stores all the needed directories and files to
897        add in a source package.
898     
899     :param config Config: The global configuration.
900     :param logger Logger: the logging instance
901     :param options OptResult: the options of the launched command
902     :param tmp_working_dir str: The temporary local directory containing some 
903                                 specific directories or files needed in the 
904                                 source package
905     :return: the dictionary that stores all the needed directories and files to
906              add in a source package.
907              {label : (path_on_local_machine, path_in_archive)}
908     :rtype: dict
909     '''
910     
911     d_archives={}
912     # Get all the products that are prepared using an archive
913     # unless ftp mode is specified (in this case the user of the
914     # archive will get the sources through the ftp mode of sat prepare)
915     if not options.ftp:
916         logger.write("Find archive products ... ")
917         d_archives, l_pinfo_vcs = get_archives(config, logger)
918         logger.write("Done\n")
919
920     d_archives_vcs = {}
921     if not options.with_vcs and len(l_pinfo_vcs) > 0:
922         # Make archives with the products that are not prepared using an archive
923         # (git, cvs, svn, etc)
924         logger.write("Construct archives for vcs products ... ")
925         d_archives_vcs = get_archives_vcs(l_pinfo_vcs,
926                                           sat,
927                                           config,
928                                           logger,
929                                           tmp_working_dir)
930         logger.write("Done\n")
931
932     # Create a project
933     logger.write("Create the project ... ")
934     d_project = create_project_for_src_package(config,
935                                                tmp_working_dir,
936                                                options.with_vcs,
937                                                options.ftp)
938     logger.write("Done\n")
939     
940     # Add salomeTools
941     tmp_sat = add_salomeTools(config, tmp_working_dir)
942     d_sat = {"salomeTools" : (tmp_sat, "sat")}
943     
944     # Add a sat symbolic link if not win
945     if not src.architecture.is_windows():
946         try:
947             t = os.getcwd()
948         except:
949             # In the jobs, os.getcwd() can fail
950             t = config.LOCAL.workdir
951         os.chdir(tmp_working_dir)
952
953         # create a symlink, to avoid reference with "salomeTool/.."
954         os.chdir("PROJECT")
955         if os.path.lexists("ARCHIVES"):
956             os.remove("ARCHIVES")
957         os.symlink("../ARCHIVES", "ARCHIVES")
958         os.chdir(t)
959         
960         d_sat["sat archive link"] = (os.path.join(tmp_working_dir,"PROJECT", "ARCHIVES"), 
961                                      os.path.join("PROJECT", "ARCHIVES"))
962     
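    # Editor's illustration (assumed layout, not part of the original sources): at
    # this point the source package roughly gathers
    #
    #   ARCHIVES/       the product tarballs (and ARCHIVES/wheels/ in pip mode)
    #   PROJECT/        the generated sat project, with an ARCHIVES symlink to ../ARCHIVES
    #   salomeTools/    the embedded sat, stored as "sat" in the archive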
963     d_source = src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
964     return d_source
965
966 def get_archives(config, logger):
967     '''Find all the products that are retrieved using an archive and all the
968        products that are retrieved using a vcs (git, cvs, svn) repository.
969     
970     :param config Config: The global configuration.
971     :param logger Logger: the logging instance
972     :return: the dictionary {name_product : 
973              (local path of its archive, path in the package of its archive )}
974              and the list of specific configuration corresponding to the vcs 
975              products
976     :rtype: (Dict, List)
977     '''
978     # Get the list of product information
979     l_products_name = config.APPLICATION.products.keys()
980     l_product_info = src.product.get_products_infos(l_products_name,
981                                                     config)
982     d_archives = {}
983     l_pinfo_vcs = []
984     for p_name, p_info in l_product_info:
985         # skip product with property not_in_package set to yes
986         if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
987             continue  
988         # ignore the native and fixed products
989         if (src.product.product_is_native(p_info) 
990                 or src.product.product_is_fixed(p_info)):
991             continue
992         if p_info.get_source == "archive":
993             archive_path = p_info.archive_info.archive_name
994             archive_name = os.path.basename(archive_path)
995             d_archives[p_name] = (archive_path,
996                                   os.path.join(ARCHIVE_DIR, archive_name))
997             if (src.appli_test_property(config,"pip", "yes") and 
998                 src.product.product_test_property(p_info,"pip", "yes")):
999                 # if pip mode is activated, and product is managed by pip
1000                 pip_wheels_dir=os.path.join(config.LOCAL.archive_dir,"wheels")
1001                 pip_wheel_pattern=os.path.join(pip_wheels_dir, 
1002                     "%s-%s*" % (p_info.name, p_info.version))
1003                 pip_wheel_path=glob.glob(pip_wheel_pattern)
1004                 msg_pip_not_found="Error in get_archive, pip wheel for "\
1005                                   "product %s-%s was not found in %s directory"
1006                 msg_pip_two_or_more="Error in get_archive, several pip wheels for "\
1007                                   "product %s-%s were found in %s directory"
1008                 if len(pip_wheel_path)==0:
1009                     raise src.SatException(msg_pip_not_found %\
1010                         (p_info.name, p_info.version, pip_wheels_dir))
1011                 if len(pip_wheel_path)>1:
1012                     raise src.SatException(msg_pip_two_or_more %\
1013                         (p_info.name, p_info.version, pip_wheels_dir))
1014
1015                 pip_wheel_name=os.path.basename(pip_wheel_path[0])
1016                 d_archives[p_name+" (pip wheel)"]=(pip_wheel_path[0], 
1017                     os.path.join(ARCHIVE_DIR, "wheels", pip_wheel_name))
1018         else:
1019             # this product is not managed by archive, 
1020             # an archive of the vcs directory will be created by get_archive_vcs
1021             l_pinfo_vcs.append((p_name, p_info)) 
1022             
1023     return d_archives, l_pinfo_vcs
1024
1025 def add_salomeTools(config, tmp_working_dir):
1026     '''Prepare a version of salomeTools that has a specific local.pyconf file 
1027        configured for a source package.
1028
1029     :param config Config: The global configuration.
1030     :param tmp_working_dir str: The temporary local directory containing some 
1031                                 specific directories or files needed in the 
1032                                 source package
1033     :return: The path to the local salomeTools directory to add in the package
1034     :rtype: str
1035     '''
1036     # Copy sat in the temporary working directory
1037     sat_tmp_path = src.Path(os.path.join(tmp_working_dir, "salomeTools"))
1038     sat_running_path = src.Path(config.VARS.salometoolsway)
1039     sat_running_path.copy(sat_tmp_path)
1040     
1041     # Update the local.pyconf file that contains the path to the project
1042     local_pyconf_name = "local.pyconf"
1043     local_pyconf_dir = os.path.join(tmp_working_dir, "salomeTools", "data")
1044     local_pyconf_file = os.path.join(local_pyconf_dir, local_pyconf_name)
1045     # Remove the .pyconf file in the root directory of salomeTools if there is
1046     # any. (For example when launching jobs, a pyconf file describing the jobs 
1047     # can be here and is not useful) 
1048     files_or_dir_SAT = os.listdir(os.path.join(tmp_working_dir, "salomeTools"))
1049     for file_or_dir in files_or_dir_SAT:
1050         if file_or_dir.endswith(".pyconf") or file_or_dir.endswith(".txt"):
1051             file_path = os.path.join(tmp_working_dir,
1052                                      "salomeTools",
1053                                      file_or_dir)
1054             os.remove(file_path)
1055     
1056     ff = open(local_pyconf_file, "w")
1057     ff.write(LOCAL_TEMPLATE)
1058     ff.close()
1059     
1060     return sat_tmp_path.path
1061
1062 def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir):
1063     '''For source packages that require all products to be retrieved as
1064        archives, one has to create archives for the vcs products.
1065        So this method calls the clean and source commands of sat and then creates
1066        the archives.
1067
1068     :param l_pinfo_vcs List: The list of specific configuration corresponding to
1069                              each vcs product
1070     :param sat Sat: The Sat instance that can be called to clean and source the
1071                     products
1072     :param config Config: The global configuration.
1073     :param logger Logger: the logging instance
1074     :param tmp_working_dir str: The temporary local directory containing some 
1075                                 specific directories or files needed in the 
1076                                 source package
1077     :return: the dictionary that stores all the archives to add in the source 
1078              package. {label : (path_on_local_machine, path_in_archive)}
1079     :rtype: dict
1080     '''
1081     # clean the source directory of all the vcs products, then use the source 
1082     # command and thus construct an archive that will not contain the patches
1083     l_prod_names = [pn for pn, __ in l_pinfo_vcs]
1084     if False: # clean is dangerous in user/SOURCES, fixed in tmp_local_working_dir
1085       logger.write(_("\nclean sources\n"))
1086       args_clean = config.VARS.application
1087       args_clean += " --sources --products "
1088       args_clean += ",".join(l_prod_names)
1089       logger.write("WARNING: get_archives_vcs clean\n         '%s'\n" % args_clean, 1)
1090       sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)
1091     if True:
1092       # source
1093       logger.write(_("get sources\n"))
1094       args_source = config.VARS.application
1095       args_source += " --products "
1096       args_source += ",".join(l_prod_names)
1097       svgDir = sat.cfg.APPLICATION.workdir
1098       tmp_local_working_dir = os.path.join(sat.cfg.APPLICATION.workdir, "tmp_package")  # to avoid too many big files in /tmp
1099       sat.cfg.APPLICATION.workdir = tmp_local_working_dir
1100       # DBG.write("SSS sat config.APPLICATION.workdir", sat.cfg.APPLICATION, True)
1101       # DBG.write("sat config id", id(sat.cfg), True)
1102       # beware: config is not the same id() as for sat.source()
1103       # sat.source(args_source, batch=True, verbose=5, logger_add_link = logger)
1104       import source
1105       source.run(args_source, sat, logger) #use this mode as runner.cfg reference
1106       
1107       # make the new archives
1108       d_archives_vcs = {}
1109       for pn, pinfo in l_pinfo_vcs:
1110           path_archive = make_archive(pn, pinfo, tmp_local_working_dir)
1111           logger.write("make archive vcs '%s'\n" % path_archive)
1112           d_archives_vcs[pn] = (path_archive,
1113                                 os.path.join(ARCHIVE_DIR, pn + ".tgz"))
1114       sat.cfg.APPLICATION.workdir = svgDir
1115       # DBG.write("END sat config", sat.cfg.APPLICATION, True)
1116     return d_archives_vcs
1117
1118 def make_bin_archive(prod_name, prod_info, where):
1119     '''Create an archive of a product from its install directory.
1120
1121     :param prod_name str: The name of the product.
1122     :param prod_info Config: The specific configuration corresponding to the 
1123                              product
1124     :param where str: The path of the repository where to put the resulting 
1125                       archive
1126     :return: The path of the resulting archive
1127     :rtype: str
1128     '''
1129     path_targz_prod = os.path.join(where, prod_name + PACKAGE_EXT)
1130     tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
1131     bin_path = prod_info.install_dir
1132     tar_prod.add(bin_path, arcname=prod_name)  # store content under the product name, not the archive path
1133     tar_prod.close()
1134     return path_targz_prod       
1135
1136 def make_archive(prod_name, prod_info, where):
1137     '''Create an archive of a product by searching its source directory.
1138
1139     :param prod_name str: The name of the product.
1140     :param prod_info Config: The specific configuration corresponding to the 
1141                              product
1142     :param where str: The path of the repository where to put the resulting 
1143                       archive
1144     :return: The path of the resulting archive
1145     :rtype: str
1146     '''
1147     path_targz_prod = os.path.join(where, prod_name + PACKAGE_EXT)
1148     tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
1149     local_path = prod_info.source_dir
1150     if old_python:
1151         tar_prod.add(local_path,
1152                      arcname=prod_name,
1153                      exclude=exclude_VCS_and_extensions_26)
1154     else:
1155         tar_prod.add(local_path,
1156                      arcname=prod_name,
1157                      filter=exclude_VCS_and_extensions)
1158     tar_prod.close()
1159     return path_targz_prod       
1160
1161 def create_project_for_src_package(config, tmp_working_dir, with_vcs, with_ftp):
1162     '''Create a specific project for a source package.
1163
1164     :param config Config: The global configuration.
1165     :param tmp_working_dir str: The temporary local directory containing some 
1166                                 specific directories or files needed in the 
1167                                 source package
1168     :param with_vcs boolean: True if the package is with vcs products (not 
1169                              transformed into archive products)
1170     :param with_ftp boolean: True if the package use ftp servers to get archives
1171     :return: The dictionary 
1172              {"project" : (produced project, project path in the archive)}
1173     :rtype: Dict
1174     '''
1175
1176     # Create in the working temporary directory the full project tree
1177     project_tmp_dir = os.path.join(tmp_working_dir, PROJECT_DIR)
1178     products_pyconf_tmp_dir = os.path.join(project_tmp_dir,
1179                                          "products")
1180     compil_scripts_tmp_dir = os.path.join(project_tmp_dir,
1181                                          "products",
1182                                          "compil_scripts")
1183     env_scripts_tmp_dir = os.path.join(project_tmp_dir,
1184                                          "products",
1185                                          "env_scripts")
1186     patches_tmp_dir = os.path.join(project_tmp_dir,
1187                                          "products",
1188                                          "patches")
1189     application_tmp_dir = os.path.join(project_tmp_dir,
1190                                          "applications")
1191     for directory in [project_tmp_dir,
1192                       compil_scripts_tmp_dir,
1193                       env_scripts_tmp_dir,
1194                       patches_tmp_dir,
1195                       application_tmp_dir]:
1196         src.ensure_path_exists(directory)
1197
1198     # Create the pyconf that contains the information of the project
1199     project_pyconf_name = "project.pyconf"        
1200     project_pyconf_file = os.path.join(project_tmp_dir, project_pyconf_name)
1201     ff = open(project_pyconf_file, "w")
1202     ff.write(PROJECT_TEMPLATE)
1203     if with_ftp and len(config.PATHS.ARCHIVEFTP) > 0:
1204         ftp_path='ARCHIVEFTP : "'+config.PATHS.ARCHIVEFTP[0]
1205         for ftpserver in config.PATHS.ARCHIVEFTP[1:]:
1206             ftp_path=ftp_path+":"+ftpserver
1207         ftp_path+='"'
1208         ff.write("# ftp servers where to search for prerequisite archives\n")
1209         ff.write(ftp_path)
1210     # add licence paths if any
1211     if len(config.PATHS.LICENCEPATH) > 0:  
1212         licence_path='LICENCEPATH : "'+config.PATHS.LICENCEPATH[0]
1213         for path in config.PATHS.LICENCEPATH[1:]:
1214             licence_path=licence_path+":"+path
1215         licence_path+='"'
1216         ff.write("\n# Where to search for licences\n")
1217         ff.write(licence_path)
1218         
1219
1220     ff.close()
1221     
1222     # Loop over the products to get their pyconf and all their scripts
1223     # (compilation, environment, patches)
1224     # and create the pyconf file to add to the project
1225     lproducts_name = config.APPLICATION.products.keys()
1226     l_products = src.product.get_products_infos(lproducts_name, config)
1227     for p_name, p_info in l_products:
1228         # skip product with property not_in_package set to yes
1229         if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
1230             continue  
1231         find_product_scripts_and_pyconf(p_name,
1232                                         p_info,
1233                                         config,
1234                                         with_vcs,
1235                                         compil_scripts_tmp_dir,
1236                                         env_scripts_tmp_dir,
1237                                         patches_tmp_dir,
1238                                         products_pyconf_tmp_dir)
1239     
1240     # for the application pyconf, write the config directly
1241     # (do not look for the original pyconf file) to avoid problems
1242     # with overwritten sections and the rm_products key
1243     write_application_pyconf(config, application_tmp_dir)
1244     
1245     d_project = {"project" : (project_tmp_dir, PROJECT_DIR )}
1246     return d_project
1247
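# For reference, the project tree produced by create_project_for_src_package
# looks like this (sketch):
#
#   PROJECT/
#     project.pyconf            # PROJECT_TEMPLATE + optional ARCHIVEFTP / LICENCEPATH
#     applications/
#       <application>.pyconf    # written by write_application_pyconf
#     products/
#       <product>.pyconf        # one per product (find_product_scripts_and_pyconf)
#       compil_scripts/
#       env_scripts/
#       patches/
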
1248 def find_product_scripts_and_pyconf(p_name,
1249                                     p_info,
1250                                     config,
1251                                     with_vcs,
1252                                     compil_scripts_tmp_dir,
1253                                     env_scripts_tmp_dir,
1254                                     patches_tmp_dir,
1255                                     products_pyconf_tmp_dir):
1256     '''Create a specific pyconf file for a given product. Get its environment 
1257        script, its compilation script and its patches, and put them in the temporary
1258        working directory. This method is used in the source package in order to
1259        construct the specific project.
1260
1261     :param p_name str: The name of the product.
1262     :param p_info Config: The specific configuration corresponding to the 
1263                              product
1264     :param config Config: The global configuration.
1265     :param with_vcs boolean: True if the package keeps vcs products as vcs
1266                              (they are not transformed into archive products)
1267     :param compil_scripts_tmp_dir str: The path to the temporary compilation 
1268                                        scripts directory of the project.
1269     :param env_scripts_tmp_dir str: The path to the temporary environment script 
1270                                     directory of the project.
1271     :param patches_tmp_dir str: The path to the temporary patch scripts 
1272                                 directory of the project.
1273     :param products_pyconf_tmp_dir str: The path to the temporary product 
1274                                         scripts directory of the project.
1275     '''
1276     
1277     # read the pyconf of the product
1278     product_pyconf_cfg = src.pyconf.Config(p_info.from_file)
1279
1280     # find the compilation script if any
1281     if src.product.product_has_script(p_info):
1282         compil_script_path = src.Path(p_info.compil_script)
1283         compil_script_path.copy(compil_scripts_tmp_dir)
1284
1285     # find the environment script if any
1286     if src.product.product_has_env_script(p_info):
1287         env_script_path = src.Path(p_info.environ.env_script)
1288         env_script_path.copy(env_scripts_tmp_dir)
1289
1290     # find the patches if any
1291     if src.product.product_has_patches(p_info):
1292         patches = src.pyconf.Sequence()
1293         for patch_path in p_info.patches:
1294             p_path = src.Path(patch_path)
1295             p_path.copy(patches_tmp_dir)
1296             patches.append(os.path.basename(patch_path), "")
1297
1298     if (not with_vcs) and src.product.product_is_vcs(p_info):
1299         # in non-vcs mode, a product retrieved from a VCS is turned into an archive product.
1300
1301         # depending upon the incremental mode, select impacted sections
1302         if "properties" in p_info and "incremental" in p_info.properties and\
1303             p_info.properties.incremental == "yes":
1304             sections = ["default", "default_win", p_info.section, p_info.section+"_win"]
1305         else:
1306             sections = [p_info.section]
1307         for section in sections:
1308             if section in product_pyconf_cfg and "get_source" in product_pyconf_cfg[section]:
1309                 DBG.write("sat package: set get_source to archive for product %s, section %s" %\
1310                           (p_name, section))
1311                 product_pyconf_cfg[section].get_source = "archive"
1312                 if not "archive_info" in product_pyconf_cfg[section]:
1313                     product_pyconf_cfg[section].addMapping("archive_info",
1314                                         src.pyconf.Mapping(product_pyconf_cfg),
1315                                         "")
1316                     product_pyconf_cfg[section].archive_info.archive_name =\
1317                         p_info.name + ".tgz"
1318     
1319     if (with_vcs) and src.product.product_is_vcs(p_info):
1320         # in vcs mode we must explicitly replace the git server url
1321         # (otherwise it will not be found later, because project files are not exported in archives)
1322         for section in product_pyconf_cfg:
1323             # in every section of the product pyconf, replace the git repo definition by its substituted value (found in p_info)
1324             if "git_info" in product_pyconf_cfg[section]:
1325                 for repo in product_pyconf_cfg[section].git_info:
1326                     if repo in p_info.git_info:
1327                         product_pyconf_cfg[section].git_info[repo] =  p_info.git_info[repo]
1328
1329     # write the pyconf file to the temporary project location
1330     product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir,
1331                                            p_name + ".pyconf")
1332     ff = open(product_tmp_pyconf_path, 'w')
1333     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1334     product_pyconf_cfg.__save__(ff, 1)
1335     ff.close()
1336
1337
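# Sketch of the rewrite performed by find_product_scripts_and_pyconf in
# non-vcs mode (the server url is hypothetical).  A product section such as
#
#   default :
#   {
#     get_source : "git"
#     git_info : {repo : "https://some.server/PRODUCT.git"}
#   }
#
# ends up in the packaged project as
#
#   default :
#   {
#     get_source : "archive"
#     archive_info : {archive_name : "PRODUCT.tgz"}
#     git_info : {repo : "https://some.server/PRODUCT.git"}
#   }
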
1338 def write_application_pyconf(config, application_tmp_dir):
1339     '''Write the application pyconf file in the specific temporary 
1340        directory containing the specific project of a source package.
1341
1342     :param config Config: The global configuration.
1343     :param application_tmp_dir str: The path to the temporary application 
1344                                     scripts directory of the project.
1345     '''
1346     application_name = config.VARS.application
1347     # write the pyconf file to the temporary application location
1348     application_tmp_pyconf_path = os.path.join(application_tmp_dir,
1349                                                application_name + ".pyconf")
1350     with open(application_tmp_pyconf_path, 'w') as f:
1351         f.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1352         res = src.pyconf.Config()
1353         app = src.pyconf.deepCopyMapping(config.APPLICATION)
1354
1355         # set base mode to "no" for the archive
1356         app.base = "no"
1357
1358         # Change the workdir
1359         app.workdir = src.pyconf.Reference(
1360                                  app,
1361                                  src.pyconf.DOLLAR,
1362                                  'VARS.salometoolsway + $VARS.sep + ".."')
1363         res.addMapping("APPLICATION", app, "")
1364         res.__save__(f, evaluated=False)
1365     
1366
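# When the application pyconf is saved with evaluated=False, the Reference
# built above is written literally; the APPLICATION section therefore contains
# (sketch):
#
#   base : "no"
#   workdir : $VARS.salometoolsway + $VARS.sep + ".."
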
1367 def sat_package(config, tmp_working_dir, options, logger):
1368     '''Prepare a dictionary that stores all the needed directories and files to
1369        add in a salomeTools package.
1370     
1371     :param tmp_working_dir str: The temporary local working directory 
1372     :param options OptResult: the options of the launched command
1373     :return: the dictionary that stores all the needed directories and files to
1374              add in a salomeTool package.
1375              add in a salomeTools package.
1376     :rtype: dict
1377     '''
1378     d_project = {}
1379
1380     # we include sat itself
1381     d_project["all_sat"]=(config.VARS.salometoolsway, "")
1382
1383     # and we overwrite local.pyconf with a clean version.
1384     local_pyconf_tmp_path = os.path.join(tmp_working_dir, "local.pyconf")
1385     local_file_path = os.path.join(config.VARS.datadir, "local.pyconf")
1386     local_cfg = src.pyconf.Config(local_file_path)
1387     local_cfg.PROJECTS.project_file_paths=src.pyconf.Sequence(local_cfg.PROJECTS)
1388     local_cfg.LOCAL["base"] = "default"
1389     local_cfg.LOCAL["workdir"] = "default"
1390     local_cfg.LOCAL["log_dir"] = "default"
1391     local_cfg.LOCAL["archive_dir"] = "default"
1392     local_cfg.LOCAL["VCS"] = "None"
1393     local_cfg.LOCAL["tag"] = src.get_salometool_version(config)
1394
1395     # if the archive contains a project, we write its relative path in local.pyconf
1396     if options.project:
1397         project_arch_path = os.path.join("projects", options.project, 
1398                                          os.path.basename(options.project_file_path))
1399         local_cfg.PROJECTS.project_file_paths.append(project_arch_path, "")
1400
1401     ff = open(local_pyconf_tmp_path, 'w')
1402     local_cfg.__save__(ff, 1)
1403     ff.close()
1404     d_project["local.pyconf"]=(local_pyconf_tmp_path, "data/local.pyconf")
1405     return d_project
1406     
1407
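# Sketch of the dictionary returned by sat_package (the local path is
# hypothetical):
#
#   {"all_sat"      : ("/path/to/salomeTools", ""),
#    "local.pyconf" : ("<tmp_working_dir>/local.pyconf", "data/local.pyconf")}
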
1408 def project_package(config, name_project, project_file_path, ftp_mode, tmp_working_dir, embedded_in_sat, logger):
1409     '''Prepare a dictionary that stores all the needed directories and files to
1410        add in a project package.
1411     
1412     :param project_file_path str: The path to the local project.
1413     :param ftp_mode boolean: If True, do not embed the product archives; the package will rely on ftp servers to retrieve them.
1414     :param tmp_working_dir str: The temporary local directory containing some 
1415                                 specific directories or files needed in the 
1416                                 project package
1417     :param embedded_in_sat boolean : the project package is embedded in a sat package
1418     :return: the dictionary that stores all the needed directories and files to
1419              add in a project package.
1420              {label : (path_on_local_machine, path_in_archive)}
1421     :rtype: dict
1422     '''
1423     d_project = {}
1424     # Read the project file and get the directories to add to the package
1425     
1426     try: 
1427       project_pyconf_cfg = config.PROJECTS.projects.__getattr__(name_project)
1428     except Exception:
1429       logger.write("""
1430 WARNING: config.PROJECTS.projects.%s does not exist, trying to read it from:\n%s\n""" % (name_project, project_file_path))
1431       project_pyconf_cfg = src.pyconf.Config(project_file_path)
1432       project_pyconf_cfg.PWD = os.path.dirname(project_file_path)
1433     
1434     paths = {"APPLICATIONPATH" : "applications",
1435              "PRODUCTPATH" : "products",
1436              "JOBPATH" : "jobs",
1437              "MACHINEPATH" : "machines"}
1438     if not ftp_mode:
1439         paths["ARCHIVEPATH"] = "archives"
1440
1441     # Loop over the project paths and add them to the package
1442     project_file_name = os.path.basename(project_file_path)
1443     for path in paths:
1444         if path not in project_pyconf_cfg:
1445             continue
1446         if embedded_in_sat:
1447             dest_path = os.path.join("projects", name_project, paths[path])
1448             project_file_dest = os.path.join("projects", name_project, project_file_name)
1449         else:
1450             dest_path = paths[path]
1451             project_file_dest = project_file_name
1452
1453         # Add the directory to the files to add in the package
1454         d_project[path] = (project_pyconf_cfg[path], dest_path)
1455
1456         # Modify the value of the path in the package
1457         project_pyconf_cfg[path] = src.pyconf.Reference(
1458                                     project_pyconf_cfg,
1459                                     src.pyconf.DOLLAR,
1460                                     'project_path + "/' + paths[path] + '"')
1461     
1462     # Modify some values
1463     if "project_path" not in project_pyconf_cfg:
1464         project_pyconf_cfg.addMapping("project_path",
1465                                       src.pyconf.Mapping(project_pyconf_cfg),
1466                                       "")
1467     project_pyconf_cfg.project_path = src.pyconf.Reference(project_pyconf_cfg,
1468                                                            src.pyconf.DOLLAR,
1469                                                            'PWD')
1470     # we don't want to export these two fields
1471     project_pyconf_cfg.__delitem__("file_path")
1472     project_pyconf_cfg.__delitem__("PWD")
1473     if ftp_mode:
1474         project_pyconf_cfg.__delitem__("ARCHIVEPATH")
1475     
1476     # Write the project pyconf file
1477     project_pyconf_tmp_path = os.path.join(tmp_working_dir, project_file_name)
1478     ff = open(project_pyconf_tmp_path, 'w')
1479     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1480     project_pyconf_cfg.__save__(ff, 1)
1481     ff.close()
1482     d_project["Project hat file"] = (project_pyconf_tmp_path, project_file_dest)
1483     
1484     return d_project
1485
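# Sketch: with embedded_in_sat=True and a project named "MyProject" (a
# hypothetical name), the PRODUCTPATH entry of the returned dictionary is
#
#   ("<original PRODUCTPATH on disk>", "projects/MyProject/products")
#
# and, in the pyconf written for the package, the path is rewritten to
#
#   PRODUCTPATH : $project_path + "/products"
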
1486 def add_readme(config, options, where):
1487     readme_path = os.path.join(where, "README")
1488     with codecs.open(readme_path, "w", 'utf-8') as f:
1489
1490         # templates for building the header
1491         readme_header="""
1492 # This package was generated with sat $version
1493 # Date: $date
1494 # User: $user
1495 # Distribution : $dist
1496
1497 In the following, $$ROOT represents the directory where you have installed 
1498 SALOME (the directory where this file is located).
1499
1500 """
1501         if src.architecture.is_windows():
1502             readme_header = readme_header.replace('$$ROOT','%ROOT%')
1503         readme_compilation_with_binaries="""
1504
1505 compilation based on the binaries used as prerequisites
1506 =======================================================
1507
1508 If you fail to compile the complete application (for example because
1509 you are not root on your system and cannot install missing packages), you
1510 may try a partial compilation based on the binaries.
1511 For that, it is necessary to copy the binaries from BINARIES to INSTALL,
1512 and to do some substitutions in the cmake and .la files (replace the build
1513 directories with local paths).
1514 The procedure is:
1515  1) Remove or rename the INSTALL directory if it exists
1516  2) Execute the shell script install_bin.sh:
1517  > cd $ROOT
1518  > ./install_bin.sh
1519  3) Use salomeTools (as explained in the Sources section) and compile only
1520     the modules you need (with the -p option)
1521
1522 """
1523         readme_header_tpl=string.Template(readme_header)
1524         readme_template_path_bin = os.path.join(config.VARS.internal_dir,
1525                 "README_BIN.template")
1526         readme_template_path_bin_launcher = os.path.join(config.VARS.internal_dir,
1527                 "README_LAUNCHER.template")
1528         readme_template_path_bin_virtapp = os.path.join(config.VARS.internal_dir,
1529                 "README_BIN_VIRTUAL_APP.template")
1530         readme_template_path_src = os.path.join(config.VARS.internal_dir,
1531                 "README_SRC.template")
1532         readme_template_path_pro = os.path.join(config.VARS.internal_dir,
1533                 "README_PROJECT.template")
1534         readme_template_path_sat = os.path.join(config.VARS.internal_dir,
1535                 "README_SAT.template")
1536
1537         # prepare substitution dictionary
1538         d = dict()
1539         d['user'] = config.VARS.user
1540         d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
1541         d['version'] = src.get_salometool_version(config)
1542         d['dist'] = config.VARS.dist
1543         f.write(readme_header_tpl.substitute(d)) # write the general header (common)
1544
1545         if options.binaries or options.sources:
1546             d['application'] = config.VARS.application
1547             d['BINARIES']    = config.INTERNAL.config.binary_dir
1548             d['SEPARATOR'] = config.VARS.sep
1549             if src.architecture.is_windows():
1550                 d['operatingSystem'] = 'Windows'
1551                 d['PYTHON3'] = 'python3'
1552                 d['ROOT']    = '%ROOT%'
1553             else:
1554                 d['operatingSystem'] = 'Linux'
1555                 d['PYTHON3'] = ''
1556                 d['ROOT']    = '$ROOT'
1557             f.write("# Application: " + d['application'] + "\n")
1558             if 'KERNEL' in config.APPLICATION.products:
1559                 VersionSalome = src.get_salome_version(config)
1560                 # Case where SALOME has the launcher that uses the SalomeContext API
1561                 if VersionSalome >= 730:
1562                     d['launcher'] = config.APPLICATION.profile.launcher_name
1563                 else:
1564                     d['virtual_app'] = 'runAppli' # this info is not used for now
1565
1566         # write the specific sections
1567         if options.binaries:
1568             f.write(src.template.substitute(readme_template_path_bin, d))
1569             if "virtual_app" in d:
1570                 f.write(src.template.substitute(readme_template_path_bin_virtapp, d))
1571             if "launcher" in d:
1572                 f.write(src.template.substitute(readme_template_path_bin_launcher, d))
1573
1574         if options.sources:
1575             f.write(src.template.substitute(readme_template_path_src, d))
1576
1577         if options.binaries and options.sources and not src.architecture.is_windows():
1578             f.write(readme_compilation_with_binaries)
1579
1580         if options.project:
1581             f.write(src.template.substitute(readme_template_path_pro, d))
1582
1583         if options.sat:
1584             f.write(src.template.substitute(readme_template_path_sat, d))
1585     
1586     return readme_path
1587
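# Minimal sketch of the string.Template substitution used by add_readme above
# (illustration only, never called by sat; the real values come from the
# configuration):
def _example_readme_header(version, user):
    """Return a filled README header line for the given version and user."""
    tpl = string.Template("# This package was generated with sat $version\n"
                          "# User: $user\n")
    return tpl.substitute({"version": version, "user": user})
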
1588 def update_config(config, logger,  prop, value):
1589     '''Remove from config.APPLICATION.products the products that have the property given as input.
1590     
1591     :param config Config: The global config.
1592     :param prop str: The property to filter
1593     :param value str: The value of the property to filter
1594     '''
1595     # if there is no APPLICATION (e.g. sat package -t): nothing to do
1596     if "APPLICATION" in config:
1597         l_product_to_remove = []
1598         for product_name in config.APPLICATION.products.keys():
1599             prod_cfg = src.product.get_product_config(config, product_name)
1600             if src.get_property_in_product_cfg(prod_cfg, prop) == value:
1601                 l_product_to_remove.append(product_name)
1602         for product_name in l_product_to_remove:
1603             config.APPLICATION.products.__delitem__(product_name)
1604             logger.write("Remove product %s with property %s\n" % (product_name, prop), 5)
1605
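# Example call (the property name is hypothetical): drop, before packaging,
# every product that carries the property "debug" with value "yes":
#
#   update_config(runner.cfg, logger, "debug", "yes")
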
1606 def description():
1607     '''method that is called when salomeTools is called with --help option.
1608     
1609     :return: The text to display for the package command description.
1610     :rtype: str
1611     '''
1612     return _("""
1613 The package command creates a tar file archive of a product.
1614 There are four kinds of archive, which can be mixed:
1615
1616  1 - The binary archive. 
1617      It contains the product installation directories plus a launcher.
1618  2 - The sources archive. 
1619      It contains the product source archives, a specific project and salomeTools.
1620  3 - The project archive. 
1621      It contains a project (give the project file path as argument).
1622  4 - The salomeTools archive. 
1623      It contains the salomeTools utility itself.
1624
1625 example:
1626  >> sat package SALOME-master --binaries --sources""")
1627   
1628 def run(args, runner, logger):
1629     '''method that is called when salomeTools is called with package parameter.
1630     '''
1631     
1632     # Parse the options
1633     (options, args) = parser.parse_args(args)
1634
1635     
1636     # Check that at least one type of package is requested
1637     all_option_types = (options.binaries,
1638                         options.sources,
1639                         options.project not in ["", None],
1640                         options.sat,
1641                         options.bin_products)
1642
1643     # Check if no option for package type
1644     if all_option_types.count(True) == 0:
1645         msg = _("Error: Specify a type for the package\nUse one of the "
1646                 "following options: --binaries, --sources, --project,"
1647                 " --salometools or --bin_products")
1648         logger.write(src.printcolors.printcError(msg), 1)
1649         logger.write("\n", 1)
1650         return 1
1651     do_create_package = options.binaries or options.sources or options.project or options.sat 
1652
1653     if options.bin_products:
1654         ret = bin_products_archives(runner.cfg, logger, options.with_vcs)
1655         if ret!=0:
1656             return ret
1657     if not do_create_package:
1658         return 0
1659
1660     # continue to create a tar.gz package 
1661
1662     # The directory where to put the package if it is neither binary nor source
1663     package_default_path = runner.cfg.LOCAL.workdir
1664     # if the package contains binaries or sources:
1665     if options.binaries or options.sources or options.bin_products:
1666         # Check that the command has been called with an application
1667         src.check_config_has_application(runner.cfg)
1668
1669         # Display information
1670         logger.write(_("Packaging application %s\n") % src.printcolors.printcLabel(
1671                                                     runner.cfg.VARS.application), 1)
1672         
1673         # Get the default directory where to put the packages
1674         package_default_path = os.path.join(runner.cfg.APPLICATION.workdir, "PACKAGE")
1675         src.ensure_path_exists(package_default_path)
1676         
1677     # if the package contains a project:
1678     if options.project:
1679         # check that the project is visible by SAT
1680         projectNameFile = options.project + ".pyconf"
1681         foundProject = None
1682         for i in runner.cfg.PROJECTS.project_file_paths:
1683             baseName = os.path.basename(i)
1684             if baseName == projectNameFile:
1685                 foundProject = i
1686                 break
1687
1688         if foundProject is None:
1689             local_path = os.path.join(runner.cfg.VARS.salometoolsway, "data", "local.pyconf")
1690             msg = _("""ERROR: the project %(1)s is not visible to salomeTools.
1691 Known projects are:
1692 %(2)s
1693
1694 Please add it to the file:
1695 %(3)s""" % \
1696                     {"1": options.project, "2": "\n  ".join(runner.cfg.PROJECTS.project_file_paths), "3": local_path})
1697             logger.write(src.printcolors.printcError(msg), 1)
1698             logger.write("\n", 1)
1699             return 1
1700         else:
1701             options.project_file_path = foundProject
1702             src.printcolors.print_value(logger, "Project path", options.project_file_path, 2)
1703     
1704     # Remove the products that are filtered by the --without_properties option
1705     if options.without_properties:
1706         prop, value = options.without_properties
1707         update_config(runner.cfg, logger, prop, value)
1708
1709     # Remove from config the products that have the not_in_package property
1710     update_config(runner.cfg, logger, "not_in_package", "yes")
1711
1712     # get the name of the archive or build it
1713     if options.name:
1714         if os.path.basename(options.name) == options.name:
1715             # only a name (not a path)
1716             archive_name = options.name           
1717             dir_name = package_default_path
1718         else:
1719             archive_name = os.path.basename(options.name)
1720             dir_name = os.path.dirname(options.name)
1721         
1722         # remove the extension
1723         if archive_name[-len(".tgz"):] == ".tgz":
1724             archive_name = archive_name[:-len(".tgz")]
1725         if archive_name[-len(".tar.gz"):] == ".tar.gz":
1726             archive_name = archive_name[:-len(".tar.gz")]
1727         
1728     else:
1729         archive_name=""
1730         dir_name = package_default_path
1731         if options.binaries or options.sources:
1732             archive_name = runner.cfg.APPLICATION.name
1733
1734         if options.binaries:
1735             archive_name += "-"+runner.cfg.VARS.dist
1736             
1737         if options.sources:
1738             archive_name += "-SRC"
1739             if options.with_vcs:
1740                 archive_name += "-VCS"
1741
1742         if options.sat:
1743             archive_name += ("salomeTools_" + src.get_salometool_version(runner.cfg))
1744
1745         if options.project:
1746             if options.sat:
1747                 archive_name += "_" 
1748             archive_name += ("satproject_" + options.project)
1749  
1750         if len(archive_name)==0: # no option worked 
1751             msg = _("Error: Cannot name the archive\n"
1752                     " check if at least one of the following options was "
1753                     "selected : --binaries, --sources, --project or"
1754                     " --salometools")
1755             logger.write(src.printcolors.printcError(msg), 1)
1756             logger.write("\n", 1)
1757             return 1
1758  
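    # Examples of resulting archive names (application name and dist are
    # illustrative): "SALOME-9-CO7" for --binaries, "SALOME-9-SRC-VCS" for
    # --sources --with_vcs; PACKAGE_EXT is appended below.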
1759     path_targz = os.path.join(dir_name, archive_name + PACKAGE_EXT)
1760     
1761     src.printcolors.print_value(logger, "Package path", path_targz, 2)
1762
1763     # Create a working directory for all files that are produced during the
1764     # package creation and that will be removed at the end of the command
1765     tmp_working_dir = os.path.join(runner.cfg.VARS.tmp_root, runner.cfg.VARS.datehour)
1766     src.ensure_path_exists(tmp_working_dir)
1767     logger.write("\n", 5)
1768     logger.write(_("The temporary working directory: %s\n") % tmp_working_dir, 5)
1769     
1770     logger.write("\n", 3)
1771
1772     msg = _("Preparation of files to add to the archive")
1773     logger.write(src.printcolors.printcLabel(msg), 2)
1774     logger.write("\n", 2)
1775     
1776     d_files_to_add={}  # content of the archive
1777
1778     # a dict to hold the paths that will need to be substituted for user recompilations
1779     d_paths_to_substitute={}  
1780
1781     if options.binaries:
1782         d_bin_files_to_add = binary_package(runner.cfg,
1783                                             logger,
1784                                             options,
1785                                             tmp_working_dir)
1786         # for every binary directory, store the substitution that will be required
1787         # for extra compilations
1788         for key in d_bin_files_to_add:
1789             if key.endswith("(bin)"):
1790                 source_dir = d_bin_files_to_add[key][0]
1791                 path_in_archive = d_bin_files_to_add[key][1].replace(
1792                    runner.cfg.INTERNAL.config.binary_dir + runner.cfg.VARS.dist,
1793                    runner.cfg.INTERNAL.config.install_dir)
1794                 if os.path.basename(source_dir)==os.path.basename(path_in_archive):
1795                     # if basename is the same we will just substitute the dirname 
1796                     d_paths_to_substitute[os.path.dirname(source_dir)]=\
1797                         os.path.dirname(path_in_archive)
1798                 else:
1799                     d_paths_to_substitute[source_dir]=path_in_archive
1800
1801         d_files_to_add.update(d_bin_files_to_add)
1802     if options.sources:
1803         d_files_to_add.update(source_package(runner,
1804                                         runner.cfg,
1805                                         logger, 
1806                                         options,
1807                                         tmp_working_dir))
1808         if options.binaries:
1809             # for archives with bin and sources we provide a shell script able to 
1810             # install binaries for compilation
1811             file_install_bin=produce_install_bin_file(runner.cfg,logger,
1812                                                       tmp_working_dir,
1813                                                       d_paths_to_substitute,
1814                                                       "install_bin.sh")
1815             d_files_to_add.update({"install_bin" : (file_install_bin, "install_bin.sh")})
1816             logger.write("substitutions that need to be done later : \n", 5)
1817             logger.write(str(d_paths_to_substitute), 5)
1818             logger.write("\n", 5)
1819     else:
1820         # the --salometools option is not considered when --sources is selected,
1821         # as that option already brings salomeTools!
1822         if options.sat:
1823             d_files_to_add.update(sat_package(runner.cfg, tmp_working_dir, 
1824                                   options, logger))
1825         
1826     if options.project:
1827         DBG.write("config for package %s" % options.project, runner.cfg)
1828         d_files_to_add.update(project_package(runner.cfg, options.project, options.project_file_path, options.ftp, tmp_working_dir, options.sat, logger))
1829
1830     if not(d_files_to_add):
1831         msg = _("Error: Empty dictionary to build the archive!\n")
1832         logger.write(src.printcolors.printcError(msg), 1)
1833         logger.write("\n", 1)
1834         return 1
1835
1836     # Add the README file in the package
1837     local_readme_tmp_path = add_readme(runner.cfg, options, tmp_working_dir)
1838     d_files_to_add["README"] = (local_readme_tmp_path, "README")
1839
1840     # Add the additional files of option add_files
1841     if options.add_files:
1842         for file_path in options.add_files:
1843             if not os.path.exists(file_path):
1844                 msg = _("WARNING: the file %s is not accessible.\n") % file_path
                     logger.write(msg, 1)  # report the skipped file instead of silently dropping it
1845                 continue
1846             file_name = os.path.basename(file_path)
1847             d_files_to_add[file_name] = (file_path, file_name)
1848
1849     logger.write("\n", 2)
1850     logger.write(src.printcolors.printcLabel(_("Actually do the package")), 2)
1851     logger.write("\n", 2)
1852     logger.write("\nfiles and directories to add:\n%s\n\n" % PP.pformat(d_files_to_add), 5)
1853
1854     res = 0
1855     try:
1856         # Creating the object tarfile
1857         tar = tarfile.open(path_targz, mode='w:gz')
1858         
1859         # get the filtering function if needed
1860         if old_python:
1861             filter_function = exclude_VCS_and_extensions_26
1862         else:
1863             filter_function = exclude_VCS_and_extensions
1864
1865         # Add the files to the tarfile object
1866         res = add_files(tar, archive_name, d_files_to_add, logger, f_exclude=filter_function)
1867         tar.close()
1868     except KeyboardInterrupt:
1869         logger.write(src.printcolors.printcError("\nERROR: forced interruption\n"), 1)
1870         logger.write(_("Removing the temporary working directory '%s'... ") % tmp_working_dir, 1)
1871         # remove the working directory
1872         shutil.rmtree(tmp_working_dir)
1873         logger.write(_("OK"), 1)
1874         logger.write(_("\n"), 1)
1875         return 1
1876     
1877     # case where there is no application: only sat itself is packaged, as with 'sat package -t'
1878     try:
1879         app = runner.cfg.APPLICATION
1880     except Exception:
1881         app = None
1882
1883     # remove the application's tmp_package directory if it exists
1884     if app is not None:
1885         tmp_local_working_dir = os.path.join(app.workdir, "tmp_package")
1886         if os.path.isdir(tmp_local_working_dir):
1887             shutil.rmtree(tmp_local_working_dir)
1888
1889     # remove the tmp directory, unless user has registered as developer
1890     if os.path.isdir(tmp_working_dir) and (not DBG.isDeveloper()):
1891         shutil.rmtree(tmp_working_dir)
1892     
1893     # Print again the path of the package
1894     logger.write("\n", 2)
1895     src.printcolors.print_value(logger, "Package path", path_targz, 2)
1896     
1897     return res