1 #!/usr/bin/env python
2 #-*- coding:utf-8 -*-
3 #  Copyright (C) 2010-2012  CEA/DEN
4 #
5 #  This library is free software; you can redistribute it and/or
6 #  modify it under the terms of the GNU Lesser General Public
7 #  License as published by the Free Software Foundation; either
8 #  version 2.1 of the License.
9 #
10 #  This library is distributed in the hope that it will be useful,
11 #  but WITHOUT ANY WARRANTY; without even the implied warranty of
12 #  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13 #  Lesser General Public License for more details.
14 #
15 #  You should have received a copy of the GNU Lesser General Public
16 #  License along with this library; if not, write to the Free Software
17 #  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
18
19 import os
20 import stat
21 import shutil
22 import datetime
23 import tarfile
24 import codecs
25 import string
26 import glob
27 import pprint as PP
28 import sys
29 import src
30
31 from application import get_SALOME_modules
32 import src.debug as DBG
33
34 old_python = sys.version_info[0] == 2 and sys.version_info[1] <= 6
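# tarfile's add() is called with the legacy 'exclude' callback on these old
# interpreters, and with the 'filter' callback otherwise (see add_files and make_archive)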
35
36 BINARY = "binary"
37 SOURCE = "Source"
38 PROJECT = "Project"
39 SAT = "Sat"
40
41 ARCHIVE_DIR = "ARCHIVES"
42 PROJECT_DIR = "PROJECT"
43
44 IGNORED_DIRS = [".git", ".svn"]
45 IGNORED_EXTENSIONS = []
46
47 PACKAGE_EXT=".tar.gz" # the extension we use for the packages
48
49 if src.architecture.is_windows():
50     PROJECT_TEMPLATE = """#!/usr/bin/env python
51 #-*- coding:utf-8 -*-
52
53 # The path to the archive root directory
54 root_path : $PWD + "/../"
55 # path to the PROJECT
56 project_path : $PWD + "/"
57
58 # Where to search the archives of the products
59 ARCHIVEPATH : $root_path + "ARCHIVES"
60 # Where to search the pyconf of the applications
61 APPLICATIONPATH : $project_path + "applications/"
62 # Where to search the pyconf of the products
63 PRODUCTPATH : $project_path + "products/"
64 # Where to search the pyconf of the jobs of the project
65 JOBPATH : $project_path + "jobs/"
66 # Where to search the pyconf of the machines of the project
67 MACHINEPATH : $project_path + "machines/"
68 """
69 else:
70     PROJECT_TEMPLATE = """#!/usr/bin/env python
71 #-*- coding:utf-8 -*-
72
73 # path to the PROJECT
74 project_path : $PWD + "/"
75
76 # Where to search the archives of the products
77 ARCHIVEPATH : $project_path + "ARCHIVES"
78 # Where to search the pyconf of the applications
79 APPLICATIONPATH : $project_path + "applications/"
80 # Where to search the pyconf of the products
81 PRODUCTPATH : $project_path + "products/"
82 # Where to search the pyconf of the jobs of the project
83 JOBPATH : $project_path + "jobs/"
84 # Where to search the pyconf of the machines of the project
85 MACHINEPATH : $project_path + "machines/"
86 """
87
88
89 LOCAL_TEMPLATE = ("""#!/usr/bin/env python
90 #-*- coding:utf-8 -*-
91
92   LOCAL :
93   {
94     base : 'default'
95     workdir : 'default'
96     log_dir : 'default'
97     archive_dir : 'default'
98     VCS : 'unknown'
99     tag : 'unknown'
100   }
101
102 PROJECTS :
103 {
104   project_file_paths : 
105   [
106 $LOCAL.workdir + $VARS.sep + \"""" + PROJECT_DIR + """\" + $VARS.sep + "project.pyconf"
107   ]
108 }
109 """)
110
111 # Define all possible options for the package command:  sat package <options>
112 parser = src.options.Options()
113 parser.add_option('b', 'binaries', 'boolean', 'binaries',
114     _('Optional: Produce a binary package.'), False)
115 parser.add_option('f', 'force_creation', 'boolean', 'force_creation',
116     _('Optional: Only for binary packages: produce the archive even if '
117       'some products are missing.'), False)
118 parser.add_option('s', 'sources', 'boolean', 'sources',
119     _('Optional: Produce a compilable archive of the sources of the '
120       'application.'), False)
121 parser.add_option('', 'bin_products', 'boolean', 'bin_products',
122     _('Optional: Create binary archives for all products.'), False)
123 parser.add_option('', 'with_vcs', 'boolean', 'with_vcs',
124     _('Optional: Do not make archives for products in VCS mode (git, cvs, svn). '
125       'Sat prepare will use VCS mode instead to retrieve them.'
126       '\n          Also, when combined with "--bin_products" option, restrict the building of product archives to VCS products.'),
127     False)
128 parser.add_option('', 'ftp', 'boolean', 'ftp',
129     _('Optional: Do not embed archives for products in archive mode. '
130     'Sat prepare will use ftp instead to retrieve them.'),
131     False)
132 parser.add_option('e', 'exe', 'string', 'exe',
133     _('Optional: Produce an extra launcher based upon the exe given as argument.'), "")
134 parser.add_option('p', 'project', 'string', 'project',
135     _('Optional: Produce an archive that contains a project.'), "")
136 parser.add_option('t', 'salometools', 'boolean', 'sat',
137     _('Optional: Produce an archive that contains salomeTools.'), False)
138 parser.add_option('n', 'name', 'string', 'name',
139     _('Optional: The name or full path of the archive.'), None)
140 parser.add_option('', 'add_files', 'list2', 'add_files',
141     _('Optional: The list of additional files to add to the archive.'), [])
142 parser.add_option('', 'without_properties', 'properties', 'without_properties',
143     _('Optional: Filter the products by their properties.\n\tSyntax: '
144       '--without_properties <property>:<value>'))
145
146
147 def add_files(tar, name_archive, d_content, logger, f_exclude=None):
148     '''Create an archive containing all directories and files that are given in
149        the d_content argument.
150     
151     :param tar tarfile: The tarfile instance used to make the archive.
152     :param name_archive str: The name of the archive to make.
154     :param d_content dict: The dictionary that contains all directories and files
154                            to add in the archive.
155                            d_content[label] = 
156                                         (path_on_local_machine, path_in_archive)
157     :param logger Logger: the logging instance
158     :param f_exclude Function: the function that filters
159     :return: 0 if success, 1 if not.
160     :rtype: int
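    Illustrative d_content entry (hypothetical label and paths):
        d_content["KERNEL (bin)"] = ("/tmp/INSTALL/KERNEL", "BINARIES-XX/KERNEL")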
161     '''
162     # get the max length of the labels in order to align the display
163     max_len = len(max(d_content.keys(), key=len))
164     
165     success = 0
166     # loop over each directory or file stored in the d_content dictionary
167     names = sorted(d_content.keys())
168     DBG.write("add tar names", names)
169
170     # used to avoid duplications (for pip install in python, or single_install_dir cases)
171     already_added=set() 
172     for name in names:
173         # display information
174         len_points = max_len - len(name) + 3
175         local_path, archive_path = d_content[name]
176         in_archive = os.path.join(name_archive, archive_path)
177         logger.write(name + " " + len_points * "." + " "+ in_archive + " ", 3)
178         # Get the local path and the path in archive 
179         # of the directory or file to add
180         # Add it in the archive
181         try:
182             key=local_path+"->"+in_archive
183             if key not in already_added:
184                 if old_python:
185                     tar.add(local_path,
186                                  arcname=in_archive,
187                                  exclude=exclude_VCS_and_extensions_26)
188                 else:
189                     tar.add(local_path,
190                                  arcname=in_archive,
191                                  filter=exclude_VCS_and_extensions)
192                 already_added.add(key)
193             logger.write(src.printcolors.printcSuccess(_("OK")), 3)
194         except Exception as e:
195             logger.write(src.printcolors.printcError(_("KO ")), 3)
196             logger.write(str(e), 3)
197             success = 1
198         logger.write("\n", 3)
199     return success
200
201
202 def exclude_VCS_and_extensions_26(filename):
203     ''' The function that is used to exclude from the package the links to the
204         VCS repositories (like .git) (only for python 2.6)
205
206     :param filename Str: The filename to exclude (or not).
207     :return: True if the file has to be excluded
208     :rtype: Boolean
209     '''
210     for dir_name in IGNORED_DIRS:
211         if dir_name in filename:
212             return True
213     for extension in IGNORED_EXTENSIONS:
214         if filename.endswith(extension):
215             return True
216     return False
217
218 def exclude_VCS_and_extensions(tarinfo):
219     ''' The function that is used to exclude from the package the links to the
220         VCS repositories (like .git)
221
222     :param tarinfo TarInfo: The tarfile member to filter (or not).
223     :return: None if the file has to be excluded
224     :rtype: tarinfo or None
225     '''
226     filename = tarinfo.name
227     for dir_name in IGNORED_DIRS:
228         if dir_name in filename:
229             return None
230     for extension in IGNORED_EXTENSIONS:
231         if filename.endswith(extension):
232             return None
233     return tarinfo
234
235 def produce_relative_launcher(config,
236                               logger,
237                               file_dir,
238                               file_name,
239                               binaries_dir_name):
240     '''Create a specific SALOME launcher for the binary package. This launcher 
241        uses relative paths.
242     
243     :param config Config: The global configuration.
244     :param logger Logger: the logging instance
245     :param file_dir str: the directory where to put the launcher
246     :param file_name str: The launcher name
247     :param binaries_dir_name str: the name of the directory where the binaries
248                                   are, in the archive.
249     :return: the path of the produced launcher
250     :rtype: str
251     '''
252     
253     # set base mode to "no" for the archive - save current mode to restore it at the end
254     if "base" in config.APPLICATION:
255         base_setting=config.APPLICATION.base 
256     else:
257         base_setting="maybe"
258     config.APPLICATION.base="no"
259
260     # get KERNEL installation path 
261     kernel_info = src.product.get_product_config(config, "KERNEL")
262     kernel_base_name=os.path.basename(kernel_info.install_dir)
263     if kernel_info.install_mode == "base":
264         # case of kernel installed in base. the kernel install dir name is different in the archive
265         kernel_base_name=os.path.basename(os.path.dirname(kernel_info.install_dir))
266     
267     kernel_root_dir = os.path.join(binaries_dir_name, kernel_base_name)
268
269     # set kernel bin dir (considering fhs property)
270     kernel_cfg = src.product.get_product_config(config, "KERNEL")
271     if src.get_property_in_product_cfg(kernel_cfg, "fhs"):
272         bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin") 
273     else:
274         bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin","salome") 
275
276     # check if the application contains an application module
277     # check also if the application has a distene product, 
278     # in this case get its licence file name
279     l_product_info = src.product.get_products_infos(config.APPLICATION.products.keys(), config)
280     salome_application_name="Not defined" 
281     distene_licence_file_name=False
282     for prod_name, prod_info in l_product_info:
283         # look for a "salome application" and a distene product
284         if src.get_property_in_product_cfg(prod_info, "is_distene") == "yes":
285             distene_licence_file_name = src.product.product_has_licence(prod_info, 
286                                             config.PATHS.LICENCEPATH) 
287         if src.get_property_in_product_cfg(prod_info, "is_salome_application") == "yes":
288             salome_application_name=prod_info.name
289
290     # if the application contains an application module, we set ABSOLUTE_APPLI_PATH to it
291     # if not we set it to KERNEL_INSTALL_DIR, which is sufficient, except for salome test
292     if salome_application_name == "Not defined":
293         app_root_dir=kernel_root_dir
294     else:
295         app_root_dir=os.path.join(binaries_dir_name, salome_application_name)
296
297     additional_env={}
298     additional_env['sat_bin_kernel_install_dir'] = "out_dir_Path + " +\
299                                                    config.VARS.sep + bin_kernel_install_dir
300     if "python3" in config.APPLICATION and config.APPLICATION.python3 == "yes":
301         additional_env['sat_python_version'] = 3
302     else:
303         additional_env['sat_python_version'] = 2
304
305     additional_env['ABSOLUTE_APPLI_PATH'] = "out_dir_Path" + config.VARS.sep + app_root_dir
306     launcher_name = src.get_launcher_name(config)
307     additional_env['APPLI'] = "out_dir_Path" + config.VARS.sep + file_name
308
309     # create an environment file writer
310     writer = src.environment.FileEnvWriter(config,
311                                            logger,
312                                            file_dir,
313                                            src_root=None,
314                                            env_info=None)
315     
316     filepath = os.path.join(file_dir, file_name)
317     # Write
318     writer.write_env_file(filepath,
319                           False,  # for launch
320                           "cfgForPy",
321                           additional_env=additional_env,
322                           no_path_init=False,
323                           for_package = binaries_dir_name)
324     
325     # Little hack to put out_dir_Path outside the strings
326     src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' )
327     src.replace_in_file(filepath, "r'out_dir_Path + ", "out_dir_Path + r'" )
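    # (e.g. a generated r"out_dir_Path/..." string becomes out_dir_Path + r"/...",
    #  so that the prefix variable is evaluated when the launcher runs)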
328     
329     # A hack to put a call to a file for distene licence.
330     # It does nothing to an application that has no distene product
331     if distene_licence_file_name:
332         logger.write("Application has a distene licence file! We use it in package launcher", 5)
333         hack_for_distene_licence(filepath, distene_licence_file_name)
334        
335     # change the rights in order to make the file executable for everybody
336     os.chmod(filepath,
337              stat.S_IRUSR |
338              stat.S_IRGRP |
339              stat.S_IROTH |
340              stat.S_IWUSR |
341              stat.S_IXUSR |
342              stat.S_IXGRP |
343              stat.S_IXOTH)
344
345     # restore modified setting by its initial value
346     config.APPLICATION.base=base_setting
347
348     return filepath
349
350 def hack_for_distene_licence(filepath, licence_file):
351     '''Replace the distene licence env variable by a call to a file.
352     
353     :param filepath Str: The path to the launcher to modify.
354     '''  
355     shutil.move(filepath, filepath + "_old")
356     fileout= filepath
357     filein = filepath + "_old"
358     fin = open(filein, "r")
359     fout = open(fileout, "w")
360     text = fin.readlines()
361     # Find the Distene section
362     num_line = -1
363     for i,line in enumerate(text):
364         if "# Set DISTENE License" in line:
365             num_line = i
366             break
367     if num_line == -1:
368         # No distene product, there is nothing to do
369         fin.close()
370         for line in text:
371             fout.write(line)
372         fout.close()
373         return
374     del text[num_line +1]
375     del text[num_line +1]
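    # the snippet inserted below loads the licence file as a python module at
    # launcher run time and calls its set_distene_variables(context)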
376     text_to_insert ="""    try:
377         distene_licence_file=r"%s"
378         if sys.version_info[0] >= 3 and sys.version_info[1] >= 5:
379             import importlib.util
380             spec_dist = importlib.util.spec_from_file_location("distene_licence", distene_licence_file)
381             distene=importlib.util.module_from_spec(spec_dist)
382             spec_dist.loader.exec_module(distene)
383         else:
384             import imp
385             distene = imp.load_source('distene_licence', distene_licence_file)
386         distene.set_distene_variables(context)
387     except:
388         pass\n"""  % licence_file
389     text.insert(num_line + 1, text_to_insert)
390     for line in text:
391         fout.write(line)
392     fin.close()    
393     fout.close()
394     return
395     
396 def produce_relative_env_files(config,
397                               logger,
398                               file_dir,
399                               binaries_dir_name,
400                               exe_name=None):
401     '''Create some specific environment files for the binary package. These 
402        files use relative paths.
403     
404     :param config Config: The global configuration.
405     :param logger Logger: the logging instance
406     :param file_dir str: the directory where to put the files
407     :param binaries_dir_name str: the name of the directory where the binaries
408                                   are, in the archive.
409     :param exe_name str: if given, generate a launcher executing exe_name
410     :return: the path of the produced environment file
411     :rtype: str
412     '''  
413
414     # set base mode to "no" for the archive - save current mode to restore it at the end
415     if "base" in config.APPLICATION:
416         base_setting=config.APPLICATION.base 
417     else:
418         base_setting="maybe"
419     config.APPLICATION.base="no"
420
421     # create an environment file writer
422     writer = src.environment.FileEnvWriter(config,
423                                            logger,
424                                            file_dir,
425                                            src_root=None)
426     
427     if src.architecture.is_windows():
428       shell = "bat"
429       filename  = "env_launch.bat"
430     else:
431       shell = "bash"
432       filename  = "env_launch.sh"
433
434     if exe_name:
435         filename=os.path.basename(exe_name)
436
437     # Write
438     filepath = writer.write_env_file(filename,
439                           False, # for launch
440                           shell,
441                           for_package = binaries_dir_name)
442
443     # Little hack to put out_dir_Path as environment variable
444     if src.architecture.is_windows() :
445       src.replace_in_file(filepath, '"out_dir_Path', '"%out_dir_Path%' )
446       src.replace_in_file(filepath, '=out_dir_Path', '=%out_dir_Path%' )
447       src.replace_in_file(filepath, ';out_dir_Path', ';%out_dir_Path%' )
448     else:
449       src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )
450       src.replace_in_file(filepath, ':out_dir_Path', ':${out_dir_Path}' )
451       src.replace_in_file(filepath, ';out_dir_Path', ';${out_dir_Path}' )
452
453     if exe_name:
454         if src.architecture.is_windows():
455             cmd="\n\nrem Launch exe with user arguments\n%s " % exe_name + "%*"
456         else:
457             cmd='\n\n# Launch exe with user arguments\n%s "$*"' % exe_name
458         with open(filepath, "a") as exe_launcher:
459             exe_launcher.write(cmd)
460
461     # change the rights in order to make the file executable for everybody
462     os.chmod(filepath,
463              stat.S_IRUSR |
464              stat.S_IRGRP |
465              stat.S_IROTH |
466              stat.S_IWUSR |
467              stat.S_IXUSR |
468              stat.S_IXGRP |
469              stat.S_IXOTH)
470     
471     # restore modified setting by its initial value
472     config.APPLICATION.base=base_setting
473
474     return filepath
475
476 def produce_install_bin_file(config,
477                              logger,
478                              file_dir,
479                              d_sub,
480                              file_name):
481     '''Create a bash shell script which does substitutions in the BINARIES dir
482        in order to use it for extra compilations.
483     
484     :param config Config: The global configuration.
485     :param logger Logger: the logging instance
486     :param file_dir str: the directory where to put the files
487     :param d_sub dict: the dictionary that contains the substitutions to be done
488     :param file_name str: the name of the install script file
489     :return: the produced file
490     :rtype: str
491     '''  
492     # Write
493     filepath = os.path.join(file_dir, file_name)
494     # open the file and write into it
495     # use codec utf-8 as sat variables are in unicode
496     with codecs.open(filepath, "w", 'utf-8') as installbin_file:
497         installbin_template_path = os.path.join(config.VARS.internal_dir,
498                                         "INSTALL_BIN.template")
499         
500         # build the name of the directory that will contain the binaries
501         binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
502         # build the substitution loop
503         loop_cmd = "for f in $(grep -RIl"
504         for key in d_sub:
505             loop_cmd += " -e "+ key
506         loop_cmd += ' ' + config.INTERNAL.config.install_dir +\
507                     '); do\n     sed -i "\n'
508         for key in d_sub:
509             loop_cmd += "        s?" + key + "?$(pwd)/" + d_sub[key] + "?g\n"
510         loop_cmd += '            " $f\ndone'
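        # the generated shell loop looks roughly like (illustrative keys/values):
        #   for f in $(grep -RIl -e KEY1 -e KEY2 <install_dir>); do
        #       sed -i "s?KEY1?$(pwd)/VAL1?g ..." $f
        #   done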
511
512         d={}
513         d["BINARIES_DIR"] = binaries_dir_name
514         d["SUBSTITUTION_LOOP"]=loop_cmd
515         d["INSTALL_DIR"]=config.INTERNAL.config.install_dir
516         
517         # substitute the template and write it in file
518         content=src.template.substitute(installbin_template_path, d)
519         installbin_file.write(content)
520         # change the rights in order to make the file executable for everybody
521         os.chmod(filepath,
522                  stat.S_IRUSR |
523                  stat.S_IRGRP |
524                  stat.S_IROTH |
525                  stat.S_IWUSR |
526                  stat.S_IXUSR |
527                  stat.S_IXGRP |
528                  stat.S_IXOTH)
529     
530     return filepath
531
532 def product_appli_creation_script(config,
533                                   logger,
534                                   file_dir,
535                                   binaries_dir_name):
536     '''Create a script that can produce an application (EDF style) in the binary
537        package.
538     
539     :param config Config: The global configuration.
540     :param logger Logger: the logging instance
541     :param file_dir str: the directory where to put the file
542     :param binaries_dir_name str: the name of the directory where the binaries
543                                   are, in the archive.
544     :return: the path of the produced script file
545     :rtype: Str
546     '''
547     template_name = "create_appli.py.for_bin_packages.template"
548     template_path = os.path.join(config.VARS.internal_dir, template_name)
549     text_to_fill = open(template_path, "r").read()
550     text_to_fill = text_to_fill.replace("TO BE FILLED 1",
551                                         '"' + binaries_dir_name + '"')
552     
553     text_to_add = ""
554     for product_name in get_SALOME_modules(config):
555         product_info = src.product.get_product_config(config, product_name)
556        
557         if src.product.product_is_smesh_plugin(product_info):
558             continue
559
560         if 'install_dir' in product_info and bool(product_info.install_dir):
561             if src.product.product_is_cpp(product_info):
562                 # cpp module
563                 for cpp_name in src.product.get_product_components(product_info):
564                     line_to_add = ("<module name=\"" + 
565                                    cpp_name + 
566                                    "\" gui=\"yes\" path=\"''' + "
567                                    "os.path.join(dir_bin_name, \"" + 
568                                    cpp_name + "\") + '''\"/>")
569             else:
570                 # regular module
571                 line_to_add = ("<module name=\"" + 
572                                product_name + 
573                                "\" gui=\"yes\" path=\"''' + "
574                                "os.path.join(dir_bin_name, \"" + 
575                                product_name + "\") + '''\"/>")
576             text_to_add += line_to_add + "\n"
577     
578     filled_text = text_to_fill.replace("TO BE FILLED 2", text_to_add)
579     
580     tmp_file_path = os.path.join(file_dir, "create_appli.py")
581     ff = open(tmp_file_path, "w")
582     ff.write(filled_text)
583     ff.close()
584     
585     # change the rights in order to make the file executable for everybody
586     os.chmod(tmp_file_path,
587              stat.S_IRUSR |
588              stat.S_IRGRP |
589              stat.S_IROTH |
590              stat.S_IWUSR |
591              stat.S_IXUSR |
592              stat.S_IXGRP |
593              stat.S_IXOTH)
594     
595     return tmp_file_path
596
597 def bin_products_archives(config, logger, only_vcs):
598     '''Prepare binary packages for all products
599     :param config Config: The global configuration.
600     :return: the error status
601     :rtype: int
602     '''
603
604     logger.write("Make %s binary archives\n" % config.VARS.dist)
605     # Get the default directory where to put the packages
606     binpackage_path = os.path.join(config.APPLICATION.workdir, "PACKAGE", "products")
607     src.ensure_path_exists(binpackage_path)
608     # Get the list of product installations to add to the archive
609     l_products_name = sorted(config.APPLICATION.products.keys())
610     l_product_info = src.product.get_products_infos(l_products_name,
611                                                     config)
612     # loop on products : filter products, analyse properties,
613     # and create an archive for each installed product
614     l_not_installed=[] # store not installed products for warning at the end
615     for prod_name, prod_info in l_product_info:
616         # ignore the native and fixed products for install directories
617         if (src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes"
618                 or src.product.product_is_native(prod_info) 
619                 or src.product.product_is_fixed(prod_info)
620                 or not src.product.product_compiles(prod_info)):
621             continue
622         if only_vcs and not src.product.product_is_vcs(prod_info):
623             continue
624         if not src.product.check_installation(config, prod_info):
625             l_not_installed.append(prod_name)
626             continue  # product is not installed, we skip it
627         # build the binary archive for this product
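        # (resulting archive name is e.g. BOOST-1.71.0-CO7.tar.gz, with an
        #  illustrative product/version/dist, plus a companion .md5 file)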
628         path_targz_prod = os.path.join(binpackage_path, prod_name + '-' + prod_info.version.replace("/", "_") + "-" + config.VARS.dist + PACKAGE_EXT)
629         targz_prod = tarfile.open(path_targz_prod, mode='w:gz')
630         bin_path = prod_info.install_dir
631         targz_prod.add(bin_path)
632         targz_prod.close()
633         # compute the md5 checksum of the archive
634         import hashlib
635         with open(path_targz_prod,"rb") as f:
636             data = f.read() # read file content as bytes
637             readable_hash = hashlib.md5(data).hexdigest()
638             with open(path_targz_prod+".md5", "w") as md5sum:
639                md5sum.write("%s  %s" % (readable_hash, os.path.basename(path_targz_prod))) 
640             logger.write("   archive : %s   (md5sum = %s)\n" % (path_targz_prod, readable_hash))
641
642     return 0
643
644 def binary_package(config, logger, options, tmp_working_dir):
645     '''Prepare a dictionary that stores all the needed directories and files to
646        add in a binary package.
647     
648     :param config Config: The global configuration.
649     :param logger Logger: the logging instance
650     :param options OptResult: the options of the launched command
651     :param tmp_working_dir str: The temporary local directory containing some 
652                                 specific directories or files needed in the 
653                                 binary package
654     :return: the dictionary that stores all the needed directories and files to
655              add in a binary package.
656              {label : (path_on_local_machine, path_in_archive)}
657     :rtype: dict
658     '''
659
660     # Get the list of product installations to add to the archive
661     l_products_name = sorted(config.APPLICATION.products.keys())
662     l_product_info = src.product.get_products_infos(l_products_name,
663                                                     config)
664
665     # suppress compile time products for binaries-only archives
666     if not options.sources:
667         update_config(config, logger, "compile_time", "yes")
668
669     l_install_dir = []
670     l_source_dir = []
671     l_not_installed = []
672     l_sources_not_present = []
673     generate_mesa_launcher = False  # a flag to know if we generate a mesa launcher
674     if ("APPLICATION" in config  and
675         "properties"  in config.APPLICATION  and
676         "mesa_launcher_in_package"    in config.APPLICATION.properties  and
677         config.APPLICATION.properties.mesa_launcher_in_package == "yes") :
678             generate_mesa_launcher=True
679
680     # first loop on products : filter products, analyse properties,
681     # and store the information that will be used to create the archive in the second loop 
682     for prod_name, prod_info in l_product_info:
683         # skip product with property not_in_package set to yes
684         if src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes":
685             continue  
686
687         # Add the sources of the products that have the property 
688         # sources_in_package : "yes"
689         if src.get_property_in_product_cfg(prod_info,
690                                            "sources_in_package") == "yes":
691             if os.path.exists(prod_info.source_dir):
692                 l_source_dir.append((prod_name, prod_info.source_dir))
693             else:
694                 l_sources_not_present.append(prod_name)
695
696         # ignore the native and fixed products for install directories
697         if (src.product.product_is_native(prod_info) 
698                 or src.product.product_is_fixed(prod_info)
699                 or not src.product.product_compiles(prod_info)):
700             continue
701         # 
702         # products with single_dir property will be installed in the PRODUCTS directory of the archive
703         is_single_dir=(src.appli_test_property(config,"single_install_dir", "yes") and \
704                        src.product.product_test_property(prod_info,"single_install_dir", "yes"))
705         if src.product.check_installation(config, prod_info):
706             l_install_dir.append((prod_name, prod_info.name, prod_info.install_dir,
707                                   is_single_dir, prod_info.install_mode))
708         else:
709             l_not_installed.append(prod_name)
710         
711         # Add also the cpp generated modules (if any)
712         if src.product.product_is_cpp(prod_info):
713             # cpp module
714             for name_cpp in src.product.get_product_components(prod_info):
715                 install_dir = os.path.join(config.APPLICATION.workdir,
716                                            config.INTERNAL.config.install_dir,
717                                            name_cpp) 
718                 if os.path.exists(install_dir):
719                     l_install_dir.append((name_cpp, name_cpp, install_dir, False, "value"))
720                 else:
721                     l_not_installed.append(name_cpp)
722         
723     # check the name of the directory that could contain the binaries
724     # from a previous detar
725     binaries_from_detar = os.path.join(
726                               config.APPLICATION.workdir,
727                               config.INTERNAL.config.binary_dir + config.VARS.dist)
728     if os.path.exists(binaries_from_detar):
729          logger.write("""
730 WARNING: existing binaries directory from previous detar installation:
731          %s
732          To make a new package from this, you have to:
733          1) install binaries in INSTALL directory with the script "install_bin.sh" 
734             see README file for more details
735          2) or recompile everything in INSTALL with "sat compile" command 
736             this step is long, and requires some linux packages to be installed 
737             on your system\n
738 """ % binaries_from_detar)
739     
740     # Print warning or error if there are some missing products
741     if len(l_not_installed) > 0:
742         text_missing_prods = ""
743         for p_name in l_not_installed:
744             text_missing_prods += " - " + p_name + "\n"
745         if not options.force_creation:
746             msg = _("ERROR: there are missing product installations:")
747             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
748                                      text_missing_prods),
749                          1)
750             raise src.SatException(msg)
751         else:
752             msg = _("WARNING: there are missing products installations:")
753             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
754                                      text_missing_prods),
755                          1)
756
757     # Do the same for sources
758     if len(l_sources_not_present) > 0:
759         text_missing_prods = ""
760         for p_name in l_sources_not_present:
761             text_missing_prods += "-" + p_name + "\n"
762         if not options.force_creation:
763             msg = _("ERROR: there are missing product sources:")
764             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
765                                      text_missing_prods),
766                          1)
767             raise src.SatException(msg)
768         else:
769             msg = _("WARNING: there are missing products sources:")
770             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
771                                      text_missing_prods),
772                          1)
773  
774     # construct the name of the directory that will contain the binaries
775     if src.architecture.is_windows():
776         binaries_dir_name = config.INTERNAL.config.binary_dir
777     else:
778         binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
779     # construct the correlation table between the product names, their
780     # actual install directories and their install directory in the archive
781     d_products = {}
782     for prod_name, prod_info_name, install_dir, is_single_dir, install_mode in l_install_dir:
783         prod_base_name=os.path.basename(install_dir)
784         if install_mode == "base":
785             # case of a product installed in base.
786             # because the archive is in base:no mode, the install dir name is different inside the archive:
787             # we set it to the product name, or to PRODUCTS in single-dir mode
788             if is_single_dir:
789                 prod_base_name=config.INTERNAL.config.single_install_dir
790             else:
791                 prod_base_name=prod_info_name
792         path_in_archive = os.path.join(binaries_dir_name, prod_base_name)
793         d_products[prod_name + " (bin)"] = (install_dir, path_in_archive)
794         
795     for prod_name, source_dir in l_source_dir:
796         path_in_archive = os.path.join("SOURCES", prod_name)
797         d_products[prod_name + " (sources)"] = (source_dir, path_in_archive)
798
799     # create an archive of the compilation logs, and insert it into the tarball
800     logpath=os.path.join(config.APPLICATION.workdir, "LOGS")
801     path_targz_logs = os.path.join(tmp_working_dir, "logs.tgz")
802     tar_log = tarfile.open(path_targz_logs, mode='w:gz')
803     tar_log.add(logpath, arcname="LOGS")
804     tar_log.close()
805     d_products["LOGS"] = (path_targz_logs, "logs.tgz")
806  
807     # for packages of SALOME applications including KERNEL, 
808     # we produce a salome launcher or a virtual application (depending on salome version)
809     if 'KERNEL' in config.APPLICATION.products:
810         VersionSalome = src.get_salome_version(config)
811         # Case where SALOME has the launcher that uses the SalomeContext API
812         if VersionSalome >= 730:
813             # create the relative launcher and add it to the files to add
814             launcher_name = src.get_launcher_name(config)
815             launcher_package = produce_relative_launcher(config,
816                                                  logger,
817                                                  tmp_working_dir,
818                                                  launcher_name,
819                                                  binaries_dir_name)
820             d_products["launcher"] = (launcher_package, launcher_name)
821
822             # if the application contains mesa products, we generate in addition to the 
823             # classical salome launcher a launcher using mesa and called mesa_salome 
824             # (the mesa launcher will be used for remote usage through ssh).
825             if generate_mesa_launcher:
826                 #if there is one : store the use_mesa property
827                 restore_use_mesa_option=None
828                 if ('properties' in config.APPLICATION and 
829                     'use_mesa' in config.APPLICATION.properties):
830                     restore_use_mesa_option = config.APPLICATION.properties.use_mesa
831
832                 # activate mesa property, and generate a mesa launcher
833                 src.activate_mesa_property(config)  #activate use_mesa property
834                 launcher_mesa_name="mesa_"+launcher_name
835                 launcher_package_mesa = produce_relative_launcher(config,
836                                                      logger,
837                                                      tmp_working_dir,
838                                                      launcher_mesa_name,
839                                                      binaries_dir_name)
840                 d_products["launcher (mesa)"] = (launcher_package_mesa, launcher_mesa_name)
841
842                 # if there was a use_mesa value, we restore it
843                 # else we set it to the default value "no"
844                 if restore_use_mesa_option != None:
845                     config.APPLICATION.properties.use_mesa=restore_use_mesa_option
846                 else:
847                     config.APPLICATION.properties.use_mesa="no"
848
849             if options.sources:
850                 # if we mix binaries and sources, we add a copy of the launcher, 
851                 # prefixed with "bin", in order to avoid clashes
852                 launcher_copy_name="bin"+launcher_name
853                 launcher_package_copy = produce_relative_launcher(config,
854                                                      logger,
855                                                      tmp_working_dir,
856                                                      launcher_copy_name,
857                                                      binaries_dir_name)
858                 d_products["launcher (copy)"] = (launcher_package_copy, launcher_copy_name)
859         else:
860             # Provide a script for the creation of an EDF-style application
861             appli_script = product_appli_creation_script(config,
862                                                         logger,
863                                                         tmp_working_dir,
864                                                         binaries_dir_name)
865             
866             d_products["appli script"] = (appli_script, "create_appli.py")
867
868     # Put also the environment file
869     env_file = produce_relative_env_files(config,
870                                            logger,
871                                            tmp_working_dir,
872                                            binaries_dir_name)
873
874     if src.architecture.is_windows():
875       filename  = "env_launch.bat"
876     else:
877       filename  = "env_launch.sh"
878     d_products["environment file"] = (env_file, filename)      
879
880     # If option exe, produce an extra launcher based on specified exe
881     if options.exe:
882         exe_file = produce_relative_env_files(config,
883                                               logger,
884                                               tmp_working_dir,
885                                               binaries_dir_name,
886                                               options.exe)
887             
888         if src.architecture.is_windows():
889           filename  = os.path.basename(options.exe) + ".bat"
890         else:
891           filename  = os.path.basename(options.exe) + ".sh"
892         d_products["exe file"] = (exe_file, filename)      
893     
894
895     return d_products
896
897 def source_package(sat, config, logger, options, tmp_working_dir):
898     '''Prepare a dictionary that stores all the needed directories and files to
899        add in a source package.
900     
901     :param config Config: The global configuration.
902     :param logger Logger: the logging instance
903     :param options OptResult: the options of the launched command
904     :param tmp_working_dir str: The temporary local directory containing some 
905                                 specific directories or files needed in the 
906                                 source package
907     :return: the dictionary that stores all the needed directories and files to
908              add in a source package.
909              {label : (path_on_local_machine, path_in_archive)}
910     :rtype: dict
911     '''
912     
913     d_archives, l_pinfo_vcs = {}, []
914     # Get all the products that are prepared using an archive
915     # unless ftp mode is specified (in this case the user of the
916     # archive will get the sources through the ftp mode of sat prepare)
917     if not options.ftp:
918         logger.write("Find archive products ... ")
919         d_archives, l_pinfo_vcs = get_archives(config, logger)
920         logger.write("Done\n")
921
922     d_archives_vcs = {}
923     if not options.with_vcs and len(l_pinfo_vcs) > 0:
924         # Make archives with the products that are not prepared using an archive
925         # (git, cvs, svn, etc)
926         logger.write("Construct archives for vcs products ... ")
927         d_archives_vcs = get_archives_vcs(l_pinfo_vcs,
928                                           sat,
929                                           config,
930                                           logger,
931                                           tmp_working_dir)
932         logger.write("Done\n")
933
934     # Create a project
935     logger.write("Create the project ... ")
936     d_project = create_project_for_src_package(config,
937                                                tmp_working_dir,
938                                                options.with_vcs,
939                                                options.ftp)
940     logger.write("Done\n")
941     
942     # Add salomeTools
943     tmp_sat = add_salomeTools(config, tmp_working_dir)
944     d_sat = {"salomeTools" : (tmp_sat, "sat")}
945     
946     # Add a sat symbolic link if not win
947     if not src.architecture.is_windows():
948         try:
949             t = os.getcwd()
950         except:
951             # In the jobs, os.getcwd() can fail
952             t = config.LOCAL.workdir
953         os.chdir(tmp_working_dir)
954
955         # create a symlink, to avoid reference with "salomeTool/.."
956         os.chdir("PROJECT")
957         if os.path.lexists("ARCHIVES"):
958             os.remove("ARCHIVES")
959         os.symlink("../ARCHIVES", "ARCHIVES")
960         os.chdir(t)
961         
962         d_sat["sat archive link"] = (os.path.join(tmp_working_dir,"PROJECT", "ARCHIVES"), 
963                                      os.path.join("PROJECT", "ARCHIVES"))
964     
965     d_source = src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
966     return d_source
967
968 def get_archives(config, logger):
969     '''Find all the products that are retrieved using an archive and all the
970        products that are retrieved using a vcs (git, cvs, svn) repository.
971     
972     :param config Config: The global configuration.
973     :param logger Logger: the logging instance
974     :return: the dictionary {name_product : 
975              (local path of its archive, path in the package of its archive )}
976              and the list of specific configurations corresponding to the vcs
977              products
978     :rtype: (Dict, List)
979     '''
980     # Get the list of product information
981     l_products_name = config.APPLICATION.products.keys()
982     l_product_info = src.product.get_products_infos(l_products_name,
983                                                     config)
984     d_archives = {}
985     l_pinfo_vcs = []
986     for p_name, p_info in l_product_info:
987         # skip product with property not_in_package set to yes
988         if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
989             continue  
990         # ignore the native and fixed products
991         if (src.product.product_is_native(p_info) 
992                 or src.product.product_is_fixed(p_info)):
993             continue
994         if p_info.get_source == "archive":
995             archive_path = p_info.archive_info.archive_name
996             archive_name = os.path.basename(archive_path)
997             d_archives[p_name] = (archive_path,
998                                   os.path.join(ARCHIVE_DIR, archive_name))
999             if (src.appli_test_property(config,"pip", "yes") and 
1000                 src.product.product_test_property(p_info,"pip", "yes")):
1001                 # if pip mode is activated, and product is managed by pip
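                # the matching wheel is looked up in <archive_dir>/wheels,
                # e.g. wheels/numpy-1.16* (illustrative product and version)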
1002                 pip_wheels_dir=os.path.join(config.LOCAL.archive_dir,"wheels")
1003                 pip_wheel_pattern=os.path.join(pip_wheels_dir, 
1004                     "%s-%s*" % (p_info.name, p_info.version))
1005                 pip_wheel_path=glob.glob(pip_wheel_pattern)
1006                 msg_pip_not_found="Error in get_archive, pip wheel for "\
1007                                   "product %s-%s was not found in %s directory"
1008                 msg_pip_two_or_more="Error in get_archive, several pip wheels for "\
1009                                   "product %s-%s were found in %s directory"
1010                 if len(pip_wheel_path)==0:
1011                     raise src.SatException(msg_pip_not_found %\
1012                         (p_info.name, p_info.version, pip_wheels_dir))
1013                 if len(pip_wheel_path)>1:
1014                     raise src.SatException(msg_pip_two_or_more %\
1015                         (p_info.name, p_info.version, pip_wheels_dir))
1016
1017                 pip_wheel_name=os.path.basename(pip_wheel_path[0])
1018                 d_archives[p_name+" (pip wheel)"]=(pip_wheel_path[0], 
1019                     os.path.join(ARCHIVE_DIR, "wheels", pip_wheel_name))
1020         else:
1021             # this product is not managed by archive, 
1022             # an archive of the vcs directory will be created by get_archive_vcs
1023             l_pinfo_vcs.append((p_name, p_info)) 
1024             
1025     return d_archives, l_pinfo_vcs
1026
1027 def add_salomeTools(config, tmp_working_dir):
1028     '''Prepare a version of salomeTools that has a specific local.pyconf file 
1029        configured for a source package.
1030
1031     :param config Config: The global configuration.
1032     :param tmp_working_dir str: The temporary local directory containing some 
1033                                 specific directories or files needed in the 
1034                                 source package
1035     :return: The path to the local salomeTools directory to add in the package
1036     :rtype: str
1037     '''
1038     # Copy sat in the temporary working directory
1039     sat_tmp_path = src.Path(os.path.join(tmp_working_dir, "salomeTools"))
1040     sat_running_path = src.Path(config.VARS.salometoolsway)
1041     sat_running_path.copy(sat_tmp_path)
1042     
1043     # Update the local.pyconf file that contains the path to the project
1044     local_pyconf_name = "local.pyconf"
1045     local_pyconf_dir = os.path.join(tmp_working_dir, "salomeTools", "data")
1046     local_pyconf_file = os.path.join(local_pyconf_dir, local_pyconf_name)
1047     # Remove the .pyconf file in the root directory of salomeTools if there is
1048     # any. (For example when launching jobs, a pyconf file describing the jobs 
1049     # can be here and is not useful) 
1050     files_or_dir_SAT = os.listdir(os.path.join(tmp_working_dir, "salomeTools"))
1051     for file_or_dir in files_or_dir_SAT:
1052         if file_or_dir.endswith(".pyconf") or file_or_dir.endswith(".txt"):
1053             file_path = os.path.join(tmp_working_dir,
1054                                      "salomeTools",
1055                                      file_or_dir)
1056             os.remove(file_path)
1057     
1058     ff = open(local_pyconf_file, "w")
1059     ff.write(LOCAL_TEMPLATE)
1060     ff.close()
1061     
1062     return sat_tmp_path.path
1063
1064 def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir):
1065     '''For source packages that require that all products be retrieved as
1066        archives, one has to create an archive for each vcs product.
1067        So this method calls the clean and source commands of sat and then creates
1068        the archives.
1069
1070     :param l_pinfo_vcs List: The list of specific configuration corresponding to
1071                              each vcs product
1072     :param sat Sat: The Sat instance that can be called to clean and source the
1073                     products
1074     :param config Config: The global configuration.
1075     :param logger Logger: the logging instance
1076     :param tmp_working_dir str: The temporary local directory containing some 
1077                                 specific directories or files needed in the 
1078                                 source package
1079     :return: the dictionary that stores all the archives to add in the source 
1080              package. {label : (path_on_local_machine, path_in_archive)}
1081     :rtype: dict
1082     '''
1083     # clean the source directory of all the vcs products, then use the source 
1084     # command and thus construct an archive that will not contain the patches
1085     l_prod_names = [pn for pn, __ in l_pinfo_vcs]
1086     if False: # clean is dangerous in user/SOURCES, fixed in tmp_local_working_dir
1087       logger.write(_("\nclean sources\n"))
1088       args_clean = config.VARS.application
1089       args_clean += " --sources --products "
1090       args_clean += ",".join(l_prod_names)
1091       logger.write("WARNING: get_archives_vcs clean\n         '%s'\n" % args_clean, 1)
1092       sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)
1093     if True:
1094       # source
1095       logger.write(_("get sources\n"))
1096       args_source = config.VARS.application
1097       args_source += " --products "
1098       args_source += ",".join(l_prod_names)
1099       svgDir = sat.cfg.APPLICATION.workdir
1100       tmp_local_working_dir = os.path.join(sat.cfg.APPLICATION.workdir, "tmp_package")  # to avoid too many big files in /tmp
1101       sat.cfg.APPLICATION.workdir = tmp_local_working_dir
1102       # DBG.write("SSS sat config.APPLICATION.workdir", sat.cfg.APPLICATION, True)
1103       # DBG.write("sat config id", id(sat.cfg), True)
1104       # note: config is not the same id() as for sat.source()
1105       # sat.source(args_source, batch=True, verbose=5, logger_add_link = logger)
1106       import source
1107       source.run(args_source, sat, logger) #use this mode as runner.cfg reference
1108       
1109       # make the new archives
1110       d_archives_vcs = {}
1111       for pn, pinfo in l_pinfo_vcs:
1112           path_archive = make_archive(pn, pinfo, tmp_local_working_dir)
1113           logger.write("make archive vcs '%s'\n" % path_archive)
1114           d_archives_vcs[pn] = (path_archive,
1115                                 os.path.join(ARCHIVE_DIR, pn + ".tgz"))
1116       sat.cfg.APPLICATION.workdir = svgDir
1117       # DBG.write("END sat config", sat.cfg.APPLICATION, True)
1118     return d_archives_vcs
1119
1120 def make_bin_archive(prod_name, prod_info, where):
1121     '''Create an archive of a product from its install directory.
1122
1123     :param prod_name str: The name of the product.
1124     :param prod_info Config: The specific configuration corresponding to the 
1125                              product
1126     :param where str: The path of the directory where to put the resulting
1127                       archive
1128     :return: The path of the resulting archive
1129     :rtype: str
1130     '''
1131     path_targz_prod = os.path.join(where, prod_name + PACKAGE_EXT)
1132     tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
1133     bin_path = prod_info.install_dir
1134     tar_prod.add(bin_path, arcname=prod_name)
1135     tar_prod.close()
1136     return path_targz_prod       
1137
1138 def make_archive(prod_name, prod_info, where):
1139     '''Create an archive of a product by searching its source directory.
1140
1141     :param prod_name str: The name of the product.
1142     :param prod_info Config: The specific configuration corresponding to the 
1143                              product
1144     :param where str: The path of the directory where to put the resulting
1145                       archive
1146     :return: The path of the resulting archive
1147     :rtype: str
1148     '''
1149     path_targz_prod = os.path.join(where, prod_name + PACKAGE_EXT)
1150     tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
1151     local_path = prod_info.source_dir
1152     if old_python:
1153         tar_prod.add(local_path,
1154                      arcname=prod_name,
1155                      exclude=exclude_VCS_and_extensions_26)
1156     else:
1157         tar_prod.add(local_path,
1158                      arcname=prod_name,
1159                      filter=exclude_VCS_and_extensions)
1160     tar_prod.close()
1161     return path_targz_prod       
1162
1163 def create_project_for_src_package(config, tmp_working_dir, with_vcs, with_ftp):
1164     '''Create a specific project for a source package.
1165
1166     :param config Config: The global configuration.
1167     :param tmp_working_dir str: The temporary local directory containing some 
1168                                 specific directories or files needed in the 
1169                                 source package
1170     :param with_vcs boolean: True if the package is with vcs products (not 
1171                              transformed into archive products)
1172     :param with_ftp boolean: True if the package uses ftp servers to get archives
1173     :return: The dictionary 
1174              {"project" : (produced project, project path in the archive)}
1175     :rtype: Dict
1176     '''
1177
1178     # Create in the working temporary directory the full project tree
1179     project_tmp_dir = os.path.join(tmp_working_dir, PROJECT_DIR)
1180     products_pyconf_tmp_dir = os.path.join(project_tmp_dir,
1181                                          "products")
1182     compil_scripts_tmp_dir = os.path.join(project_tmp_dir,
1183                                          "products",
1184                                          "compil_scripts")
1185     env_scripts_tmp_dir = os.path.join(project_tmp_dir,
1186                                          "products",
1187                                          "env_scripts")
1188     patches_tmp_dir = os.path.join(project_tmp_dir,
1189                                          "products",
1190                                          "patches")
1191     application_tmp_dir = os.path.join(project_tmp_dir,
1192                                          "applications")
1193     for directory in [project_tmp_dir,
1194                       compil_scripts_tmp_dir,
1195                       env_scripts_tmp_dir,
1196                       patches_tmp_dir,
1197                       application_tmp_dir]:
1198         src.ensure_path_exists(directory)
1199
1200     # Create the pyconf that contains the information of the project
1201     project_pyconf_name = "project.pyconf"        
1202     project_pyconf_file = os.path.join(project_tmp_dir, project_pyconf_name)
1203     ff = open(project_pyconf_file, "w")
1204     ff.write(PROJECT_TEMPLATE)
1205     if with_ftp and len(config.PATHS.ARCHIVEFTP) > 0:
1206         ftp_path='ARCHIVEFTP : "'+config.PATHS.ARCHIVEFTP[0]
1207         for ftpserver in config.PATHS.ARCHIVEFTP[1:]:
1208             ftp_path=ftp_path+":"+ftpserver
1209         ftp_path+='"'
1210         ff.write("# ftp servers where to search for prerequisite archives\n")
1211         ff.write(ftp_path)
1212     # add licence paths if any
1213     if len(config.PATHS.LICENCEPATH) > 0:  
1214         licence_path='LICENCEPATH : "'+config.PATHS.LICENCEPATH[0]
1215         for path in config.PATHS.LICENCEPATH[1:]:
1216             licence_path=licence_path+":"+path
1217         licence_path+='"'
1218         ff.write("\n# Where to search for licences\n")
1219         ff.write(licence_path)
1220         
1221
1222     ff.close()
1223     
1224     # Loop over the products to get their pyconf and all the scripts 
1225     # (compilation, environment, patches)
1226     # and create the pyconf file to add to the project
1227     lproducts_name = config.APPLICATION.products.keys()
1228     l_products = src.product.get_products_infos(lproducts_name, config)
1229     for p_name, p_info in l_products:
1230         # skip product with property not_in_package set to yes
1231         if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
1232             continue  
1233         find_product_scripts_and_pyconf(p_name,
1234                                         p_info,
1235                                         config,
1236                                         with_vcs,
1237                                         compil_scripts_tmp_dir,
1238                                         env_scripts_tmp_dir,
1239                                         patches_tmp_dir,
1240                                         products_pyconf_tmp_dir)
1241     
1242     # for the application pyconf, we write the in-memory config directly
1243     # (we do not search for the original pyconf file)
1244     # to avoid problems with overwritten sections and the rm_products key
1245     write_application_pyconf(config, application_tmp_dir)
1246     
1247     d_project = {"project" : (project_tmp_dir, PROJECT_DIR )}
1248     return d_project
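
# For reference (editor's note): the project tree produced above in
# tmp_working_dir/PROJECT is
#   PROJECT/
#     project.pyconf
#     applications/<application>.pyconf
#     products/<product>.pyconf        (one per packaged product)
#     products/compil_scripts/  products/env_scripts/  products/patches/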
1249
1250 def find_product_scripts_and_pyconf(p_name,
1251                                     p_info,
1252                                     config,
1253                                     with_vcs,
1254                                     compil_scripts_tmp_dir,
1255                                     env_scripts_tmp_dir,
1256                                     patches_tmp_dir,
1257                                     products_pyconf_tmp_dir):
1258     '''Create a specific pyconf file for a given product. Get its environment 
1259        script, its compilation script and its patches and put them in the temporary
1260        working directory. This method is used in the source package in order to
1261        construct the specific project.
1262
1263     :param p_name str: The name of the product.
1264     :param p_info Config: The specific configuration corresponding to the 
1265                              product
1266     :param config Config: The global configuration.
1267     :param with_vcs boolean: True if the package is with vcs products (not 
1268                              transformed into archive products)
1269     :param compil_scripts_tmp_dir str: The path to the temporary compilation 
1270                                        scripts directory of the project.
1271     :param env_scripts_tmp_dir str: The path to the temporary environment script 
1272                                     directory of the project.
1273     :param patches_tmp_dir str: The path to the temporary patch scripts 
1274                                 directory of the project.
1275     :param products_pyconf_tmp_dir str: The path to the temporary product 
1276                                         scripts directory of the project.
1277     '''
1278     
1279     # read the pyconf of the product
1280     product_pyconf_cfg = src.pyconf.Config(p_info.from_file)
1281
1282     # find the compilation script if any
1283     if src.product.product_has_script(p_info):
1284         compil_script_path = src.Path(p_info.compil_script)
1285         compil_script_path.copy(compil_scripts_tmp_dir)
1286
1287     # find the environment script if any
1288     if src.product.product_has_env_script(p_info):
1289         env_script_path = src.Path(p_info.environ.env_script)
1290         env_script_path.copy(env_scripts_tmp_dir)
1291
1292     # find the patches if any
1293     if src.product.product_has_patches(p_info):
1294         patches = src.pyconf.Sequence()
1295         for patch_path in p_info.patches:
1296             p_path = src.Path(patch_path)
1297             p_path.copy(patches_tmp_dir)
1298             patches.append(os.path.basename(patch_path), "")
1299
1300     if (not with_vcs) and src.product.product_is_vcs(p_info):
1301         # in non-vcs mode, turn vcs products into archive products.
1302
1303         # depending upon the incremental mode, select impacted sections
1304         if "properties" in p_info and "incremental" in p_info.properties and\
1305             p_info.properties.incremental == "yes":
1306             sections = ["default", "default_win", p_info.section, p_info.section+"_win"]
1307         else:
1308             sections = [p_info.section]
1309         for section in sections:
1310             if section in product_pyconf_cfg and "get_source" in product_pyconf_cfg[section]:
1311                 DBG.write("sat package set archive mode to archive for product %s and section %s" %\
1312                           (p_name,section))
1313                 product_pyconf_cfg[section].get_source = "archive"
1314                 if not "archive_info" in product_pyconf_cfg[section]:
1315                     product_pyconf_cfg[section].addMapping("archive_info",
1316                                         src.pyconf.Mapping(product_pyconf_cfg),
1317                                         "")
1318                     product_pyconf_cfg[section].archive_info.archive_name =\
1319                         p_info.name + ".tgz"
1320     
1321     if (with_vcs) and src.product.product_is_vcs(p_info):
1322         # in vcs mode we must explicitly replace the git server url
1323         # (or it will not be found later because project files are not exported in archives)
1324         for section in product_pyconf_cfg:
1325             # in every section of the product pyconf, replace the git repo definition by its substituted value (found in p_info)
1326             if "git_info" in product_pyconf_cfg[section]:
1327                 for repo in product_pyconf_cfg[section].git_info:
1328                     if repo in p_info.git_info:
1329                         product_pyconf_cfg[section].git_info[repo] =  p_info.git_info[repo]
1330
1331     # write the pyconf file to the temporary project location
1332     product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir,
1333                                            p_name + ".pyconf")
1334     ff = open(product_tmp_pyconf_path, 'w')
1335     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1336     product_pyconf_cfg.__save__(ff, 1)
1337     ff.close()
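    # Editor's note, illustrative only: with with_vcs=False the pyconf written
    # above ends up with the impacted section(s) switched to archive mode,
    # typically (the product name "FOO" is hypothetical):
    #     get_source : "archive"
    #     archive_info : { archive_name : "FOO.tgz" }
    # so the packaged project fetches the product from ARCHIVES instead of its
    # VCS repository.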
1338
1339
1340 def write_application_pyconf(config, application_tmp_dir):
1341     '''Write the application pyconf file in the specific temporary 
1342        directory containing the specific project of a source package.
1343
1344     :param config Config: The global configuration.
1345     :param application_tmp_dir str: The path to the temporary application 
1346                                     scripts directory of the project.
1347     '''
1348     application_name = config.VARS.application
1349     # write the pyconf file to the temporary application location
1350     application_tmp_pyconf_path = os.path.join(application_tmp_dir,
1351                                                application_name + ".pyconf")
1352     with open(application_tmp_pyconf_path, 'w') as f:
1353         f.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1354         res = src.pyconf.Config()
1355         app = src.pyconf.deepCopyMapping(config.APPLICATION)
1356
1357         # set base mode to "no" for the archive
1358         app.base = "no"
1359
1360         # Change the workdir
1361         app.workdir = src.pyconf.Reference(
1362                                  app,
1363                                  src.pyconf.DOLLAR,
1364                                  'LOCAL.workdir')
1365         res.addMapping("APPLICATION", app, "")
1366         res.__save__(f, evaluated=False)
1367     
1368
1369 def sat_package(config, tmp_working_dir, options, logger):
1370     '''Prepare a dictionary that stores all the needed directories and files to
1371        add in a salomeTool package.
1372     
1373     :param tmp_working_dir str: The temporary local working directory 
1374     :param options OptResult: the options of the launched command
1375     :return: the dictionary that stores all the needed directories and files to
1376              add in a salomeTool package.
1377              {label : (path_on_local_machine, path_in_archive)}
1378     :rtype: dict
1379     '''
1380     d_project = {}
1381
1382     # we include sat itself
1383     d_project["all_sat"]=(config.VARS.salometoolsway, "")
1384
1385     # and we overwrite local.pyconf with a clean version.
1386     local_pyconf_tmp_path = os.path.join(tmp_working_dir, "local.pyconf")
1387     local_file_path = os.path.join(config.VARS.datadir, "local.pyconf")
1388     local_cfg = src.pyconf.Config(local_file_path)
1389     local_cfg.PROJECTS.project_file_paths=src.pyconf.Sequence(local_cfg.PROJECTS)
1390     local_cfg.LOCAL["base"] = "default"
1391     local_cfg.LOCAL["workdir"] = "default"
1392     local_cfg.LOCAL["log_dir"] = "default"
1393     local_cfg.LOCAL["archive_dir"] = "default"
1394     local_cfg.LOCAL["VCS"] = "None"
1395     local_cfg.LOCAL["tag"] = src.get_salometool_version(config)
1396
1397     # if the archive contains a project, we write its relative path in local.pyconf
1398     if options.project:
1399         project_arch_path = os.path.join("projects", options.project, 
1400                                          os.path.basename(options.project_file_path))
1401         local_cfg.PROJECTS.project_file_paths.append(project_arch_path, "")
1402
1403     ff = open(local_pyconf_tmp_path, 'w')
1404     local_cfg.__save__(ff, 1)
1405     ff.close()
1406     d_project["local.pyconf"]=(local_pyconf_tmp_path, "data/local.pyconf")
1407     return d_project
1408     
1409
1410 def project_package(config, name_project, project_file_path, ftp_mode, tmp_working_dir, embedded_in_sat, logger):
1411     '''Prepare a dictionary that stores all the needed directories and files to
1412        add in a project package.
1413     
1414     :param project_file_path str: The path to the local project.
1415     :param ftp_mode boolean: If True, do not embed the product archives; the package will rely on ftp servers to retrieve them.
1416     :param tmp_working_dir str: The temporary local directory containing some 
1417                                 specific directories or files needed in the 
1418                                 project package
1419     :param embedded_in_sat boolean : the project package is embedded in a sat package
1420     :return: the dictionary that stores all the needed directories and files to
1421              add in a project package.
1422              {label : (path_on_local_machine, path_in_archive)}
1423     :rtype: dict
1424     '''
1425     d_project = {}
1426     # Read the project file and get the directories to add to the package
1427     
1428     try: 
1429       project_pyconf_cfg = config.PROJECTS.projects.__getattr__(name_project)
1430     except:
1431       logger.write("""
1432 WARNING: missing config.PROJECTS.projects.%s, trying to read it now from:\n%s\n""" % (name_project, project_file_path))
1433       project_pyconf_cfg = src.pyconf.Config(project_file_path)
1434       project_pyconf_cfg.PWD = os.path.dirname(project_file_path)
1435     
1436     paths = {"APPLICATIONPATH" : "applications",
1437              "PRODUCTPATH" : "products",
1438              "JOBPATH" : "jobs",
1439              "MACHINEPATH" : "machines"}
1440     if not ftp_mode:
1441         paths["ARCHIVEPATH"] = "archives"
1442
1443     # Loop over the project paths and add them
1444     project_file_name = os.path.basename(project_file_path)
1445     for path in paths:
1446         if path not in project_pyconf_cfg:
1447             continue
1448         if embedded_in_sat:
1449             dest_path = os.path.join("projects", name_project, paths[path])
1450             project_file_dest = os.path.join("projects", name_project, project_file_name)
1451         else:
1452             dest_path = paths[path]
1453             project_file_dest = project_file_name
1454
1455         # Add the directory to the files to add in the package
1456         d_project[path] = (project_pyconf_cfg[path], dest_path)
1457
1458         # Modify the value of the path in the package
1459         project_pyconf_cfg[path] = src.pyconf.Reference(
1460                                     project_pyconf_cfg,
1461                                     src.pyconf.DOLLAR,
1462                                     'project_path + "/' + paths[path] + '"')
1463     
1464     # Modify some values
1465     if "project_path" not in project_pyconf_cfg:
1466         project_pyconf_cfg.addMapping("project_path",
1467                                       src.pyconf.Mapping(project_pyconf_cfg),
1468                                       "")
1469     project_pyconf_cfg.project_path = src.pyconf.Reference(project_pyconf_cfg,
1470                                                            src.pyconf.DOLLAR,
1471                                                            'PWD')
1472     # we don't want to export these two fields
1473     project_pyconf_cfg.__delitem__("file_path")
1474     project_pyconf_cfg.__delitem__("PWD")
1475     if ftp_mode:
1476         project_pyconf_cfg.__delitem__("ARCHIVEPATH")
1477     
1478     # Write the project pyconf file
1479     project_pyconf_tmp_path = os.path.join(tmp_working_dir, project_file_name)
1480     ff = open(project_pyconf_tmp_path, 'w')
1481     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1482     project_pyconf_cfg.__save__(ff, 1)
1483     ff.close()
1484     d_project["Project hat file"] = (project_pyconf_tmp_path, project_file_dest)
1485     
1486     return d_project
1487
1488 def add_readme(config, options, where):
1489     readme_path = os.path.join(where, "README")
1490     with codecs.open(readme_path, "w", 'utf-8') as f:
1491
1492         # templates for building the header
1493         readme_header="""
1494 # This package was generated with sat $version
1495 # Date: $date
1496 # User: $user
1497 # Distribution : $dist
1498
1499 In the following, $$ROOT represents the directory where you have installed 
1500 SALOME (the directory where this file is located).
1501
1502 """
1503         if src.architecture.is_windows():
1504             readme_header = readme_header.replace('$$ROOT','%ROOT%')
1505         readme_compilation_with_binaries="""
1506
1507 compilation based on the binaries used as prerequisites
1508 =======================================================
1509
1510 If you fail to compile the complete application (for example because
1511 you are not root on your system and cannot install missing packages), you
1512 may try a partial compilation based on the binaries.
1513 For that it is necessary to copy the binaries from BINARIES to INSTALL,
1514 and do some substitutions on cmake and .la files (replace the build directories
1515 with local paths).
1516 The procedure to do it is:
1517  1) Remove or rename INSTALL directory if it exists
1518  2) Execute the shell script install_bin.sh:
1519  > cd $ROOT
1520  > ./install_bin.sh
1521  3) Use salomeTools (as explained in the Sources section) and compile only the 
1522     modules you need (with the -p option)
1523
1524 """
1525         readme_header_tpl=string.Template(readme_header)
1526         readme_template_path_bin = os.path.join(config.VARS.internal_dir,
1527                 "README_BIN.template")
1528         readme_template_path_bin_launcher = os.path.join(config.VARS.internal_dir,
1529                 "README_LAUNCHER.template")
1530         readme_template_path_bin_virtapp = os.path.join(config.VARS.internal_dir,
1531                 "README_BIN_VIRTUAL_APP.template")
1532         readme_template_path_src = os.path.join(config.VARS.internal_dir,
1533                 "README_SRC.template")
1534         readme_template_path_pro = os.path.join(config.VARS.internal_dir,
1535                 "README_PROJECT.template")
1536         readme_template_path_sat = os.path.join(config.VARS.internal_dir,
1537                 "README_SAT.template")
1538
1539         # prepare substitution dictionary
1540         d = dict()
1541         d['user'] = config.VARS.user
1542         d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
1543         d['version'] = src.get_salometool_version(config)
1544         d['dist'] = config.VARS.dist
1545         f.write(readme_header_tpl.substitute(d)) # write the general header (common)
1546
1547         if options.binaries or options.sources:
1548             d['application'] = config.VARS.application
1549             d['BINARIES']    = config.INTERNAL.config.binary_dir
1550             d['SEPARATOR'] = config.VARS.sep
1551             if src.architecture.is_windows():
1552                 d['operatingSystem'] = 'Windows'
1553                 d['PYTHON3'] = 'python3'
1554                 d['ROOT']    = '%ROOT%'
1555             else:
1556                 d['operatingSystem'] = 'Linux'
1557                 d['PYTHON3'] = ''
1558                 d['ROOT']    = '$ROOT'
1559             f.write("# Application: " + d['application'] + "\n")
1560             if 'KERNEL' in config.APPLICATION.products:
1561                 VersionSalome = src.get_salome_version(config)
1562                 # Case where SALOME has the launcher that uses the SalomeContext API
1563                 if VersionSalome >= 730:
1564                     d['launcher'] = config.APPLICATION.profile.launcher_name
1565                 else:
1566                     d['virtual_app'] = 'runAppli' # this info is not used for now
1567
1568         # write the specific sections
1569         if options.binaries:
1570             f.write(src.template.substitute(readme_template_path_bin, d))
1571             if "virtual_app" in d:
1572                 f.write(src.template.substitute(readme_template_path_bin_virtapp, d))
1573             if "launcher" in d:
1574                 f.write(src.template.substitute(readme_template_path_bin_launcher, d))
1575
1576         if options.sources:
1577             f.write(src.template.substitute(readme_template_path_src, d))
1578
1579         if options.binaries and options.sources and not src.architecture.is_windows():
1580             f.write(readme_compilation_with_binaries)
1581
1582         if options.project:
1583             f.write(src.template.substitute(readme_template_path_pro, d))
1584
1585         if options.sat:
1586             f.write(src.template.substitute(readme_template_path_sat, d))
1587     
1588     return readme_path
1589
1590 def update_config(config, logger,  prop, value):
1591     '''Remove from config.APPLICATION.products the products that have the property given as input.
1592     
1593     :param config Config: The global config.
1594     :param prop str: The property to filter
1595     :param value str: The value of the property to filter
1596     '''
1597     # if there is no APPLICATION (e.g. 'sat package -t'): nothing to do
1598     if "APPLICATION" in config:
1599         l_product_to_remove = []
1600         for product_name in config.APPLICATION.products.keys():
1601             prod_cfg = src.product.get_product_config(config, product_name)
1602             if src.get_property_in_product_cfg(prod_cfg, prop) == value:
1603                 l_product_to_remove.append(product_name)
1604         for product_name in l_product_to_remove:
1605             config.APPLICATION.products.__delitem__(product_name)
1606             logger.write("Remove product %s with property %s\n" % (product_name, prop), 5)
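    # Example (taken from run() below): drop every product carrying the
    # not_in_package property before building the archive:
    #   update_config(runner.cfg, logger, "not_in_package", "yes")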
1607
1608 def description():
1609     '''method that is called when salomeTools is called with --help option.
1610     
1611     :return: The text to display for the package command description.
1612     :rtype: str
1613     '''
1614     return _("""
1615 The package command creates a tar file archive of a product.
1616 There are four kinds of archive, which can be mixed:
1617
1618  1 - The binary archive. 
1619      It contains the product installation directories plus a launcher.
1620  2 - The sources archive. 
1621      It contains the product archives and a project (the application plus salomeTools).
1622  3 - The project archive. 
1623      It contains a project (give the project file path as argument).
1624  4 - The salomeTools archive. 
1625      It contains the salomeTools utility itself.
1626
1627 example:
1628  >> sat package SALOME-master --binaries --sources""")
1629   
1630 def run(args, runner, logger):
1631     '''method that is called when salomeTools is called with package parameter.
1632     '''
1633     
1634     # Parse the options
1635     (options, args) = parser.parse_args(args)
1636
1637     
1638     # Check that a type of package is called, and only one
1639     all_option_types = (options.binaries,
1640                         options.sources,
1641                         options.project not in ["", None],
1642                         options.sat,
1643                         options.bin_products)
1644
1645     # Check if no option for package type
1646     if all_option_types.count(True) == 0:
1647         msg = _("Error: Precise a type for the package\nUse one of the "
1648                 "following options: --binaries, --sources, --project or"
1649                 " --salometools, --bin_products")
1650         logger.write(src.printcolors.printcError(msg), 1)
1651         logger.write("\n", 1)
1652         return 1
1653     do_create_package = options.binaries or options.sources or options.project or options.sat 
1654
1655     if options.bin_products:
1656         ret = bin_products_archives(runner.cfg, logger, options.with_vcs)
1657         if ret!=0:
1658             return ret
1659     if not do_create_package:
1660         return 0
1661
1662     # continue to create a tar.gz package 
1663
1664     # The default directory where to put the package when it contains neither binaries nor sources
1665     package_default_path = runner.cfg.LOCAL.workdir
1666     # if the package contains binaries or sources:
1667     if options.binaries or options.sources or options.bin_products:
1668         # Check that the command has been called with an application
1669         src.check_config_has_application(runner.cfg)
1670
1671         # Display information
1672         logger.write(_("Packaging application %s\n") % src.printcolors.printcLabel(
1673                                                     runner.cfg.VARS.application), 1)
1674         
1675         # Get the default directory where to put the packages
1676         package_default_path = os.path.join(runner.cfg.APPLICATION.workdir, "PACKAGE")
1677         src.ensure_path_exists(package_default_path)
1678         
1679     # if the package contains a project:
1680     if options.project:
1681         # check that the project is visible by SAT
1682         projectNameFile = options.project + ".pyconf"
1683         foundProject = None
1684         for i in runner.cfg.PROJECTS.project_file_paths:
1685             baseName = os.path.basename(i)
1686             if baseName == projectNameFile:
1687                 foundProject = i
1688                 break
1689
1690         if foundProject is None:
1691             local_path = os.path.join(runner.cfg.VARS.salometoolsway, "data", "local.pyconf")
1692             msg = _("""ERROR: the project %(1)s is not visible to salomeTools.
1693 Known projects are:
1694 %(2)s
1695
1696 Please add it in file:
1697 %(3)s""" % \
1698                     {"1": options.project, "2": "\n  ".join(runner.cfg.PROJECTS.project_file_paths), "3": local_path})
1699             logger.write(src.printcolors.printcError(msg), 1)
1700             logger.write("\n", 1)
1701             return 1
1702         else:
1703             options.project_file_path = foundProject
1704             src.printcolors.print_value(logger, "Project path", options.project_file_path, 2)
1705     
1706     # Remove the products that are filtered by the --without_properties option
1707     if options.without_properties:
1708         prop, value = options.without_properties
1709         update_config(runner.cfg, logger, prop, value)
1710
1711     # Remove from config the products that have the not_in_package property
1712     update_config(runner.cfg, logger, "not_in_package", "yes")
1713
1714     # get the name of the archive or build it
1715     if options.name:
1716         if os.path.basename(options.name) == options.name:
1717             # only a name (not a path)
1718             archive_name = options.name           
1719             dir_name = package_default_path
1720         else:
1721             archive_name = os.path.basename(options.name)
1722             dir_name = os.path.dirname(options.name)
1723         
1724         # strip the extension
1725         if archive_name[-len(".tgz"):] == ".tgz":
1726             archive_name = archive_name[:-len(".tgz")]
1727         if archive_name[-len(".tar.gz"):] == ".tar.gz":
1728             archive_name = archive_name[:-len(".tar.gz")]
1729         
1730     else:
1731         archive_name=""
1732         dir_name = package_default_path
1733         if options.binaries or options.sources:
1734             archive_name = runner.cfg.APPLICATION.name
1735
1736         if options.binaries:
1737             archive_name += "-"+runner.cfg.VARS.dist
1738             
1739         if options.sources:
1740             archive_name += "-SRC"
1741             if options.with_vcs:
1742                 archive_name += "-VCS"
1743
1744         if options.sat:
1745             archive_name += ("salomeTools_" + src.get_salometool_version(runner.cfg))
1746
1747         if options.project:
1748             if options.sat:
1749                 archive_name += "_" 
1750             archive_name += ("satproject_" + options.project)
1751  
1752         if len(archive_name)==0: # no option worked 
1753             msg = _("Error: Cannot name the archive\n"
1754                     " check that at least one of the following options was"
1755                     " selected: --binaries, --sources, --project or"
1756                     " --salometools")
1757             logger.write(src.printcolors.printcError(msg), 1)
1758             logger.write("\n", 1)
1759             return 1
1760  
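    # Editor's note: with the automatic naming above, the archive name is for
    # example <application>-<dist>.tar.gz for --binaries,
    # <application>-SRC[-VCS].tar.gz for --sources,
    # salomeTools_<version>.tar.gz for --salometools and
    # satproject_<project>.tar.gz for --project (suffixes are concatenated
    # when several options are combined).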
1761     path_targz = os.path.join(dir_name, archive_name + PACKAGE_EXT)
1762     
1763     src.printcolors.print_value(logger, "Package path", path_targz, 2)
1764
1765     # Create a working directory for all files that are produced during the
1766     # package creation and that will be removed at the end of the command
1767     tmp_working_dir = os.path.join(runner.cfg.VARS.tmp_root, runner.cfg.VARS.datehour)
1768     src.ensure_path_exists(tmp_working_dir)
1769     logger.write("\n", 5)
1770     logger.write(_("The temporary working directory: %s\n") % tmp_working_dir, 5)
1771     
1772     logger.write("\n", 3)
1773
1774     msg = _("Preparation of files to add to the archive")
1775     logger.write(src.printcolors.printcLabel(msg), 2)
1776     logger.write("\n", 2)
1777     
1778     d_files_to_add={}  # content of the archive
1779
1780     # a dict to hold the paths that will need to be substituted for user recompilations
1781     d_paths_to_substitute={}  
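    # Hypothetical example of its content once filled below: the local binary
    # directory of a product mapped to its path inside the archive, e.g.
    #   {"/data/SALOME-master/BINARIES-CO7" : "INSTALL"}
    # install_bin.sh later performs these substitutions in cmake and .la files.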
1782
1783     if options.binaries:
1784         d_bin_files_to_add = binary_package(runner.cfg,
1785                                             logger,
1786                                             options,
1787                                             tmp_working_dir)
1788         # for each binary dir, store the substitution that will be required 
1789         # for extra compilations
1790         for key in d_bin_files_to_add:
1791             if key.endswith("(bin)"):
1792                 source_dir = d_bin_files_to_add[key][0]
1793                 path_in_archive = d_bin_files_to_add[key][1].replace(
1794                    runner.cfg.INTERNAL.config.binary_dir + runner.cfg.VARS.dist,
1795                    runner.cfg.INTERNAL.config.install_dir)
1796                 if os.path.basename(source_dir)==os.path.basename(path_in_archive):
1797                     # if basename is the same we will just substitute the dirname 
1798                     d_paths_to_substitute[os.path.dirname(source_dir)]=\
1799                         os.path.dirname(path_in_archive)
1800                 else:
1801                     d_paths_to_substitute[source_dir]=path_in_archive
1802
1803         d_files_to_add.update(d_bin_files_to_add)
1804     if options.sources:
1805         d_files_to_add.update(source_package(runner,
1806                                         runner.cfg,
1807                                         logger, 
1808                                         options,
1809                                         tmp_working_dir))
1810         if options.binaries:
1811             # for archives with bin and sources we provide a shell script able to 
1812             # install binaries for compilation
1813             file_install_bin=produce_install_bin_file(runner.cfg,logger,
1814                                                       tmp_working_dir,
1815                                                       d_paths_to_substitute,
1816                                                       "install_bin.sh")
1817             d_files_to_add.update({"install_bin" : (file_install_bin, "install_bin.sh")})
1818             logger.write("substitutions that need to be done later : \n", 5)
1819             logger.write(str(d_paths_to_substitute), 5)
1820             logger.write("\n", 5)
1821     else:
1822         # the --salometools option is not considered when --sources is selected, as this option
1823         # already brings salomeTools!
1824         if options.sat:
1825             d_files_to_add.update(sat_package(runner.cfg, tmp_working_dir, 
1826                                   options, logger))
1827         
1828     if options.project:
1829         DBG.write("config for package %s" % options.project, runner.cfg)
1830         d_files_to_add.update(project_package(runner.cfg, options.project, options.project_file_path, options.ftp, tmp_working_dir, options.sat, logger))
1831
1832     if not(d_files_to_add):
1833         msg = _("Error: Empty dictionary to build the archive!\n")
1834         logger.write(src.printcolors.printcError(msg), 1)
1835         logger.write("\n", 1)
1836         return 1
1837
1838     # Add the README file in the package
1839     local_readme_tmp_path = add_readme(runner.cfg, options, tmp_working_dir)
1840     d_files_to_add["README"] = (local_readme_tmp_path, "README")
1841
1842     # Add the additional files of option add_files
1843     if options.add_files:
1844         for file_path in options.add_files:
1845             if not os.path.exists(file_path):
1846                 logger.write(_("WARNING: the file %s is not accessible.\n") % file_path)
1847                 continue
1848             file_name = os.path.basename(file_path)
1849             d_files_to_add[file_name] = (file_path, file_name)
1850
1851     logger.write("\n", 2)
1852     logger.write(src.printcolors.printcLabel(_("Actually do the package")), 2)
1853     logger.write("\n", 2)
1854     logger.write("\nfiles and directories to add:\n%s\n\n" % PP.pformat(d_files_to_add), 5)
1855
1856     res = 0
1857     try:
1858         # Creating the object tarfile
1859         tar = tarfile.open(path_targz, mode='w:gz')
1860         
1861         # get the filtering function if needed
1862         if old_python:
1863             filter_function = exclude_VCS_and_extensions_26
1864         else:
1865             filter_function = exclude_VCS_and_extensions
1866
1867         # Add the files to the tarfile object
1868         res = add_files(tar, archive_name, d_files_to_add, logger, f_exclude=filter_function)
1869         tar.close()
1870     except KeyboardInterrupt:
1871         logger.write(src.printcolors.printcError("\nERROR: forced interruption\n"), 1)
1872         logger.write(_("Removing the temporary working directory '%s'... ") % tmp_working_dir, 1)
1873         # remove the working directory
1874         shutil.rmtree(tmp_working_dir)
1875         logger.write(_("OK"), 1)
1876         logger.write(_("\n"), 1)
1877         return 1
1878     
1879     # case of no application: only package sat itself, as with 'sat package -t'
1880     try:
1881         app = runner.cfg.APPLICATION
1882     except:
1883         app = None
1884
1885     # unconditionally remove the tmp_local_working_dir
1886     if app is not None:
1887         tmp_local_working_dir = os.path.join(app.workdir, "tmp_package")
1888         if os.path.isdir(tmp_local_working_dir):
1889             shutil.rmtree(tmp_local_working_dir)
1890
1891     # remove the tmp directory, unless user has registered as developer
1892     if os.path.isdir(tmp_working_dir) and (not DBG.isDeveloper()):
1893         shutil.rmtree(tmp_working_dir)
1894     
1895     # Print again the path of the package
1896     logger.write("\n", 2)
1897     src.printcolors.print_value(logger, "Package path", path_targz, 2)
1898     
1899     return res