1 #!/usr/bin/env python
2 #-*- coding:utf-8 -*-
3 #  Copyright (C) 2010-2012  CEA/DEN
4 #
5 #  This library is free software; you can redistribute it and/or
6 #  modify it under the terms of the GNU Lesser General Public
7 #  License as published by the Free Software Foundation; either
8 #  version 2.1 of the License.
9 #
10 #  This library is distributed in the hope that it will be useful,
11 #  but WITHOUT ANY WARRANTY; without even the implied warranty of
12 #  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13 #  Lesser General Public License for more details.
14 #
15 #  You should have received a copy of the GNU Lesser General Public
16 #  License along with this library; if not, write to the Free Software
17 #  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
18
19 import os
20 import stat
21 import shutil
22 import datetime
23 import tarfile
24 import codecs
25 import string
26 import glob
27 import pprint as PP
28 import sys
29 import src
30
31 from application import get_SALOME_modules
32 import src.debug as DBG
33
34 old_python = sys.version_info[0] == 2 and sys.version_info[1] <= 6
35
36 BINARY = "binary"
37 SOURCE = "Source"
38 PROJECT = "Project"
39 SAT = "Sat"
40
41 ARCHIVE_DIR = "ARCHIVES"
42 PROJECT_DIR = "PROJECT"
43
44 IGNORED_DIRS = [".git", ".svn"]
45 IGNORED_EXTENSIONS = []
46
47 PACKAGE_EXT=".tar.gz" # the extension we use for the packages
48
49 PROJECT_TEMPLATE = """#!/usr/bin/env python
50 #-*- coding:utf-8 -*-
51
52 # The path to the archive root directory
53 root_path : $PWD + "/../"
54 # path to the PROJECT
55 project_path : $PWD + "/"
56
57 # Where to search the archives of the products
58 ARCHIVEPATH : $root_path + "ARCHIVES"
59 # Where to search the pyconf of the applications
60 APPLICATIONPATH : $project_path + "applications/"
61 # Where to search the pyconf of the products
62 PRODUCTPATH : $project_path + "products/"
63 # Where to search the pyconf of the jobs of the project
64 JOBPATH : $project_path + "jobs/"
65 # Where to search the pyconf of the machines of the project
66 MACHINEPATH : $project_path + "machines/"
67 """
68
69 LOCAL_TEMPLATE = ("""#!/usr/bin/env python
70 #-*- coding:utf-8 -*-
71
72   LOCAL :
73   {
74     base : 'default'
75     workdir : 'default'
76     log_dir : 'default'
77     archive_dir : 'default'
78     VCS : 'unknown'
79     tag : 'unknown'
80   }
81
82 PROJECTS :
83 {
84 project_file_paths : [$VARS.salometoolsway + $VARS.sep + \"..\" + $VARS.sep"""
85 """ + \"""" + PROJECT_DIR + """\" + $VARS.sep + "project.pyconf"]
86 }
87 """)
88
89 # Define all possible options for the package command:  sat package <options>
90 parser = src.options.Options()
91 parser.add_option('b', 'binaries', 'boolean', 'binaries',
92     _('Optional: Produce a binary package.'), False)
93 parser.add_option('f', 'force_creation', 'boolean', 'force_creation',
94     _('Optional: Only binary package: produce the archive even if '
95       'there are some missing products.'), False)
96 parser.add_option('s', 'sources', 'boolean', 'sources',
97     _('Optional: Produce a compilable archive of the sources of the '
98       'application.'), False)
99 parser.add_option('', 'bin_products', 'boolean', 'bin_products',
100     _('Optional: Create binary archives for all products.'), False)
101 parser.add_option('', 'with_vcs', 'boolean', 'with_vcs',
102     _('Optional: Do not make archive for products in VCS mode (git, cvs, svn). ' 
103       'Sat prepare will use VCS mode instead to retrieve them'),
104     False)
105 parser.add_option('', 'ftp', 'boolean', 'ftp',
106     _('Optional: Do not embed archives for products in archive mode. '
107       'Sat prepare will use ftp instead to retrieve them.'),
108     False)
109 parser.add_option('e', 'exe', 'string', 'exe',
110     _('Optional: Produce an extra launcher based upon the exe given as argument.'), "")
111 parser.add_option('p', 'project', 'string', 'project',
112     _('Optional: Produce an archive that contains a project.'), "")
113 parser.add_option('t', 'salometools', 'boolean', 'sat',
114     _('Optional: Produce an archive that contains salomeTools.'), False)
115 parser.add_option('n', 'name', 'string', 'name',
116     _('Optional: The name or full path of the archive.'), None)
117 parser.add_option('', 'add_files', 'list2', 'add_files',
118     _('Optional: The list of additional files to add to the archive.'), [])
119 parser.add_option('', 'without_properties', 'properties', 'without_properties',
120     _('Optional: Filter the products by their properties.\n\tSyntax: '
121       '--without_properties <property>:<value>'))
122
123
124 def add_files(tar, name_archive, d_content, logger, f_exclude=None):
125     '''Create an archive containing all directories and files that are given in
126        the d_content argument.
127     
128     :param tar tarfile: The tarfile instance used to make the archive.
129     :param name_archive str: The name of the archive to make.
130     :param d_content dict: The dictionary that contains all directories and files
131                            to add in the archive.
132                            d_content[label] = 
133                                         (path_on_local_machine, path_in_archive)
134     :param logger Logger: the logging instance
135     :param f_exclude Function: the function that filters out entries (not used here)
136     :return: 0 if success, 1 if not.
137     :rtype: int
138     '''
139     # get the max length of the names in order to align the display
140     max_len = len(max(d_content.keys(), key=len))
141     
142     success = 0
143     # loop over each directory or file stored in the d_content dictionary
144     names = sorted(d_content.keys())
145     DBG.write("add tar names", names)
146
147     # used to avoid duplications (for pip install in python, or single_install_dir cases)
148     already_added=set() 
149     for name in names:
150         # display information
151         len_points = max_len - len(name) + 3
152         local_path, archive_path = d_content[name]
153         in_archive = os.path.join(name_archive, archive_path)
154         logger.write(name + " " + len_points * "." + " "+ in_archive + " ", 3)
155         # Get the local path and the path in the archive
156         # of the directory or file to add,
157         # then add it to the archive
158         try:
159             key=local_path+"->"+in_archive
160             if key not in already_added:
161                 if old_python:
162                     tar.add(local_path,
163                                  arcname=in_archive,
164                                  exclude=exclude_VCS_and_extensions_26)
165                 else:
166                     tar.add(local_path,
167                                  arcname=in_archive,
168                                  filter=exclude_VCS_and_extensions)
169                 already_added.add(key)
170             logger.write(src.printcolors.printcSuccess(_("OK")), 3)
171         except Exception as e:
172             logger.write(src.printcolors.printcError(_("KO ")), 3)
173             logger.write(str(e), 3)
174             success = 1
175         logger.write("\n", 3)
176     return success
177
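# --- Illustrative sketch (editorial addition, not part of the command) -------
# How add_files() is typically fed: d_content maps a display label to a pair
# (path on the local machine, path inside the archive).  The application name
# and paths below are hypothetical.
#
#   d_example = {
#       "KERNEL (bin)": ("/home/user/MYAPP/INSTALL/KERNEL", "BINARIES-XX/KERNEL"),
#       "launcher":     ("/tmp/tmp_package/salome",         "salome"),
#   }
#   with tarfile.open("MYAPP.tar.gz", mode='w:gz') as tar:
#       res = add_files(tar, "MYAPP", d_example, logger)  # 0 on success, 1 on error
# ------------------------------------------------------------------------------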
178
179 def exclude_VCS_and_extensions_26(filename):
180     ''' The function used to exclude the VCS repositories (like .git)
181         from the package (only for python 2.6)
182
183     :param filename Str: The filename to exclude (or not).
184     :return: True if the file has to be excluded
185     :rtype: Boolean
186     '''
187     for dir_name in IGNORED_DIRS:
188         if dir_name in filename:
189             return True
190     for extension in IGNORED_EXTENSIONS:
191         if filename.endswith(extension):
192             return True
193     return False
194
195 def exclude_VCS_and_extensions(tarinfo):
196     ''' The tarfile filter used to exclude the VCS repositories (like .git)
197         from the package
198
199     :param tarinfo TarInfo: The tarfile member to filter (or not).
200     :return: None if the member has to be excluded, the tarinfo otherwise
201     :rtype: TarInfo or None
202     '''
203     filename = tarinfo.name
204     for dir_name in IGNORED_DIRS:
205         if dir_name in filename:
206             return None
207     for extension in IGNORED_EXTENSIONS:
208         if filename.endswith(extension):
209             return None
210     return tarinfo
211
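# --- Illustrative sketch (editorial addition) ---------------------------------
# The two callbacks above do the same job for the two tarfile.add() APIs:
# 'exclude=' (python <= 2.6, receives a filename) and 'filter=' (python >= 2.7,
# receives a TarInfo and returns it or None).  A hypothetical standalone use:
#
#   with tarfile.open("/tmp/example.tar.gz", mode='w:gz') as tar:
#       tar.add("/path/to/some/product",            # made-up local directory
#               arcname="some_product",
#               filter=exclude_VCS_and_extensions)  # drops .git / .svn entries
# -------------------------------------------------------------------------------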
212 def produce_relative_launcher(config,
213                               logger,
214                               file_dir,
215                               file_name,
216                               binaries_dir_name):
217     '''Create a specific SALOME launcher for the binary package. This launcher 
218        uses relative paths.
219     
220     :param config Config: The global configuration.
221     :param logger Logger: the logging instance
222     :param file_dir str: the directory where to put the launcher
223     :param file_name str: The launcher name
224     :param binaries_dir_name str: the name of the directory where the binaries
225                                   are, inside the archive.
226     :return: the path of the produced launcher
227     :rtype: str
228     '''
229     
230     # set base mode to "no" for the archive - save current mode to restore it at the end
231     if "base" in config.APPLICATION:
232         base_setting=config.APPLICATION.base 
233     else:
234         base_setting="maybe"
235     config.APPLICATION.base="no"
236
237     # get KERNEL installation path 
238     kernel_info = src.product.get_product_config(config, "KERNEL")
239     kernel_base_name=os.path.basename(kernel_info.install_dir)
240     if kernel_info.install_mode == "base":
241         # case of kernel installed in base. the kernel install dir name is different in the archive
242         kernel_base_name=os.path.basename(os.path.dirname(kernel_info.install_dir))
243     
244     kernel_root_dir = os.path.join(binaries_dir_name, kernel_base_name)
245
246     # set kernel bin dir (considering fhs property)
247     kernel_cfg = src.product.get_product_config(config, "KERNEL")
248     if src.get_property_in_product_cfg(kernel_cfg, "fhs"):
249         bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin") 
250     else:
251         bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin","salome") 
252
253     # check if the application contains an application module
254     # check also if the application has a distene product, 
255     # in this case get its licence file name
256     l_product_info = src.product.get_products_infos(config.APPLICATION.products.keys(), config)
257     salome_application_name="Not defined" 
258     distene_licence_file_name=False
259     for prod_name, prod_info in l_product_info:
260         # look for a "salome application" and a distene product
261         if src.get_property_in_product_cfg(prod_info, "is_distene") == "yes":
262             distene_licence_file_name = src.product.product_has_licence(prod_info, 
263                                             config.PATHS.LICENCEPATH) 
264         if src.get_property_in_product_cfg(prod_info, "is_salome_application") == "yes":
265             salome_application_name=prod_info.name
266
267     # if the application contains an application module, we set ABSOLUTE_APPLI_PATH to it
268     # if not we set it to KERNEL_INSTALL_DIR, which is sufficient, except for salome test
269     if salome_application_name == "Not defined":
270         app_root_dir=kernel_root_dir
271     else:
272         app_root_dir=os.path.join(binaries_dir_name, salome_application_name)
273
274     additional_env={}
275     additional_env['sat_bin_kernel_install_dir'] = "out_dir_Path + " +\
276                                                    config.VARS.sep + bin_kernel_install_dir
277     if "python3" in config.APPLICATION and config.APPLICATION.python3 == "yes":
278         additional_env['sat_python_version'] = 3
279     else:
280         additional_env['sat_python_version'] = 2
281
282     additional_env['ABSOLUTE_APPLI_PATH'] = "out_dir_Path" + config.VARS.sep + app_root_dir
283
284     # create an environment file writer
285     writer = src.environment.FileEnvWriter(config,
286                                            logger,
287                                            file_dir,
288                                            src_root=None,
289                                            env_info=None)
290     
291     filepath = os.path.join(file_dir, file_name)
292     # Write
293     writer.write_env_file(filepath,
294                           False,  # for launch
295                           "cfgForPy",
296                           additional_env=additional_env,
297                           no_path_init="False",
298                           for_package = binaries_dir_name)
299     
300     # Little hack to put out_dir_Path outside the strings
301     src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' )
302     src.replace_in_file(filepath, "r'out_dir_Path + ", "out_dir_Path + r'" )
303     
304     # A hack to put a call to a file for distene licence.
305     # It does nothing to an application that has no distene product
306     if distene_licence_file_name:
307         logger.write("Application has a distene licence file! We use it in package launcher", 5)
308         hack_for_distene_licence(filepath, distene_licence_file_name)
309        
310     # change the rights in order to make the file executable for everybody
311     os.chmod(filepath,
312              stat.S_IRUSR |
313              stat.S_IRGRP |
314              stat.S_IROTH |
315              stat.S_IWUSR |
316              stat.S_IXUSR |
317              stat.S_IXGRP |
318              stat.S_IXOTH)
319
320     # restore the modified setting to its initial value
321     config.APPLICATION.base=base_setting
322
323     return filepath
324
325 def hack_for_distene_licence(filepath, licence_file):
326     '''Replace the distene licence env variable by a call to a file.
327     
328     :param filepath Str: The path to the launcher to modify.
329     '''  
330     shutil.move(filepath, filepath + "_old")
331     fileout= filepath
332     filein = filepath + "_old"
333     fin = open(filein, "r")
334     fout = open(fileout, "w")
335     text = fin.readlines()
336     # Find the Distene section
337     num_line = -1
338     for i,line in enumerate(text):
339         if "# Set DISTENE License" in line:
340             num_line = i
341             break
342     if num_line == -1:
343         # No distene product, there is nothing to do
344         fin.close()
345         for line in text:
346             fout.write(line)
347         fout.close()
348         return
349     del text[num_line +1]
350     del text[num_line +1]
351     text_to_insert ="""    try:
352         distene_licence_file=r"%s"
353         if sys.version_info[0] >= 3 and sys.version_info[1] >= 5:
354             import importlib.util
355             spec_dist = importlib.util.spec_from_file_location("distene_licence", distene_licence_file)
356             distene=importlib.util.module_from_spec(spec_dist)
357             spec_dist.loader.exec_module(distene)
358         else:
359             import imp
360             distene = imp.load_source('distene_licence', distene_licence_file)
361         distene.set_distene_variables(context)
362     except:
363         pass\n"""  % licence_file
364     text.insert(num_line + 1, text_to_insert)
365     for line in text:
366         fout.write(line)
367     fin.close()    
368     fout.close()
369     return
370     
371 def produce_relative_env_files(config,
372                               logger,
373                               file_dir,
374                               binaries_dir_name,
375                               exe_name=None):
376     '''Create some specific environment files for the binary package. These 
377        files use relative paths.
378     
379     :param config Config: The global configuration.
380     :param logger Logger: the logging instance
381     :param file_dir str: the directory where to put the files
382     :param binaries_dir_name str: the name of the directory where the binaries
383                                   are, inside the archive.
384     :param exe_name str: if given, generate a launcher executing exe_name
385     :return: the path of the produced environment file
386     :rtype: str
387     '''  
388
389     # set base mode to "no" for the archive - save current mode to restore it at the end
390     if "base" in config.APPLICATION:
391         base_setting=config.APPLICATION.base 
392     else:
393         base_setting="maybe"
394     config.APPLICATION.base="no"
395
396     # create an environment file writer
397     writer = src.environment.FileEnvWriter(config,
398                                            logger,
399                                            file_dir,
400                                            src_root=None)
401     
402     if src.architecture.is_windows():
403       shell = "bat"
404       filename  = "env_launch.bat"
405     else:
406       shell = "bash"
407       filename  = "env_launch.sh"
408
409     if exe_name:
410         filename=os.path.basename(exe_name)
411
412     # Write
413     filepath = writer.write_env_file(filename,
414                           False, # for launch
415                           shell,
416                           for_package = binaries_dir_name)
417
418     # Little hack to put out_dir_Path as environment variable
419     if src.architecture.is_windows() :
420       src.replace_in_file(filepath, '"out_dir_Path', '"%out_dir_Path%' )
421       src.replace_in_file(filepath, '=out_dir_Path', '=%out_dir_Path%' )
422       src.replace_in_file(filepath, ';out_dir_Path', ';%out_dir_Path%' )
423     else:
424       src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )
425       src.replace_in_file(filepath, ':out_dir_Path', ':${out_dir_Path}' )
426
427     if exe_name:
428         if src.architecture.is_windows():
429             cmd="\n\nrem Launch exe with user arguments\n%s " % exe_name + "%*"
430         else:
431             cmd='\n\n# Launch exe with user arguments\n%s "$*"' % exe_name
432         with open(filepath, "a") as exe_launcher:
433             exe_launcher.write(cmd)
434
435     # change the rights in order to make the file executable for everybody
436     os.chmod(filepath,
437              stat.S_IRUSR |
438              stat.S_IRGRP |
439              stat.S_IROTH |
440              stat.S_IWUSR |
441              stat.S_IXUSR |
442              stat.S_IXGRP |
443              stat.S_IXOTH)
444     
445     # restore the modified setting to its initial value
446     config.APPLICATION.base=base_setting
447
448     return filepath
449
450 def produce_install_bin_file(config,
451                              logger,
452                              file_dir,
453                              d_sub,
454                              file_name):
455     '''Create a bash shell script which does substitutions in the BINARIES dir
456        in order to use it for extra compilations.
457     
458     :param config Config: The global configuration.
459     :param logger Logger: the logging instance
460     :param file_dir str: the directory where to put the files
461     :param d_sub dict: the dictionary that contains the substitutions to be done
462     :param file_name str: the name of the install script file
463     :return: the path of the produced install script
464     :rtype: str
465     '''  
466     # Write
467     filepath = os.path.join(file_dir, file_name)
468     # open the file and write into it
469     # use codec utf-8 as sat variables are in unicode
470     with codecs.open(filepath, "w", 'utf-8') as installbin_file:
471         installbin_template_path = os.path.join(config.VARS.internal_dir,
472                                         "INSTALL_BIN.template")
473         
474         # build the name of the directory that will contain the binaries
475         binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
476         # build the substitution loop
477         loop_cmd = "for f in $(grep -RIl"
478         for key in d_sub:
479             loop_cmd += " -e "+ key
480         loop_cmd += ' ' + config.INTERNAL.config.install_dir +\
481                     '); do\n     sed -i "\n'
482         for key in d_sub:
483             loop_cmd += "        s?" + key + "?$(pwd)/" + d_sub[key] + "?g\n"
484         loop_cmd += '            " $f\ndone'
485
486         d={}
487         d["BINARIES_DIR"] = binaries_dir_name
488         d["SUBSTITUTION_LOOP"]=loop_cmd
489         d["INSTALL_DIR"]=config.INTERNAL.config.install_dir
490         
491         # substitute the template and write it in file
492         content=src.template.substitute(installbin_template_path, d)
493         installbin_file.write(content)
494         # change the rights in order to make the file executable for everybody
495         os.chmod(filepath,
496                  stat.S_IRUSR |
497                  stat.S_IRGRP |
498                  stat.S_IROTH |
499                  stat.S_IWUSR |
500                  stat.S_IXUSR |
501                  stat.S_IXGRP |
502                  stat.S_IXOTH)
503     
504     return filepath
505
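# --- Illustrative sketch (editorial addition) ---------------------------------
# Rough shape of the substitution loop that produce_install_bin_file() injects
# into INSTALL_BIN.template, for a hypothetical d_sub and assuming the internal
# install_dir is "INSTALL":
#
#   d_sub = {"/home/user/MYAPP/INSTALL": "BINARIES-XX"}
#   # generated loop_cmd (approximately):
#   #   for f in $(grep -RIl -e /home/user/MYAPP/INSTALL INSTALL); do
#   #        sed -i "
#   #           s?/home/user/MYAPP/INSTALL?$(pwd)/BINARIES-XX?g
#   #           " $f
#   #   done
# -------------------------------------------------------------------------------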
506 def product_appli_creation_script(config,
507                                   logger,
508                                   file_dir,
509                                   binaries_dir_name):
510     '''Create a script that can produce an application (EDF style) in the binary
511        package.
512     
513     :param config Config: The global configuration.
514     :param logger Logger: the logging instance
515     :param file_dir str: the directory where to put the file
516     :param binaries_dir_name str: the name of the directory where the binaries
517                                   are, inside the archive.
518     :return: the path of the produced script file
519     :rtype: Str
520     '''
521     template_name = "create_appli.py.for_bin_packages.template"
522     template_path = os.path.join(config.VARS.internal_dir, template_name)
523     text_to_fill = open(template_path, "r").read()
524     text_to_fill = text_to_fill.replace("TO BE FILLED 1",
525                                         '"' + binaries_dir_name + '"')
526     
527     text_to_add = ""
528     for product_name in get_SALOME_modules(config):
529         product_info = src.product.get_product_config(config, product_name)
530        
531         if src.product.product_is_smesh_plugin(product_info):
532             continue
533
534         if 'install_dir' in product_info and bool(product_info.install_dir):
535             if src.product.product_is_cpp(product_info):
536                 # cpp module
537                 for cpp_name in src.product.get_product_components(product_info):
538                     line_to_add = ("<module name=\"" + 
539                                    cpp_name + 
540                                    "\" gui=\"yes\" path=\"''' + "
541                                    "os.path.join(dir_bin_name, \"" + 
542                                    cpp_name + "\") + '''\"/>")
543             else:
544                 # regular module
545                 line_to_add = ("<module name=\"" + 
546                                product_name + 
547                                "\" gui=\"yes\" path=\"''' + "
548                                "os.path.join(dir_bin_name, \"" + 
549                                product_name + "\") + '''\"/>")
550             text_to_add += line_to_add + "\n"
551     
552     filled_text = text_to_fill.replace("TO BE FILLED 2", text_to_add)
553     
554     tmp_file_path = os.path.join(file_dir, "create_appli.py")
555     ff = open(tmp_file_path, "w")
556     ff.write(filled_text)
557     ff.close()
558     
559     # change the rights in order to make the file executable for everybody
560     os.chmod(tmp_file_path,
561              stat.S_IRUSR |
562              stat.S_IRGRP |
563              stat.S_IROTH |
564              stat.S_IWUSR |
565              stat.S_IXUSR |
566              stat.S_IXGRP |
567              stat.S_IXOTH)
568     
569     return tmp_file_path
570
571 def bin_products_archives(config, logger):
572     '''Prepare binary packages for all products.
573     :param config Config: The global configuration.
574     :param logger Logger: the logging instance
575     :return: the error status (0 on success)
576     '''
577
578     logger.write("Make %s binary archives\n" % config.VARS.dist)
579     # Get the default directory where to put the packages
580     binpackage_path = os.path.join(config.APPLICATION.workdir, "PACKAGE", "products")
581     src.ensure_path_exists(binpackage_path)
582     # Get the list of product installation to add to the archive
583     l_products_name = sorted(config.APPLICATION.products.keys())
584     l_product_info = src.product.get_products_infos(l_products_name,
585                                                     config)
586     # loop on products : filter products, analyse properties,
587     # and create a binary archive for each installed product
588     l_not_installed=[] # store not installed products for warning at the end
589     for prod_name, prod_info in l_product_info:
590         # ignore the native and fixed products for install directories
591         if (src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes"
592                 or src.product.product_is_native(prod_info) 
593                 or src.product.product_is_fixed(prod_info)
594                 or not src.product.product_compiles(prod_info)):
595             continue
596         if not src.product.check_installation(config, prod_info):
597             l_not_installed.append(prod_name)
598             continue  # product is not installed, we skip it
599         # prepare call to make_bin_archive
600         path_targz_prod = os.path.join(binpackage_path, prod_name + '-' + prod_info.version + "-" + config.VARS.dist + PACKAGE_EXT) 
601         targz_prod = tarfile.open(path_targz_prod, mode='w:gz')
602         bin_path = prod_info.install_dir
603         targz_prod.add(bin_path)
604         targz_prod.close()
605         # compute the MD5 checksum of the archive and write it in a .md5 file
606         import hashlib
607         with open(path_targz_prod, "rb") as f:
608             data = f.read()  # read the archive as bytes
609             readable_hash = hashlib.md5(data).hexdigest()
610             with open(path_targz_prod + ".md5", "w") as md5sum:
611                 md5sum.write("%s  %s" % (readable_hash, os.path.basename(path_targz_prod)))
612             logger.write("   archive : %s   (md5sum = %s)\n" % (path_targz_prod, readable_hash))
613
614     return 0
615
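# --- Illustrative sketch (editorial addition) ---------------------------------
# bin_products_archives() writes one archive plus one checksum file per
# installed product, e.g. (product name, version and dist are hypothetical):
#
#   <workdir>/PACKAGE/products/MEDCOUPLING-9.9.0-FD32.tar.gz
#   <workdir>/PACKAGE/products/MEDCOUPLING-9.9.0-FD32.tar.gz.md5
#
# The .md5 file holds "<md5sum>  <archive basename>", the usual md5sum format,
# so it can be checked with, e.g., "md5sum -c <archive>.md5".
# -------------------------------------------------------------------------------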
616 def binary_package(config, logger, options, tmp_working_dir):
617     '''Prepare a dictionary that stores all the needed directories and files to
618        add in a binary package.
619     
620     :param config Config: The global configuration.
621     :param logger Logger: the logging instance
622     :param options OptResult: the options of the launched command
623     :param tmp_working_dir str: The temporary local directory containing some 
624                                 specific directories or files needed in the 
625                                 binary package
626     :return: the dictionary that stores all the needed directories and files to
627              add in a binary package.
628              {label : (path_on_local_machine, path_in_archive)}
629     :rtype: dict
630     '''
631
632     # Get the list of product installation to add to the archive
633     l_products_name = sorted(config.APPLICATION.products.keys())
634     l_product_info = src.product.get_products_infos(l_products_name,
635                                                     config)
636
637     # suppress compile time products for binaries-only archives
638     if not options.sources:
639         update_config(config, logger, "compile_time", "yes")
640
641     l_install_dir = []
642     l_source_dir = []
643     l_not_installed = []
644     l_sources_not_present = []
645     generate_mesa_launcher = False  # a flag to know if we generate a mesa launcher
646     if ("APPLICATION" in config  and
647         "properties"  in config.APPLICATION  and
648         "mesa_launcher_in_package"    in config.APPLICATION.properties  and
649         config.APPLICATION.properties.mesa_launcher_in_package == "yes") :
650             generate_mesa_launcher=True
651
652     # first loop on products : filter products, analyse properties,
653     # and store the information that will be used to create the archive in the second loop 
654     for prod_name, prod_info in l_product_info:
655         # skip product with property not_in_package set to yes
656         if src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes":
657             continue  
658
659         # Add the sources of the products that have the property 
660         # sources_in_package : "yes"
661         if src.get_property_in_product_cfg(prod_info,
662                                            "sources_in_package") == "yes":
663             if os.path.exists(prod_info.source_dir):
664                 l_source_dir.append((prod_name, prod_info.source_dir))
665             else:
666                 l_sources_not_present.append(prod_name)
667
668         # ignore the native and fixed products for install directories
669         if (src.product.product_is_native(prod_info) 
670                 or src.product.product_is_fixed(prod_info)
671                 or not src.product.product_compiles(prod_info)):
672             continue
673         #
674         # products with the single_install_dir property will be installed in the PRODUCTS directory of the archive
675         is_single_dir=(src.appli_test_property(config,"single_install_dir", "yes") and \
676                        src.product.product_test_property(prod_info,"single_install_dir", "yes"))
677         if src.product.check_installation(config, prod_info):
678             l_install_dir.append((prod_name, prod_info.name, prod_info.install_dir,
679                                   is_single_dir, prod_info.install_mode))
680         else:
681             l_not_installed.append(prod_name)
682         
683         # Add also the cpp generated modules (if any)
684         if src.product.product_is_cpp(prod_info):
685             # cpp module
686             for name_cpp in src.product.get_product_components(prod_info):
687                 install_dir = os.path.join(config.APPLICATION.workdir,
688                                            config.INTERNAL.config.install_dir,
689                                            name_cpp) 
690                 if os.path.exists(install_dir):
691                     l_install_dir.append((name_cpp, name_cpp, install_dir, False, "value"))
692                 else:
693                     l_not_installed.append(name_cpp)
694         
695     # check the name of the directory that could contain the binaries
696     # from a previous detar
697     binaries_from_detar = os.path.join(
698                               config.APPLICATION.workdir,
699                               config.INTERNAL.config.binary_dir + config.VARS.dist)
700     if os.path.exists(binaries_from_detar):
701          logger.write("""
702 WARNING: existing binaries directory from previous detar installation:
703          %s
704          To make a new package from this, you have to:
705          1) install binaries in INSTALL directory with the script "install_bin.sh" 
706             see README file for more details
707          2) or recompile everything in INSTALL with "sat compile" command 
708             this step is long, and requires some linux packages to be installed 
709             on your system\n
710 """ % binaries_from_detar)
711     
712     # Print warning or error if there are some missing products
713     if len(l_not_installed) > 0:
714         text_missing_prods = ""
715         for p_name in l_not_installed:
716             text_missing_prods += " - " + p_name + "\n"
717         if not options.force_creation:
718             msg = _("ERROR: there are missing product installations:")
719             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
720                                      text_missing_prods),
721                          1)
722             raise src.SatException(msg)
723         else:
724             msg = _("WARNING: there are missing product installations:")
725             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
726                                      text_missing_prods),
727                          1)
728
729     # Do the same for sources
730     if len(l_sources_not_present) > 0:
731         text_missing_prods = ""
732         for p_name in l_sources_not_present:
733             text_missing_prods += " - " + p_name + "\n"
734         if not options.force_creation:
735             msg = _("ERROR: there are missing product sources:")
736             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
737                                      text_missing_prods),
738                          1)
739             raise src.SatException(msg)
740         else:
741             msg = _("WARNING: there are missing product sources:")
742             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
743                                      text_missing_prods),
744                          1)
745  
746     # construct the name of the directory that will contain the binaries
747     if src.architecture.is_windows():
748         binaries_dir_name = config.INTERNAL.config.binary_dir
749     else:
750         binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
751     # construct the correlation table between the product names, their
752     # actual install directories and their install directory in the archive
753     d_products = {}
754     for prod_name, prod_info_name, install_dir, is_single_dir, install_mode in l_install_dir:
755         prod_base_name=os.path.basename(install_dir)
756         if install_mode == "base":
757             # case of a product installed in base.
758             # because the archive is in base:no mode, the name of the install dir is different inside the archive
759             # we set it to the product name, or to the single install dir name in single_install_dir mode
760             if is_single_dir:
761                 prod_base_name=config.INTERNAL.config.single_install_dir
762             else:
763                 prod_base_name=prod_info_name
764         path_in_archive = os.path.join(binaries_dir_name, prod_base_name)
765         d_products[prod_name + " (bin)"] = (install_dir, path_in_archive)
766         
767     for prod_name, source_dir in l_source_dir:
768         path_in_archive = os.path.join("SOURCES", prod_name)
769         d_products[prod_name + " (sources)"] = (source_dir, path_in_archive)
770
771     # for packages of SALOME applications including KERNEL, 
772     # we produce a salome launcher or a virtual application (depending on salome version)
773     if 'KERNEL' in config.APPLICATION.products:
774         VersionSalome = src.get_salome_version(config)
775         # Case where SALOME has the launcher that uses the SalomeContext API
776         if VersionSalome >= 730:
777             # create the relative launcher and add it to the files to add
778             launcher_name = src.get_launcher_name(config)
779             launcher_package = produce_relative_launcher(config,
780                                                  logger,
781                                                  tmp_working_dir,
782                                                  launcher_name,
783                                                  binaries_dir_name)
784             d_products["launcher"] = (launcher_package, launcher_name)
785
786             # if the application contains mesa products, we generate in addition to the 
787             # classical salome launcher a launcher using mesa and called mesa_salome 
788             # (the mesa launcher will be used for remote usage through ssh).
789             if generate_mesa_launcher:
790                 #if there is one : store the use_mesa property
791                 restore_use_mesa_option=None
792                 if ('properties' in config.APPLICATION and 
793                     'use_mesa' in config.APPLICATION.properties):
794                     restore_use_mesa_option = config.APPLICATION.properties.use_mesa
795
796                 # activate mesa property, and generate a mesa launcher
797                 src.activate_mesa_property(config)  #activate use_mesa property
798                 launcher_mesa_name="mesa_"+launcher_name
799                 launcher_package_mesa = produce_relative_launcher(config,
800                                                      logger,
801                                                      tmp_working_dir,
802                                                      launcher_mesa_name,
803                                                      binaries_dir_name)
804                 d_products["launcher (mesa)"] = (launcher_package_mesa, launcher_mesa_name)
805
806                 # if there was a use_mesa value, we restore it
807                 # else we set it to the default value "no"
808                 if restore_use_mesa_option != None:
809                     config.APPLICATION.properties.use_mesa=restore_use_mesa_option
810                 else:
811                     config.APPLICATION.properties.use_mesa="no"
812
813             if options.sources:
814                 # if we mix binaries and sources, we add a copy of the launcher, 
815                 # prefixed with "bin", in order to avoid clashes
816                 d_products["launcher (copy)"] = (launcher_package, "bin"+launcher_name)
817         else:
818             # Provide a script for the creation of an application EDF style
819             appli_script = product_appli_creation_script(config,
820                                                         logger,
821                                                         tmp_working_dir,
822                                                         binaries_dir_name)
823             
824             d_products["appli script"] = (appli_script, "create_appli.py")
825
826     # Put also the environment file
827     env_file = produce_relative_env_files(config,
828                                            logger,
829                                            tmp_working_dir,
830                                            binaries_dir_name)
831
832     if src.architecture.is_windows():
833       filename  = "env_launch.bat"
834     else:
835       filename  = "env_launch.sh"
836     d_products["environment file"] = (env_file, filename)      
837
838     # If option exe, produce an extra launcher based on specified exe
839     if options.exe:
840         exe_file = produce_relative_env_files(config,
841                                               logger,
842                                               tmp_working_dir,
843                                               binaries_dir_name,
844                                               options.exe)
845             
846         if src.architecture.is_windows():
847           filename  = os.path.basename(options.exe) + ".bat"
848         else:
849           filename  = os.path.basename(options.exe) + ".sh"
850         d_products["exe file"] = (exe_file, filename)      
851     
852
853     return d_products
854
855 def source_package(sat, config, logger, options, tmp_working_dir):
856     '''Prepare a dictionary that stores all the needed directories and files to
857        add in a source package.
858     
859     :param config Config: The global configuration.
860     :param logger Logger: the logging instance
861     :param options OptResult: the options of the launched command
862     :param tmp_working_dir str: The temporary local directory containing some 
863                                 specific directories or files needed in the 
864                                 source package
865     :return: the dictionary that stores all the needed directories and files to
866              add in a source package.
867              {label : (path_on_local_machine, path_in_archive)}
868     :rtype: dict
869     '''
870     
871     d_archives, l_pinfo_vcs = {}, []  # l_pinfo_vcs stays empty when --ftp is used
872     # Get all the products that are prepared using an archive,
873     # unless ftp mode is specified (in this case the user of the
874     # archive will get the sources through the ftp mode of sat prepare)
875     if not options.ftp:
876         logger.write("Find archive products ... ")
877         d_archives, l_pinfo_vcs = get_archives(config, logger)
878         logger.write("Done\n")
879
880     d_archives_vcs = {}
881     if not options.with_vcs and len(l_pinfo_vcs) > 0:
882         # Make archives with the products that are not prepared using an archive
883         # (git, cvs, svn, etc)
884         logger.write("Construct archives for vcs products ... ")
885         d_archives_vcs = get_archives_vcs(l_pinfo_vcs,
886                                           sat,
887                                           config,
888                                           logger,
889                                           tmp_working_dir)
890         logger.write("Done\n")
891
892     # Create a project
893     logger.write("Create the project ... ")
894     d_project = create_project_for_src_package(config,
895                                                tmp_working_dir,
896                                                options.with_vcs,
897                                                options.ftp)
898     logger.write("Done\n")
899     
900     # Add salomeTools
901     tmp_sat = add_salomeTools(config, tmp_working_dir)
902     d_sat = {"salomeTools" : (tmp_sat, "salomeTools")}
903     
904     # Add a sat symbolic link if not win
905     if not src.architecture.is_windows():
906         tmp_satlink_path = os.path.join(tmp_working_dir, 'sat')
907         try:
908             t = os.getcwd()
909         except:
910             # In the jobs, os.getcwd() can fail
911             t = config.LOCAL.workdir
912         os.chdir(tmp_working_dir)
913         if os.path.lexists(tmp_satlink_path):
914             os.remove(tmp_satlink_path)
915         os.symlink(os.path.join('salomeTools', 'sat'), 'sat')
916         os.chdir(t)
917         
918         d_sat["sat link"] = (tmp_satlink_path, "sat")
919     
920     d_source = src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
921     return d_source
922
923 def get_archives(config, logger):
924     '''Find all the products that are retrieved using an archive and all the
925        products that are retrieved using a vcs (git, cvs, svn) repository.
926     
927     :param config Config: The global configuration.
928     :param logger Logger: the logging instance
929     :return: the dictionary {name_product : 
930              (local path of its archive, path in the package of its archive )}
931              and the list of specific configuration corresponding to the vcs 
932              products
933     :rtype: (Dict, List)
934     '''
935     # Get the list of product informations
936     l_products_name = config.APPLICATION.products.keys()
937     l_product_info = src.product.get_products_infos(l_products_name,
938                                                     config)
939     d_archives = {}
940     l_pinfo_vcs = []
941     for p_name, p_info in l_product_info:
942         # skip product with property not_in_package set to yes
943         if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
944             continue  
945         # ignore the native and fixed products
946         if (src.product.product_is_native(p_info) 
947                 or src.product.product_is_fixed(p_info)):
948             continue
949         if p_info.get_source == "archive":
950             archive_path = p_info.archive_info.archive_name
951             archive_name = os.path.basename(archive_path)
952             d_archives[p_name] = (archive_path,
953                                   os.path.join(ARCHIVE_DIR, archive_name))
954             if (src.appli_test_property(config,"pip", "yes") and 
955                 src.product.product_test_property(p_info,"pip", "yes")):
956                 # if pip mode is activated, and product is managed by pip
957                 pip_wheels_dir=os.path.join(config.LOCAL.archive_dir,"wheels")
958                 pip_wheel_pattern=os.path.join(pip_wheels_dir, 
959                     "%s-%s*" % (p_info.name, p_info.version))
960                 pip_wheel_path=glob.glob(pip_wheel_pattern)
961                 msg_pip_not_found="Error in get_archive, pip wheel for "\
962                                   "product %s-%s was not found in %s directory"
963                 msg_pip_two_or_more="Error in get_archive, several pip wheels for "\
964                                   "product %s-%s were found in %s directory"
965                 if len(pip_wheel_path)==0:
966                     raise src.SatException(msg_pip_not_found %\
967                         (p_info.name, p_info.version, pip_wheels_dir))
968                 if len(pip_wheel_path)>1:
969                     raise src.SatException(msg_pip_two_or_more %\
970                         (p_info.name, p_info.version, pip_wheels_dir))
971
972                 pip_wheel_name=os.path.basename(pip_wheel_path[0])
973                 d_archives[p_name+" (pip wheel)"]=(pip_wheel_path[0], 
974                     os.path.join(ARCHIVE_DIR, "wheels", pip_wheel_name))
975         else:
976             # this product is not managed by archive, 
977             # an archive of the vcs directory will be created by get_archive_vcs
978             l_pinfo_vcs.append((p_name, p_info)) 
979             
980     return d_archives, l_pinfo_vcs
981
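# --- Illustrative sketch (editorial addition) ---------------------------------
# Shape of the two values returned by get_archives(); product names and paths
# are hypothetical:
#
#   d_archives = {
#       "boost":             ("/data/ARCHIVES/boost-1.71.0.tar.gz",
#                             "ARCHIVES/boost-1.71.0.tar.gz"),
#       "numpy (pip wheel)": ("/data/ARCHIVES/wheels/numpy-1.19.1-cp36.whl",
#                             "ARCHIVES/wheels/numpy-1.19.1-cp36.whl"),
#   }
#   l_pinfo_vcs = [("KERNEL", <config of the KERNEL product>), ...]
# -------------------------------------------------------------------------------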
982 def add_salomeTools(config, tmp_working_dir):
983     '''Prepare a version of salomeTools that has a specific local.pyconf file 
984        configured for a source package.
985
986     :param config Config: The global configuration.
987     :param tmp_working_dir str: The temporary local directory containing some 
988                                 specific directories or files needed in the 
989                                 source package
990     :return: The path to the local salomeTools directory to add in the package
991     :rtype: str
992     '''
993     # Copy sat in the temporary working directory
994     sat_tmp_path = src.Path(os.path.join(tmp_working_dir, "salomeTools"))
995     sat_running_path = src.Path(config.VARS.salometoolsway)
996     sat_running_path.copy(sat_tmp_path)
997     
998     # Update the local.pyconf file that contains the path to the project
999     local_pyconf_name = "local.pyconf"
1000     local_pyconf_dir = os.path.join(tmp_working_dir, "salomeTools", "data")
1001     local_pyconf_file = os.path.join(local_pyconf_dir, local_pyconf_name)
1002     # Remove the .pyconf file in the root directory of salomeTools if there is
1003     # any. (For example when launching jobs, a pyconf file describing the jobs 
1004     # can be here and is not useful) 
1005     files_or_dir_SAT = os.listdir(os.path.join(tmp_working_dir, "salomeTools"))
1006     for file_or_dir in files_or_dir_SAT:
1007         if file_or_dir.endswith(".pyconf") or file_or_dir.endswith(".txt"):
1008             file_path = os.path.join(tmp_working_dir,
1009                                      "salomeTools",
1010                                      file_or_dir)
1011             os.remove(file_path)
1012     
1013     ff = open(local_pyconf_file, "w")
1014     ff.write(LOCAL_TEMPLATE)
1015     ff.close()
1016     
1017     return sat_tmp_path.path
1018
1019 def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir):
1020     '''For a source package that requires that all products be retrieved as
1021        archives, one has to create an archive for each vcs product.
1022        So this method calls the clean and source commands of sat and then creates
1023        the archives.
1024
1025     :param l_pinfo_vcs List: The list of specific configuration corresponding to
1026                              each vcs product
1027     :param sat Sat: The Sat instance that can be called to clean and source the
1028                     products
1029     :param config Config: The global configuration.
1030     :param logger Logger: the logging instance
1031     :param tmp_working_dir str: The temporary local directory containing some 
1032                                 specific directories or files needed in the 
1033                                 source package
1034     :return: the dictionary that stores all the archives to add in the source 
1035              package. {label : (path_on_local_machine, path_in_archive)}
1036     :rtype: dict
1037     '''
1038     # clean the source directory of all the vcs products, then use the source 
1039     # command and thus construct an archive that will not contain the patches
1040     l_prod_names = [pn for pn, __ in l_pinfo_vcs]
1041     if False: # clean is dangerous in user/SOURCES, fixed in tmp_local_working_dir
1042       logger.write(_("\nclean sources\n"))
1043       args_clean = config.VARS.application
1044       args_clean += " --sources --products "
1045       args_clean += ",".join(l_prod_names)
1046       logger.write("WARNING: get_archives_vcs clean\n         '%s'\n" % args_clean, 1)
1047       sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)
1048     if True:
1049       # source
1050       logger.write(_("get sources\n"))
1051       args_source = config.VARS.application
1052       args_source += " --products "
1053       args_source += ",".join(l_prod_names)
1054       svgDir = sat.cfg.APPLICATION.workdir
1055       tmp_local_working_dir = os.path.join(sat.cfg.APPLICATION.workdir, "tmp_package")  # to avoid too much big files in /tmp
1056       sat.cfg.APPLICATION.workdir = tmp_local_working_dir
1057       # DBG.write("SSS sat config.APPLICATION.workdir", sat.cfg.APPLICATION, True)
1058       # DBG.write("sat config id", id(sat.cfg), True)
1059       # note: this config does not have the same id() as the one seen by sat.source()
1060       # sat.source(args_source, batch=True, verbose=5, logger_add_link = logger)
1061       import source
1062       source.run(args_source, sat, logger) #use this mode as runner.cfg reference
1063       
1064       # make the new archives
1065       d_archives_vcs = {}
1066       for pn, pinfo in l_pinfo_vcs:
1067           path_archive = make_archive(pn, pinfo, tmp_local_working_dir)
1068           logger.write("make archive vcs '%s'\n" % path_archive)
1069           d_archives_vcs[pn] = (path_archive,
1070                                 os.path.join(ARCHIVE_DIR, pn + ".tgz"))
1071       sat.cfg.APPLICATION.workdir = svgDir
1072       # DBG.write("END sat config", sat.cfg.APPLICATION, True)
1073     return d_archives_vcs
1074
1075 def make_bin_archive(prod_name, prod_info, where):
1076     '''Create a binary archive of a product from its install directory.
1077
1078     :param prod_name str: The name of the product.
1079     :param prod_info Config: The specific configuration corresponding to the 
1080                              product
1081     :param where str: The path of the repository where to put the resulting 
1082                       archive
1083     :return: The path of the resulting archive
1084     :rtype: str
1085     '''
1086     path_targz_prod = os.path.join(where, prod_name + PACKAGE_EXT)
1087     tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
1088     bin_path = prod_info.install_dir
1089     tar_prod.add(bin_path, arcname=prod_name)  # store the content under the product name
1090     tar_prod.close()
1091     return path_targz_prod       
1092
1093 def make_archive(prod_name, prod_info, where):
1094     '''Create an archive of a product by searching its source directory.
1095
1096     :param prod_name str: The name of the product.
1097     :param prod_info Config: The specific configuration corresponding to the 
1098                              product
1099     :param where str: The path of the repository where to put the resulting 
1100                       archive
1101     :return: The path of the resulting archive
1102     :rtype: str
1103     '''
1104     path_targz_prod = os.path.join(where, prod_name + PACKAGE_EXT)
1105     tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
1106     local_path = prod_info.source_dir
1107     if old_python:
1108         tar_prod.add(local_path,
1109                      arcname=prod_name,
1110                      exclude=exclude_VCS_and_extensions_26)
1111     else:
1112         tar_prod.add(local_path,
1113                      arcname=prod_name,
1114                      filter=exclude_VCS_and_extensions)
1115     tar_prod.close()
1116     return path_targz_prod       
1117
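# --- Illustrative sketch (editorial addition) ---------------------------------
# make_archive() packs the source directory of a vcs product under its product
# name, so that "sat prepare" can later treat it as a regular archive product.
# A hypothetical call, as done in get_archives_vcs() above:
#
#   path = make_archive("KERNEL", kernel_info, "/home/user/MYAPP/tmp_package")
#   # -> "/home/user/MYAPP/tmp_package/KERNEL.tar.gz", with the .git entries
#   #    filtered out by exclude_VCS_and_extensions
# -------------------------------------------------------------------------------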
1118 def create_project_for_src_package(config, tmp_working_dir, with_vcs, with_ftp):
1119     '''Create a specific project for a source package.
1120
1121     :param config Config: The global configuration.
1122     :param tmp_working_dir str: The temporary local directory containing some 
1123                                 specific directories or files needed in the 
1124                                 source package
1125     :param with_vcs boolean: True if the package is with vcs products (not 
1126                              transformed into archive products)
1127     :param with_ftp boolean: True if the package uses ftp servers to get archives
1128     :return: The dictionary 
1129              {"project" : (produced project, project path in the archive)}
1130     :rtype: Dict
1131     '''
1132
1133     # Create in the working temporary directory the full project tree
1134     project_tmp_dir = os.path.join(tmp_working_dir, PROJECT_DIR)
1135     products_pyconf_tmp_dir = os.path.join(project_tmp_dir,
1136                                          "products")
1137     compil_scripts_tmp_dir = os.path.join(project_tmp_dir,
1138                                          "products",
1139                                          "compil_scripts")
1140     env_scripts_tmp_dir = os.path.join(project_tmp_dir,
1141                                          "products",
1142                                          "env_scripts")
1143     patches_tmp_dir = os.path.join(project_tmp_dir,
1144                                          "products",
1145                                          "patches")
1146     application_tmp_dir = os.path.join(project_tmp_dir,
1147                                          "applications")
1148     for directory in [project_tmp_dir,
1149                       compil_scripts_tmp_dir,
1150                       env_scripts_tmp_dir,
1151                       patches_tmp_dir,
1152                       application_tmp_dir]:
1153         src.ensure_path_exists(directory)
1154
1155     # Create the pyconf that contains the information of the project
1156     project_pyconf_name = "project.pyconf"        
1157     project_pyconf_file = os.path.join(project_tmp_dir, project_pyconf_name)
1158     ff = open(project_pyconf_file, "w")
1159     ff.write(PROJECT_TEMPLATE)
1160     if with_ftp and len(config.PATHS.ARCHIVEFTP) > 0:
1161         ftp_path='ARCHIVEFTP : "'+config.PATHS.ARCHIVEFTP[0]
1162         for ftpserver in config.PATHS.ARCHIVEFTP[1:]:
1163             ftp_path=ftp_path+":"+ftpserver
1164         ftp_path+='"'
1165         ff.write("# ftp servers where to search for prerequisite archives\n")
1166         ff.write(ftp_path)
1167     # add licence paths if any
1168     if len(config.PATHS.LICENCEPATH) > 0:  
1169         licence_path='LICENCEPATH : "'+config.PATHS.LICENCEPATH[0]
1170         for path in config.PATHS.LICENCEPATH[1:]:
1171             licence_path=licence_path+":"+path
1172         licence_path+='"'
1173         ff.write("\n# Where to search for licences\n")
1174         ff.write(licence_path)
1175         
1176
1177     ff.close()
1178     
1179     # Loop over the products to get their pyconf and all the scripts
1180     # (compilation, environment, patches)
1181     # and create the pyconf file to add to the project
1182     lproducts_name = config.APPLICATION.products.keys()
1183     l_products = src.product.get_products_infos(lproducts_name, config)
1184     for p_name, p_info in l_products:
1185         # skip product with property not_in_package set to yes
1186         if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
1187             continue  
1188         find_product_scripts_and_pyconf(p_name,
1189                                         p_info,
1190                                         config,
1191                                         with_vcs,
1192                                         compil_scripts_tmp_dir,
1193                                         env_scripts_tmp_dir,
1194                                         patches_tmp_dir,
1195                                         products_pyconf_tmp_dir)
1196     
1197     # for the application pyconf, we write the in-memory config directly
1198     # instead of looking up the original pyconf file, to avoid problems
1199     # with overwritten sections and the rm_products key
1200     write_application_pyconf(config, application_tmp_dir)
1201     
1202     d_project = {"project" : (project_tmp_dir, PROJECT_DIR )}
1203     return d_project
1204
1205 def find_product_scripts_and_pyconf(p_name,
1206                                     p_info,
1207                                     config,
1208                                     with_vcs,
1209                                     compil_scripts_tmp_dir,
1210                                     env_scripts_tmp_dir,
1211                                     patches_tmp_dir,
1212                                     products_pyconf_tmp_dir):
1213     '''Create a specific pyconf file for a given product. Get its environment 
1214        script, its compilation script and its patches, and put them in the
1215        temporary working directory. This method is used in the source package in order to
1216        construct the specific project.
1217
1218     :param p_name str: The name of the product.
1219     :param p_info Config: The specific configuration corresponding to the 
1220                              product
1221     :param config Config: The global configuration.
1222     :param with_vcs boolean: True if the package is with vcs products (not 
1223                              transformed into archive products)
1224     :param compil_scripts_tmp_dir str: The path to the temporary compilation 
1225                                        scripts directory of the project.
1226     :param env_scripts_tmp_dir str: The path to the temporary environment script 
1227                                     directory of the project.
1228     :param patches_tmp_dir str: The path to the temporary patch scripts 
1229                                 directory of the project.
1230     :param products_pyconf_tmp_dir str: The path to the temporary directory
1231                                         holding the product pyconf files of the project.
1232     '''
1233     
1234     # read the pyconf of the product
1235     product_pyconf_cfg = src.pyconf.Config(p_info.from_file)
1236
1237     # find the compilation script if any
1238     if src.product.product_has_script(p_info):
1239         compil_script_path = src.Path(p_info.compil_script)
1240         compil_script_path.copy(compil_scripts_tmp_dir)
1241
1242     # find the environment script if any
1243     if src.product.product_has_env_script(p_info):
1244         env_script_path = src.Path(p_info.environ.env_script)
1245         env_script_path.copy(env_scripts_tmp_dir)
1246
1247     # find the patches if any
1248     if src.product.product_has_patches(p_info):
1249         patches = src.pyconf.Sequence()
1250         for patch_path in p_info.patches:
1251             p_path = src.Path(patch_path)
1252             p_path.copy(patches_tmp_dir)
1253             patches.append(os.path.basename(patch_path), "")
1254
1255     if (not with_vcs) and src.product.product_is_vcs(p_info):
1256         # in non-vcs mode, if the product is not an archive product, turn it into one.
1257
1258         # depending upon the incremental mode, select impacted sections
1259         if "properties" in p_info and "incremental" in p_info.properties and\
1260             p_info.properties.incremental == "yes":
1261             sections = ["default", "default_win", p_info.section, p_info.section+"_win"]
1262         else:
1263             sections = [p_info.section]
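        # for every selected section that defines get_source, force the archive
        # mode and declare a default archive_info pointing at <product name>.tgz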
1264         for section in sections:
1265             if section in product_pyconf_cfg and "get_source" in product_pyconf_cfg[section]:
1266                 DBG.write("sat package set archive mode to archive for product %s and section %s" %\
1267                           (p_name,section))
1268                 product_pyconf_cfg[section].get_source = "archive"
1269                 if "archive_info" not in product_pyconf_cfg[section]:
1270                     product_pyconf_cfg[section].addMapping("archive_info",
1271                                         src.pyconf.Mapping(product_pyconf_cfg),
1272                                         "")
1273                     product_pyconf_cfg[section].archive_info.archive_name =\
1274                         p_info.name + ".tgz"
1275     
1276     if (with_vcs) and src.product.product_is_vcs(p_info):
1277         # in vcs mode we must replace the git server url explicitly
1278         # (otherwise it will not be found later, because project files are not exported in archives)
1279         for section in product_pyconf_cfg:
1280             # replace, in every section of the product pyconf, the git repo definition by its substituted value (found in p_info)
1281             if "git_info" in product_pyconf_cfg[section]:
1282                 for repo in product_pyconf_cfg[section].git_info:
1283                     if repo in p_info.git_info:
1284                         product_pyconf_cfg[section].git_info[repo] =  p_info.git_info[repo]
1285
1286     # write the pyconf file to the temporary project location
1287     product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir,
1288                                            p_name + ".pyconf")
1289     ff = open(product_tmp_pyconf_path, 'w')
1290     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1291     product_pyconf_cfg.__save__(ff, 1)
1292     ff.close()
1293
1294
1295 def write_application_pyconf(config, application_tmp_dir):
1296     '''Write the application pyconf file in the specific temporary 
1297        directory containing the specific project of a source package.
1298
1299     :param config Config: The global configuration.
1300     :param application_tmp_dir str: The path to the temporary application 
1301                                     scripts directory of the project.
1302     '''
1303     application_name = config.VARS.application
1304     # write the pyconf file to the temporary application location
1305     application_tmp_pyconf_path = os.path.join(application_tmp_dir,
1306                                                application_name + ".pyconf")
1307     with open(application_tmp_pyconf_path, 'w') as f:
1308         f.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1309         res = src.pyconf.Config()
1310         app = src.pyconf.deepCopyMapping(config.APPLICATION)
1311
1312         # set base mode to "no" for the archive
1313         app.base = "no"
1314
1315         # Change the workdir
1316         app.workdir = src.pyconf.Reference(
1317                                  app,
1318                                  src.pyconf.DOLLAR,
1319                                  'VARS.salometoolsway + $VARS.sep + ".."')
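        # the reference is resolved at run time: workdir becomes the parent
        # directory of the salomeTools installation, so the application works
        # relative to wherever the archive was extracted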
1320         res.addMapping("APPLICATION", app, "")
1321         res.__save__(f, evaluated=False)
1322     
1323
1324 def sat_package(config, tmp_working_dir, options, logger):
1325     '''Prepare a dictionary that stores all the needed directories and files to
1326        add in a salomeTools package.
1327     
1328     :param tmp_working_dir str: The temporary local working directory 
1329     :param options OptResult: the options of the launched command
1330     :return: the dictionary that stores all the needed directories and files to
1331              add in a salomeTools package.
1332              {label : (path_on_local_machine, path_in_archive)}
1333     :rtype: dict
1334     '''
1335     d_project = {}
1336
1337     # we include sat itself
1338     d_project["all_sat"]=(config.VARS.salometoolsway, "")
1339
1340     # and we overwrite local.pyconf with a clean version.
1341     local_pyconf_tmp_path = os.path.join(tmp_working_dir, "local.pyconf")
1342     local_file_path = os.path.join(config.VARS.datadir, "local.pyconf")
1343     local_cfg = src.pyconf.Config(local_file_path)
1344     local_cfg.PROJECTS.project_file_paths=src.pyconf.Sequence(local_cfg.PROJECTS)
1345     local_cfg.LOCAL["base"] = "default"
1346     local_cfg.LOCAL["workdir"] = "default"
1347     local_cfg.LOCAL["log_dir"] = "default"
1348     local_cfg.LOCAL["archive_dir"] = "default"
1349     local_cfg.LOCAL["VCS"] = "None"
1350     local_cfg.LOCAL["tag"] = src.get_salometool_version(config)
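    # after these assignments the LOCAL section of the embedded local.pyconf only
    # holds neutral 'default' values, VCS 'None' and the sat version as tag, so
    # the packaged sat starts from a clean local configuration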
1351
1352     # if the archive contains a project, we write its relative path in local.pyconf
1353     if options.project:
1354         project_arch_path = os.path.join("projects", options.project, 
1355                                          os.path.basename(options.project_file_path))
1356         local_cfg.PROJECTS.project_file_paths.append(project_arch_path, "")
1357
1358     ff = open(local_pyconf_tmp_path, 'w')
1359     local_cfg.__save__(ff, 1)
1360     ff.close()
1361     d_project["local.pyconf"]=(local_pyconf_tmp_path, "data/local.pyconf")
1362     return d_project
1363     
1364
1365 def project_package(config, name_project, project_file_path, ftp_mode, tmp_working_dir, embedded_in_sat, logger):
1366     '''Prepare a dictionary that stores all the needed directories and files to
1367        add in a project package.
1368     
1369     :param project_file_path str: The path to the local project.
1370     :param ftp_mode boolean: If True, do not embed the product archives; the package will rely on ftp servers to retrieve them.
1371     :param tmp_working_dir str: The temporary local directory containing some 
1372                                 specific directories or files needed in the 
1373                                 project package
1374     :param embedded_in_sat boolean : the project package is embedded in a sat package
1375     :return: the dictionary that stores all the needed directories and files to
1376              add in a project package.
1377              {label : (path_on_local_machine, path_in_archive)}
1378     :rtype: dict
1379     '''
1380     d_project = {}
1381     # Read the project file and get the directories to add to the package
1382     
1383     try: 
1384       project_pyconf_cfg = config.PROJECTS.projects.__getattr__(name_project)
1385     except Exception:
1386       logger.write("""
1387 WARNING: config.PROJECTS.projects.%s does not exist, trying to read it now from:\n%s\n""" % (name_project, project_file_path))
1388       project_pyconf_cfg = src.pyconf.Config(project_file_path)
1389       project_pyconf_cfg.PWD = os.path.dirname(project_file_path)
1390     
1391     paths = {"APPLICATIONPATH" : "applications",
1392              "PRODUCTPATH" : "products",
1393              "JOBPATH" : "jobs",
1394              "MACHINEPATH" : "machines"}
1395     if not ftp_mode:
1396         paths["ARCHIVEPATH"] = "archives"
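    # each project directory listed above is copied under the matching sub-directory
    # of the archive, and the corresponding pyconf entry is rewritten below as a
    # $project_path reference so that the exported project stays relocatable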
1397
1398     # Loop over the project paths and add them to the package
1399     project_file_name = os.path.basename(project_file_path)
    # give project_file_dest a default value so that it is always defined,
    # even if none of the paths above is present in the project configuration
    project_file_dest = project_file_name
1400     for path in paths:
1401         if path not in project_pyconf_cfg:
1402             continue
1403         if embedded_in_sat:
1404             dest_path = os.path.join("projects", name_project, paths[path])
1405             project_file_dest = os.path.join("projects", name_project, project_file_name)
1406         else:
1407             dest_path = paths[path]
1408             project_file_dest = project_file_name
1409
1410         # Add the directory to the files to add in the package
1411         d_project[path] = (project_pyconf_cfg[path], dest_path)
1412
1413         # Modify the value of the path in the package
1414         project_pyconf_cfg[path] = src.pyconf.Reference(
1415                                     project_pyconf_cfg,
1416                                     src.pyconf.DOLLAR,
1417                                     'project_path + "/' + paths[path] + '"')
1418     
1419     # Modify some values
1420     if "project_path" not in project_pyconf_cfg:
1421         project_pyconf_cfg.addMapping("project_path",
1422                                       src.pyconf.Mapping(project_pyconf_cfg),
1423                                       "")
1424     project_pyconf_cfg.project_path = src.pyconf.Reference(project_pyconf_cfg,
1425                                                            src.pyconf.DOLLAR,
1426                                                            'PWD')
1427     # we don't want to export these two fields
1428     project_pyconf_cfg.__delitem__("file_path")
1429     project_pyconf_cfg.__delitem__("PWD")
1430     if ftp_mode:
1431         project_pyconf_cfg.__delitem__("ARCHIVEPATH")
1432     
1433     # Write the project pyconf file
1434     project_pyconf_tmp_path = os.path.join(tmp_working_dir, project_file_name)
1435     ff = open(project_pyconf_tmp_path, 'w')
1436     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1437     project_pyconf_cfg.__save__(ff, 1)
1438     ff.close()
1439     d_project["Project hat file"] = (project_pyconf_tmp_path, project_file_dest)
1440     
1441     return d_project
1442
1443 def add_readme(config, options, where):
1444     readme_path = os.path.join(where, "README")
1445     with codecs.open(readme_path, "w", 'utf-8') as f:
1446
1447         # templates for building the header
1448         readme_header="""
1449 # This package was generated with sat $version
1450 # Date: $date
1451 # User: $user
1452 # Distribution : $dist
1453
1454 In the following, $$ROOT represents the directory where you have installed 
1455 SALOME (the directory where this file is located).
1456
1457 """
1458         if src.architecture.is_windows():
1459             readme_header = readme_header.replace('$$ROOT','%ROOT%')
1460         readme_compilation_with_binaries="""
1461
1462 compilation based on the binaries used as prerequisites
1463 =======================================================
1464
1465 If you fail to compile the complete application (for example because
1466 you are not root on your system and cannot install missing packages), you
1467 may try a partial compilation based on the binaries.
1468 For that it is necessary to copy the binaries from BINARIES to INSTALL,
1469 and do some substitutions on cmake and .la files (replace the build directories
1470 with local paths).
1471 The procedure to do it is:
1472  1) Remove or rename INSTALL directory if it exists
1473  2) Execute the shell script install_bin.sh:
1474  > cd $ROOT
1475  > ./install_bin.sh
1476  3) Use SalomeTool (as explained in Sources section) and compile only the 
1477     modules you need to (with -p option)
1478
1479 """
1480         readme_header_tpl=string.Template(readme_header)
1481         readme_template_path_bin = os.path.join(config.VARS.internal_dir,
1482                 "README_BIN.template")
1483         readme_template_path_bin_launcher = os.path.join(config.VARS.internal_dir,
1484                 "README_LAUNCHER.template")
1485         readme_template_path_bin_virtapp = os.path.join(config.VARS.internal_dir,
1486                 "README_BIN_VIRTUAL_APP.template")
1487         readme_template_path_src = os.path.join(config.VARS.internal_dir,
1488                 "README_SRC.template")
1489         readme_template_path_pro = os.path.join(config.VARS.internal_dir,
1490                 "README_PROJECT.template")
1491         readme_template_path_sat = os.path.join(config.VARS.internal_dir,
1492                 "README_SAT.template")
1493
1494         # prepare substitution dictionary
1495         d = dict()
1496         d['user'] = config.VARS.user
1497         d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
1498         d['version'] = src.get_salometool_version(config)
1499         d['dist'] = config.VARS.dist
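        # d now provides every placeholder used by readme_header ($version, $date,
        # $user, $dist); additional keys are added below for the README_*.template files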
1500         f.write(readme_header_tpl.substitute(d)) # write the general header (common)
1501
1502         if options.binaries or options.sources:
1503             d['application'] = config.VARS.application
1504             d['BINARIES']    = config.INTERNAL.config.binary_dir
1505             d['SEPARATOR'] = config.VARS.sep
1506             if src.architecture.is_windows():
1507                 d['operatingSystem'] = 'Windows'
1508                 d['PYTHON3'] = 'python3'
1509                 d['ROOT']    = '%ROOT%'
1510             else:
1511                 d['operatingSystem'] = 'Linux'
1512                 d['PYTHON3'] = ''
1513                 d['ROOT']    = '$ROOT'
1514             f.write("# Application: " + d['application'] + "\n")
1515             if 'KERNEL' in config.APPLICATION.products:
1516                 VersionSalome = src.get_salome_version(config)
1517                 # Case where SALOME has the launcher that uses the SalomeContext API
1518                 if VersionSalome >= 730:
1519                     d['launcher'] = config.APPLICATION.profile.launcher_name
1520                 else:
1521                     d['virtual_app'] = 'runAppli' # this info is not used for now
1522
1523         # write the specific sections
1524         if options.binaries:
1525             f.write(src.template.substitute(readme_template_path_bin, d))
1526             if "virtual_app" in d:
1527                 f.write(src.template.substitute(readme_template_path_bin_virtapp, d))
1528             if "launcher" in d:
1529                 f.write(src.template.substitute(readme_template_path_bin_launcher, d))
1530
1531         if options.sources:
1532             f.write(src.template.substitute(readme_template_path_src, d))
1533
1534         if options.binaries and options.sources and not src.architecture.is_windows():
1535             f.write(readme_compilation_with_binaries)
1536
1537         if options.project:
1538             f.write(src.template.substitute(readme_template_path_pro, d))
1539
1540         if options.sat:
1541             f.write(src.template.substitute(readme_template_path_sat, d))
1542     
1543     return readme_path
1544
1545 def update_config(config, logger,  prop, value):
1546     '''Remove from config.APPLICATION.products the products that have the property given as input.
1547     
1548     :param config Config: The global config.
1549     :param prop str: The property to filter
1550     :param value str: The value of the property to filter
1551     '''
1552     # if there is no APPLICATION (e.g. sat package -t): nothing to do
1553     if "APPLICATION" in config:
1554         l_product_to_remove = []
1555         for product_name in config.APPLICATION.products.keys():
1556             prod_cfg = src.product.get_product_config(config, product_name)
1557             if src.get_property_in_product_cfg(prod_cfg, prop) == value:
1558                 l_product_to_remove.append(product_name)
1559         for product_name in l_product_to_remove:
1560             config.APPLICATION.products.__delitem__(product_name)
1561             logger.write("Remove product %s with property %s\n" % (product_name, prop), 5)
1562
1563 def description():
1564     '''method that is called when salomeTools is called with --help option.
1565     
1566     :return: The text to display for the package command description.
1567     :rtype: str
1568     '''
1569     return _("""
1570 The package command creates a tar file archive of a product.
1571 There are four kinds of archive, which can be mixed:
1572
1573  1 - The binary archive. 
1574      It contains the product installation directories plus a launcher.
1575  2 - The sources archive. 
1576      It contains the product archives and a project (the application plus salomeTools).
1577  3 - The project archive. 
1578      It contains a project (give the project file path as argument).
1579  4 - The salomeTools archive. 
1580      It contains the salomeTools utility itself.
1581
1582 example:
1583  >> sat package SALOME-master --binaries --sources""")
1584   
1585 def run(args, runner, logger):
1586     '''method that is called when salomeTools is called with package parameter.
1587     '''
1588     
1589     # Parse the options
1590     (options, args) = parser.parse_args(args)
1591
1592     
1593     # Check that at least one type of package was requested
1594     all_option_types = (options.binaries,
1595                         options.sources,
1596                         options.project not in ["", None],
1597                         options.sat,
1598                         options.bin_products)
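    # all_option_types is a tuple of booleans, one entry per package type;
    # counting the True entries tells whether at least one type was requested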
1599
1600     # Check if no option for package type
1601     if all_option_types.count(True) == 0:
1602         msg = _("Error: Specify a type for the package\nUse one of the "
1603                 "following options: --binaries, --sources, --project,"
1604                 " --salometools or --bin_products")
1605         logger.write(src.printcolors.printcError(msg), 1)
1606         logger.write("\n", 1)
1607         return 1
1608     do_create_package = options.binaries or options.sources or options.project or options.sat 
1609
1610     if options.bin_products:
1611         ret = bin_products_archives(runner.cfg, logger)
1612         if ret != 0:
1613             return ret
1614     if not do_create_package:
1615         return 0
1616
1617     # continue to create a tar.gz package 
1618
1619     # The directory where to put the package if it is neither binary nor source
1620     package_default_path = runner.cfg.LOCAL.workdir
1621     # if the package contains binaries or sources:
1622     if options.binaries or options.sources or options.bin_products:
1623         # Check that the command has been called with an application
1624         src.check_config_has_application(runner.cfg)
1625
1626         # Display information
1627         logger.write(_("Packaging application %s\n") % src.printcolors.printcLabel(
1628                                                     runner.cfg.VARS.application), 1)
1629         
1630         # Get the default directory where to put the packages
1631         package_default_path = os.path.join(runner.cfg.APPLICATION.workdir, "PACKAGE")
1632         src.ensure_path_exists(package_default_path)
1633         
1634     # if the package contains a project:
1635     if options.project:
1636         # check that the project is visible by SAT
1637         projectNameFile = options.project + ".pyconf"
1638         foundProject = None
1639         for i in runner.cfg.PROJECTS.project_file_paths:
1640             baseName = os.path.basename(i)
1641             if baseName == projectNameFile:
1642                 foundProject = i
1643                 break
1644
1645         if foundProject is None:
1646             local_path = os.path.join(runner.cfg.VARS.salometoolsway, "data", "local.pyconf")
1647             msg = _("""ERROR: the project %(1)s is not visible by salomeTools.
1648 known projects are:
1649 %(2)s
1650
1651 Please add it in file:
1652 %(3)s""" % \
1653                     {"1": options.project, "2": "\n  ".join(runner.cfg.PROJECTS.project_file_paths), "3": local_path})
1654             logger.write(src.printcolors.printcError(msg), 1)
1655             logger.write("\n", 1)
1656             return 1
1657         else:
1658             options.project_file_path = foundProject
1659             src.printcolors.print_value(logger, "Project path", options.project_file_path, 2)
1660     
1661     # Remove the products that are filtered by the --without_properties option
1662     if options.without_properties:
1663         prop, value = options.without_properties
1664         update_config(runner.cfg, logger, prop, value)
1665
1666     # Remove from config the products that have the not_in_package property
1667     update_config(runner.cfg, logger, "not_in_package", "yes")
1668
1669     # get the name of the archive or build it
1670     if options.name:
1671         if os.path.basename(options.name) == options.name:
1672             # only a name (not a path)
1673             archive_name = options.name           
1674             dir_name = package_default_path
1675         else:
1676             archive_name = os.path.basename(options.name)
1677             dir_name = os.path.dirname(options.name)
1678         
1679         # suppress extension
1680         if archive_name[-len(".tgz"):] == ".tgz":
1681             archive_name = archive_name[:-len(".tgz")]
1682         if archive_name[-len(".tar.gz"):] == ".tar.gz":
1683             archive_name = archive_name[:-len(".tar.gz")]
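        # both .tgz and .tar.gz are stripped here because PACKAGE_EXT is
        # appended again when path_targz is built below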
1684         
1685     else:
1686         archive_name=""
1687         dir_name = package_default_path
1688         if options.binaries or options.sources:
1689             archive_name = runner.cfg.APPLICATION.name
1690
1691         if options.binaries:
1692             archive_name += "-"+runner.cfg.VARS.dist
1693             
1694         if options.sources:
1695             archive_name += "-SRC"
1696             if options.with_vcs:
1697                 archive_name += "-VCS"
1698
1699         if options.sat:
1700             archive_name += ("salomeTools_" + src.get_salometool_version(runner.cfg))
1701
1702         if options.project:
1703             if options.sat:
1704                 archive_name += "_" 
1705             archive_name += ("satproject_" + options.project)
1706  
1707         if len(archive_name)==0: # no option worked 
1708             msg = _("Error: Cannot name the archive\n"
1709                     " check if at least one of the following options was "
1710                     "selected : --binaries, --sources, --project or"
1711                     " --salometools")
1712             logger.write(src.printcolors.printcError(msg), 1)
1713             logger.write("\n", 1)
1714             return 1
1715  
1716     path_targz = os.path.join(dir_name, archive_name + PACKAGE_EXT)
1717     
1718     src.printcolors.print_value(logger, "Package path", path_targz, 2)
1719
1720     # Create a working directory for all files that are produced during the
1721     # package creation and that will be removed at the end of the command
1722     tmp_working_dir = os.path.join(runner.cfg.VARS.tmp_root, runner.cfg.VARS.datehour)
1723     src.ensure_path_exists(tmp_working_dir)
1724     logger.write("\n", 5)
1725     logger.write(_("The temporary working directory: %s\n" % tmp_working_dir),5)
1726     
1727     logger.write("\n", 3)
1728
1729     msg = _("Preparation of files to add to the archive")
1730     logger.write(src.printcolors.printcLabel(msg), 2)
1731     logger.write("\n", 2)
1732     
1733     d_files_to_add={}  # content of the archive
1734
1735     # a dict to hold the paths that will need to be substituted for user recompilations
1736     d_paths_to_substitute={}  
1737
1738     if options.binaries:
1739         d_bin_files_to_add = binary_package(runner.cfg,
1740                                             logger,
1741                                             options,
1742                                             tmp_working_dir)
1743         # for all binary directories, store the substitution that will be required
1744         # for extra compilations
1745         for key in d_bin_files_to_add:
1746             if key.endswith("(bin)"):
1747                 source_dir = d_bin_files_to_add[key][0]
1748                 path_in_archive = d_bin_files_to_add[key][1].replace(
1749                    runner.cfg.INTERNAL.config.binary_dir + runner.cfg.VARS.dist,
1750                    runner.cfg.INTERNAL.config.install_dir)
1751                 if os.path.basename(source_dir)==os.path.basename(path_in_archive):
1752                     # if basename is the same we will just substitute the dirname 
1753                     d_paths_to_substitute[os.path.dirname(source_dir)]=\
1754                         os.path.dirname(path_in_archive)
1755                 else:
1756                     d_paths_to_substitute[source_dir]=path_in_archive
1757
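        # d_paths_to_substitute typically maps a path like <workdir>/BINARIES-<dist>/<product>
        # to <workdir>/INSTALL/<product> (directory names are illustrative, the real ones come
        # from config.INTERNAL.config); install_bin.sh later applies these substitutions to
        # the cmake and .la files of the copied binaries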
1758         d_files_to_add.update(d_bin_files_to_add)
1759     if options.sources:
1760         d_files_to_add.update(source_package(runner,
1761                                         runner.cfg,
1762                                         logger, 
1763                                         options,
1764                                         tmp_working_dir))
1765         if options.binaries:
1766             # for archives with bin and sources we provide a shell script able to 
1767             # install binaries for compilation
1768             file_install_bin=produce_install_bin_file(runner.cfg,logger,
1769                                                       tmp_working_dir,
1770                                                       d_paths_to_substitute,
1771                                                       "install_bin.sh")
1772             d_files_to_add.update({"install_bin" : (file_install_bin, "install_bin.sh")})
1773             logger.write("substitutions that need to be done later : \n", 5)
1774             logger.write(str(d_paths_to_substitute), 5)
1775             logger.write("\n", 5)
1776     else:
1777         # the --salometools option is not considered when --sources is selected, as that option
1778         # already brings salomeTools!
1779         if options.sat:
1780             d_files_to_add.update(sat_package(runner.cfg, tmp_working_dir, 
1781                                   options, logger))
1782         
1783     if options.project:
1784         DBG.write("config for package %s" % options.project, runner.cfg)
1785         d_files_to_add.update(project_package(runner.cfg, options.project, options.project_file_path, options.ftp, tmp_working_dir, options.sat, logger))
1786
1787     if not(d_files_to_add):
1788         msg = _("Error: Empty dictionary to build the archive!\n")
1789         logger.write(src.printcolors.printcError(msg), 1)
1790         logger.write("\n", 1)
1791         return 1
1792
1793     # Add the README file in the package
1794     local_readme_tmp_path = add_readme(runner.cfg, options, tmp_working_dir)
1795     d_files_to_add["README"] = (local_readme_tmp_path, "README")
1796
1797     # Add the additional files of option add_files
1798     if options.add_files:
1799         for file_path in options.add_files:
1800             if not os.path.exists(file_path):
1801                 msg = _("WARNING: the file %s is not accessible.\n" % file_path)
                logger.write(msg, 1)
1802                 continue
1803             file_name = os.path.basename(file_path)
1804             d_files_to_add[file_name] = (file_path, file_name)
1805
1806     logger.write("\n", 2)
1807     logger.write(src.printcolors.printcLabel(_("Actually do the package")), 2)
1808     logger.write("\n", 2)
1809     logger.write("\nfiles and directories to add:\n%s\n\n" % PP.pformat(d_files_to_add), 5)
1810
1811     res = 0
1812     try:
1813         # Creating the object tarfile
1814         tar = tarfile.open(path_targz, mode='w:gz')
1815         
1816         # get the filtering function if needed
1817         if old_python:
1818             filter_function = exclude_VCS_and_extensions_26
1819         else:
1820             filter_function = exclude_VCS_and_extensions
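        # on old interpreters (Python 2.6 and earlier, see old_python above) the
        # dedicated helper is used; both variants are expected to filter out the
        # VCS directories and ignored extensions from the archive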
1821
1822         # Add the files to the tarfile object
1823         res = add_files(tar, archive_name, d_files_to_add, logger, f_exclude=filter_function)
1824         tar.close()
1825     except KeyboardInterrupt:
1826         logger.write(src.printcolors.printcError("\nERROR: forced interruption\n"), 1)
1827         logger.write(_("Removing the temporary working directory '%s'... ") % tmp_working_dir, 1)
1828         # remove the working directory
1829         shutil.rmtree(tmp_working_dir)
1830         logger.write(_("OK"), 1)
1831         logger.write(_("\n"), 1)
1832         return 1
1833     
1834     # case where there is no application, e.g. packaging only sat with 'sat package -t'
1835     try:
1836         app = runner.cfg.APPLICATION
1837     except Exception:
1838         app = None
1839
1840     # unconditionally remove the tmp_local_working_dir
1841     if app is not None:
1842         tmp_local_working_dir = os.path.join(app.workdir, "tmp_package")
1843         if os.path.isdir(tmp_local_working_dir):
1844             shutil.rmtree(tmp_local_working_dir)
1845
1846     # remove the tmp directory, unless user has registered as developer
1847     if os.path.isdir(tmp_working_dir) and (not DBG.isDeveloper()):
1848         shutil.rmtree(tmp_working_dir)
1849     
1850     # Print again the path of the package
1851     logger.write("\n", 2)
1852     src.printcolors.print_value(logger, "Package path", path_targz, 2)
1853     
1854     return res