sat #26629: generalization of the patch so that the launcher name is correct for...
[tools/sat.git] / commands / package.py
1 #!/usr/bin/env python
2 #-*- coding:utf-8 -*-
3 #  Copyright (C) 2010-2012  CEA/DEN
4 #
5 #  This library is free software; you can redistribute it and/or
6 #  modify it under the terms of the GNU Lesser General Public
7 #  License as published by the Free Software Foundation; either
8 #  version 2.1 of the License.
9 #
10 #  This library is distributed in the hope that it will be useful,
11 #  but WITHOUT ANY WARRANTY; without even the implied warranty of
12 #  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13 #  Lesser General Public License for more details.
14 #
15 #  You should have received a copy of the GNU Lesser General Public
16 #  License along with this library; if not, write to the Free Software
17 #  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
18
19 import os
20 import stat
21 import shutil
22 import datetime
23 import tarfile
24 import codecs
25 import string
26 import glob
27 import pprint as PP
28 import sys
29 import src
30
31 from application import get_SALOME_modules
32 import src.debug as DBG
33
34 old_python = sys.version_info[0] == 2 and sys.version_info[1] <= 6
35
36 BINARY = "binary"
37 SOURCE = "Source"
38 PROJECT = "Project"
39 SAT = "Sat"
40
41 ARCHIVE_DIR = "ARCHIVES"
42 PROJECT_DIR = "PROJECT"
43
44 IGNORED_DIRS = [".git", ".svn"]
45 IGNORED_EXTENSIONS = []
46
47 PACKAGE_EXT=".tar.gz" # the extension we use for the packages
48
49 PROJECT_TEMPLATE = """#!/usr/bin/env python
50 #-*- coding:utf-8 -*-
51
52 # The path to the archive root directory
53 root_path : $PWD + "/../"
54 # path to the PROJECT
55 project_path : $PWD + "/"
56
57 # Where to search the archives of the products
58 ARCHIVEPATH : $root_path + "ARCHIVES"
59 # Where to search the pyconf of the applications
60 APPLICATIONPATH : $project_path + "applications/"
61 # Where to search the pyconf of the products
62 PRODUCTPATH : $project_path + "products/"
63 # Where to search the pyconf of the jobs of the project
64 JOBPATH : $project_path + "jobs/"
65 # Where to search the pyconf of the machines of the project
66 MACHINEPATH : $project_path + "machines/"
67 """
68
69 LOCAL_TEMPLATE = ("""#!/usr/bin/env python
70 #-*- coding:utf-8 -*-
71
72   LOCAL :
73   {
74     base : 'default'
75     workdir : 'default'
76     log_dir : 'default'
77     archive_dir : 'default'
78     VCS : 'unknown'
79     tag : 'unknown'
80   }
81
82 PROJECTS :
83 {
84 project_file_paths : [$VARS.salometoolsway + $VARS.sep + \"..\" + $VARS.sep"""
85 """ + \"""" + PROJECT_DIR + """\" + $VARS.sep + "project.pyconf"]
86 }
87 """)
88
89 # Define all possible options for the package command:  sat package <options>
90 parser = src.options.Options()
91 parser.add_option('b', 'binaries', 'boolean', 'binaries',
92     _('Optional: Produce a binary package.'), False)
93 parser.add_option('f', 'force_creation', 'boolean', 'force_creation',
94     _('Optional: Only binary package: produce the archive even if '
95       'there are some missing products.'), False)
96 parser.add_option('s', 'sources', 'boolean', 'sources',
97     _('Optional: Produce a compilable archive of the sources of the '
98       'application.'), False)
99 parser.add_option('', 'bin_products', 'boolean', 'bin_products',
100     _('Optional: Create binary archives for all products.'), False)
101 parser.add_option('', 'with_vcs', 'boolean', 'with_vcs',
102     _('Optional: Do not make archives for products in VCS mode (git, cvs, svn). '
103       'Sat prepare will use VCS mode instead to retrieve them.'),
104     False)
105 parser.add_option('', 'ftp', 'boolean', 'ftp',
106     _('Optional: Do not embed archives for products in archive mode. '
107       'Sat prepare will use ftp instead to retrieve them.'),
108     False)
109 parser.add_option('e', 'exe', 'string', 'exe',
110     _('Optional: Produce an extra launcher based upon the exe given as argument.'), "")
111 parser.add_option('p', 'project', 'string', 'project',
112     _('Optional: Produce an archive that contains a project.'), "")
113 parser.add_option('t', 'salometools', 'boolean', 'sat',
114     _('Optional: Produce an archive that contains salomeTools.'), False)
115 parser.add_option('n', 'name', 'string', 'name',
116     _('Optional: The name or full path of the archive.'), None)
117 parser.add_option('', 'add_files', 'list2', 'add_files',
118     _('Optional: The list of additional files to add to the archive.'), [])
119 parser.add_option('', 'without_properties', 'properties', 'without_properties',
120     _('Optional: Filter the products by their properties.\n\tSyntax: '
121       '--without_properties <property>:<value>'))
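
# Illustrative usage of these options (a sketch; the application name MYAPP and
# the paths below are hypothetical, not taken from this repository):
#   sat package MYAPP --binaries --name /tmp/MYAPP-binaries
#   sat package MYAPP --sources --with_vcs
#   sat package MYAPP --binaries --sources --exe /path/to/my_exe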
122
123
124 def add_files(tar, name_archive, d_content, logger, f_exclude=None):
125     '''Create an archive containing all directories and files that are given in
126        the d_content argument.
127     
128     :param tar tarfile: The tarfile instance used to make the archive.
129     :param name_archive str: The name of the archive to make.
130     :param d_content dict: The dictionary that contains all directories and files
131                            to add in the archive.
132                            d_content[label] = 
133                                         (path_on_local_machine, path_in_archive)
134     :param logger Logger: the logging instance
135     :param f_exclude Function: the function that filters
136     :return: 0 if success, 1 if not.
137     :rtype: int
138     '''
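    # Illustrative shape of d_content (hypothetical labels and paths, shown here
    # only to document the expected mapping label -> (local path, path in archive)):
    #   {"KERNEL (bin)": ("/work/MYAPP/INSTALL/KERNEL", "BINARIES-CO7/KERNEL"),
    #    "launcher":     ("/tmp/tmp_package/salome",    "salome")}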
139     # get the max length of the names in order to align the display
140     max_len = len(max(d_content.keys(), key=len))
141     
142     success = 0
143     # loop over each directory or file stored in the d_content dictionary
144     names = sorted(d_content.keys())
145     DBG.write("add tar names", names)
146
147     # used to avoid duplications (for pip install in python, or single_install_dir cases)
148     already_added=set() 
149     for name in names:
150         # display information
151         len_points = max_len - len(name) + 3
152         local_path, archive_path = d_content[name]
153         in_archive = os.path.join(name_archive, archive_path)
154         logger.write(name + " " + len_points * "." + " "+ in_archive + " ", 3)
155         # Get the local path and the path in archive 
156         # of the directory or file to add
157         # Add it in the archive
158         try:
159             key=local_path+"->"+in_archive
160             if key not in already_added:
161                 if old_python:
162                     tar.add(local_path,
163                                  arcname=in_archive,
164                                  exclude=exclude_VCS_and_extensions_26)
165                 else:
166                     tar.add(local_path,
167                                  arcname=in_archive,
168                                  filter=exclude_VCS_and_extensions)
169                 already_added.add(key)
170             logger.write(src.printcolors.printcSuccess(_("OK")), 3)
171         except Exception as e:
172             logger.write(src.printcolors.printcError(_("KO ")), 3)
173             logger.write(str(e), 3)
174             success = 1
175         logger.write("\n", 3)
176     return success
177
178
179 def exclude_VCS_and_extensions_26(filename):
180     ''' The function that is used to exclude from the package the links to the
181         VCS repositories (like .git) (only for python 2.6)
182
183     :param filename Str: The filename to exclude (or not).
184     :return: True if the file has to be excluded
185     :rtype: Boolean
186     '''
187     for dir_name in IGNORED_DIRS:
188         if dir_name in filename:
189             return True
190     for extension in IGNORED_EXTENSIONS:
191         if filename.endswith(extension):
192             return True
193     return False
194
195 def exclude_VCS_and_extensions(tarinfo):
196     ''' The function that is used to exclude from the package the links to the
197         VCS repositories (like .git)
198
199     :param tarinfo TarInfo: The tar entry to exclude (or not).
200     :return: None if the entry has to be excluded, tarinfo otherwise
201     :rtype: TarInfo or None
202     '''
203     filename = tarinfo.name
204     for dir_name in IGNORED_DIRS:
205         if dir_name in filename:
206             return None
207     for extension in IGNORED_EXTENSIONS:
208         if filename.endswith(extension):
209             return None
210     return tarinfo
211
212 def produce_relative_launcher(config,
213                               logger,
214                               file_dir,
215                               file_name,
216                               binaries_dir_name):
217     '''Create a specific SALOME launcher for the binary package. This launcher 
218        uses relative paths.
219     
220     :param config Config: The global configuration.
221     :param logger Logger: the logging instance
222     :param file_dir str: the directory where to put the launcher
223     :param file_name str: The launcher name
224     :param binaries_dir_name str: the name of the directory, inside the
225                                   archive, that contains the binaries.
226     :return: the path of the produced launcher
227     :rtype: str
228     '''
229     
230     # set base mode to "no" for the archive - save current mode to restore it at the end
231     if "base" in config.APPLICATION:
232         base_setting=config.APPLICATION.base 
233     else:
234         base_setting="maybe"
235     config.APPLICATION.base="no"
236
237     # get KERNEL installation path 
238     kernel_info = src.product.get_product_config(config, "KERNEL")
239     kernel_base_name=os.path.basename(kernel_info.install_dir)
240     if kernel_info.install_mode == "base":
241         # case of kernel installed in base. the kernel install dir name is different in the archive
242         kernel_base_name=os.path.basename(os.path.dirname(kernel_info.install_dir))
243     
244     kernel_root_dir = os.path.join(binaries_dir_name, kernel_base_name)
245
246     # set kernel bin dir (considering fhs property)
247     kernel_cfg = src.product.get_product_config(config, "KERNEL")
248     if src.get_property_in_product_cfg(kernel_cfg, "fhs"):
249         bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin") 
250     else:
251         bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin","salome") 
252
253     # check if the application contains an application module
254     # check also if the application has a distene product, 
255     # in this case get its licence file name
256     l_product_info = src.product.get_products_infos(config.APPLICATION.products.keys(), config)
257     salome_application_name="Not defined" 
258     distene_licence_file_name=False
259     for prod_name, prod_info in l_product_info:
260         # look for a "salome application" and a distene product
261         if src.get_property_in_product_cfg(prod_info, "is_distene") == "yes":
262             distene_licence_file_name = src.product.product_has_licence(prod_info, 
263                                             config.PATHS.LICENCEPATH) 
264         if src.get_property_in_product_cfg(prod_info, "is_salome_application") == "yes":
265             salome_application_name=prod_info.name
266
267     # if the application contains an application module, we set ABSOLUTE_APPLI_PATH to it
268     # if not we set it to KERNEL_INSTALL_DIR, which is sufficient, except for salome test
269     if salome_application_name == "Not defined":
270         app_root_dir=kernel_root_dir
271     else:
272         app_root_dir=os.path.join(binaries_dir_name, salome_application_name)
273
274     additional_env={}
275     additional_env['sat_bin_kernel_install_dir'] = "out_dir_Path + " +\
276                                                    config.VARS.sep + bin_kernel_install_dir
277     if "python3" in config.APPLICATION and config.APPLICATION.python3 == "yes":
278         additional_env['sat_python_version'] = 3
279     else:
280         additional_env['sat_python_version'] = 2
281
282     additional_env['ABSOLUTE_APPLI_PATH'] = "out_dir_Path" + config.VARS.sep + app_root_dir
283     launcher_name = src.get_launcher_name(config)
284     additional_env['APPLI'] = "out_dir_Path" + config.VARS.sep + file_name
285
286     # create an environment file writer
287     writer = src.environment.FileEnvWriter(config,
288                                            logger,
289                                            file_dir,
290                                            src_root=None,
291                                            env_info=None)
292     
293     filepath = os.path.join(file_dir, file_name)
294     # Write
295     writer.write_env_file(filepath,
296                           False,  # for launch
297                           "cfgForPy",
298                           additional_env=additional_env,
299                           no_path_init="False",
300                           for_package = binaries_dir_name)
301     
302     # Little hack to put out_dir_Path outside the strings
303     src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' )
304     src.replace_in_file(filepath, "r'out_dir_Path + ", "out_dir_Path + r'" )
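    # Illustrative effect of the two substitutions above on a generated line
    # (the setVariable call is hypothetical, only the quoting pattern matters):
    #   before: context.setVariable(r"out_dir_Path/BINARIES-CO7/KERNEL/lib", ...)
    #   after:  context.setVariable(out_dir_Path + r"/BINARIES-CO7/KERNEL/lib", ...)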
305     
306     # A hack to put a call to a file for distene licence.
307     # It does nothing to an application that has no distene product
308     if distene_licence_file_name:
309         logger.write("Application has a distene licence file! We use it in package launcher", 5)
310         hack_for_distene_licence(filepath, distene_licence_file_name)
311        
312     # change the rights in order to make the file executable for everybody
313     os.chmod(filepath,
314              stat.S_IRUSR |
315              stat.S_IRGRP |
316              stat.S_IROTH |
317              stat.S_IWUSR |
318              stat.S_IXUSR |
319              stat.S_IXGRP |
320              stat.S_IXOTH)
321
322     # restore modified setting by its initial value
323     config.APPLICATION.base=base_setting
324
325     return filepath
326
327 def hack_for_distene_licence(filepath, licence_file):
328     '''Replace the distene licence env variable by a call to a file.
329     
330     :param filepath Str: The path to the launcher to modify.
331     '''  
332     shutil.move(filepath, filepath + "_old")
333     fileout= filepath
334     filein = filepath + "_old"
335     fin = open(filein, "r")
336     fout = open(fileout, "w")
337     text = fin.readlines()
338     # Find the Distene section
339     num_line = -1
340     for i,line in enumerate(text):
341         if "# Set DISTENE License" in line:
342             num_line = i
343             break
344     if num_line == -1:
345         # No distene product, there is nothing to do
346         fin.close()
347         for line in text:
348             fout.write(line)
349         fout.close()
350         return
351     del text[num_line +1]
352     del text[num_line +1]
353     text_to_insert ="""    try:
354         distene_licence_file=r"%s"
355         if sys.version_info[0] >= 3 and sys.version_info[1] >= 5:
356             import importlib.util
357             spec_dist = importlib.util.spec_from_file_location("distene_licence", distene_licence_file)
358             distene=importlib.util.module_from_spec(spec_dist)
359             spec_dist.loader.exec_module(distene)
360         else:
361             import imp
362             distene = imp.load_source('distene_licence', distene_licence_file)
363         distene.set_distene_variables(context)
364     except:
365         pass\n"""  % licence_file
366     text.insert(num_line + 1, text_to_insert)
367     for line in text:
368         fout.write(line)
369     fin.close()    
370     fout.close()
371     return
372     
373 def produce_relative_env_files(config,
374                               logger,
375                               file_dir,
376                               binaries_dir_name,
377                               exe_name=None):
378     '''Create some specific environment files for the binary package. These 
379        files use relative paths.
380     
381     :param config Config: The global configuration.
382     :param logger Logger: the logging instance
383     :param file_dir str: the directory where to put the files
384     :param binaries_dir_name str: the name of the directory, inside the
385                                   archive, that contains the binaries.
386     :param exe_name str: if given generate a launcher executing exe_name
387     :return: the list of path of the produced environment files
388     :rtype: List
389     '''  
390
391     # set base mode to "no" for the archive - save current mode to restore it at the end
392     if "base" in config.APPLICATION:
393         base_setting=config.APPLICATION.base 
394     else:
395         base_setting="maybe"
396     config.APPLICATION.base="no"
397
398     # create an environment file writer
399     writer = src.environment.FileEnvWriter(config,
400                                            logger,
401                                            file_dir,
402                                            src_root=None)
403     
404     if src.architecture.is_windows():
405       shell = "bat"
406       filename  = "env_launch.bat"
407     else:
408       shell = "bash"
409       filename  = "env_launch.sh"
410
411     if exe_name:
412         filename=os.path.basename(exe_name)
413
414     # Write
415     filepath = writer.write_env_file(filename,
416                           False, # for launch
417                           shell,
418                           for_package = binaries_dir_name)
419
420     # Little hack to put out_dir_Path as environment variable
421     if src.architecture.is_windows() :
422       src.replace_in_file(filepath, '"out_dir_Path', '"%out_dir_Path%' )
423       src.replace_in_file(filepath, '=out_dir_Path', '=%out_dir_Path%' )
424       src.replace_in_file(filepath, ';out_dir_Path', ';%out_dir_Path%' )
425     else:
426       src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )
427       src.replace_in_file(filepath, ':out_dir_Path', ':${out_dir_Path}' )
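    # Illustrative effect on a generated env_launch.sh line (hypothetical content;
    # on Windows the %out_dir_Path% form is used instead):
    #   before: export PATH="out_dir_Path/BINARIES-CO7/KERNEL/bin:${PATH}"
    #   after:  export PATH="${out_dir_Path}/BINARIES-CO7/KERNEL/bin:${PATH}"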
428
429     if exe_name:
430         if src.architecture.is_windows():
431             cmd="\n\nrem Launch exe with user arguments\n%s " % exe_name + "%*"
432         else:
433             cmd='\n\n# Launch exe with user arguments\n%s "$*"' % exe_name
434         with open(filepath, "a") as exe_launcher:
435             exe_launcher.write(cmd)
436
437     # change the rights in order to make the file executable for everybody
438     os.chmod(filepath,
439              stat.S_IRUSR |
440              stat.S_IRGRP |
441              stat.S_IROTH |
442              stat.S_IWUSR |
443              stat.S_IXUSR |
444              stat.S_IXGRP |
445              stat.S_IXOTH)
446     
447     # restore modified setting by its initial value
448     config.APPLICATION.base=base_setting
449
450     return filepath
451
452 def produce_install_bin_file(config,
453                              logger,
454                              file_dir,
455                              d_sub,
456                              file_name):
457     '''Create a bash shell script which does substitutions in the BINARIES
458        directory in order to use it for extra compilations.
459     
460     :param config Config: The global configuration.
461     :param logger Logger: the logging instance
462     :param file_dir str: the directory where to put the files
463     :param d_sub dict: the dictionary that contains the substitutions to be done
464     :param file_name str: the name of the install script file
465     :return: the produced file
466     :rtype: str
467     '''  
468     # Write
469     filepath = os.path.join(file_dir, file_name)
470     # open the file and write into it
471     # use codec utf-8 as sat variables are in unicode
472     with codecs.open(filepath, "w", 'utf-8') as installbin_file:
473         installbin_template_path = os.path.join(config.VARS.internal_dir,
474                                         "INSTALL_BIN.template")
475         
476         # build the name of the directory that will contain the binaries
477         binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
478         # build the substitution loop
479         loop_cmd = "for f in $(grep -RIl"
480         for key in d_sub:
481             loop_cmd += " -e "+ key
482         loop_cmd += ' ' + config.INTERNAL.config.install_dir +\
483                     '); do\n     sed -i "\n'
484         for key in d_sub:
485             loop_cmd += "        s?" + key + "?$(pwd)/" + d_sub[key] + "?g\n"
486         loop_cmd += '            " $f\ndone'
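
        # Illustrative value of loop_cmd for a single substitution (the key and
        # replacement below are hypothetical); it expands roughly to:
        #   for f in $(grep -RIl -e /old/prefix INSTALL); do
        #        sed -i "
        #           s?/old/prefix?$(pwd)/BINARIES-CO7?g
        #               " $f
        #   done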
487
488         d={}
489         d["BINARIES_DIR"] = binaries_dir_name
490         d["SUBSTITUTION_LOOP"]=loop_cmd
491         d["INSTALL_DIR"]=config.INTERNAL.config.install_dir
492         
493         # substitute the template and write it in file
494         content=src.template.substitute(installbin_template_path, d)
495         installbin_file.write(content)
496         # change the rights in order to make the file executable for everybody
497         os.chmod(filepath,
498                  stat.S_IRUSR |
499                  stat.S_IRGRP |
500                  stat.S_IROTH |
501                  stat.S_IWUSR |
502                  stat.S_IXUSR |
503                  stat.S_IXGRP |
504                  stat.S_IXOTH)
505     
506     return filepath
507
508 def product_appli_creation_script(config,
509                                   logger,
510                                   file_dir,
511                                   binaries_dir_name):
512     '''Create a script that can produce an application (EDF style) in the binary
513        package.
514     
515     :param config Config: The global configuration.
516     :param logger Logger: the logging instance
517     :param file_dir str: the directory where to put the file
518     :param binaries_dir_name str: the name of the directory, inside the
519                                   archive, that contains the binaries.
520     :return: the path of the produced script file
521     :rtype: Str
522     '''
523     template_name = "create_appli.py.for_bin_packages.template"
524     template_path = os.path.join(config.VARS.internal_dir, template_name)
525     text_to_fill = open(template_path, "r").read()
526     text_to_fill = text_to_fill.replace("TO BE FILLED 1",
527                                         '"' + binaries_dir_name + '"')
528     
529     text_to_add = ""
530     for product_name in get_SALOME_modules(config):
531         product_info = src.product.get_product_config(config, product_name)
532        
533         if src.product.product_is_smesh_plugin(product_info):
534             continue
535
536         if 'install_dir' in product_info and bool(product_info.install_dir):
537             if src.product.product_is_cpp(product_info):
538                 # cpp module
539                 for cpp_name in src.product.get_product_components(product_info):
540                     line_to_add = ("<module name=\"" + 
541                                    cpp_name + 
542                                    "\" gui=\"yes\" path=\"''' + "
543                                    "os.path.join(dir_bin_name, \"" + 
544                                    cpp_name + "\") + '''\"/>")
545             else:
546                 # regular module
547                 line_to_add = ("<module name=\"" + 
548                                product_name + 
549                                "\" gui=\"yes\" path=\"''' + "
550                                "os.path.join(dir_bin_name, \"" + 
551                                product_name + "\") + '''\"/>")
552             text_to_add += line_to_add + "\n"
553     
554     filled_text = text_to_fill.replace("TO BE FILLED 2", text_to_add)
555     
556     tmp_file_path = os.path.join(file_dir, "create_appli.py")
557     ff = open(tmp_file_path, "w")
558     ff.write(filled_text)
559     ff.close()
560     
561     # change the rights in order to make the file executable for everybody
562     os.chmod(tmp_file_path,
563              stat.S_IRUSR |
564              stat.S_IRGRP |
565              stat.S_IROTH |
566              stat.S_IWUSR |
567              stat.S_IXUSR |
568              stat.S_IXGRP |
569              stat.S_IXOTH)
570     
571     return tmp_file_path
572
573 def bin_products_archives(config, logger):
574     '''Prepare binary packages for all products
575     :param config Config: The global configuration.
576     :return: the error status
577     :rtype: int
578     '''
579
580     logger.write("Make %s binary archives\n" % config.VARS.dist)
581     # Get the default directory where to put the packages
582     binpackage_path = os.path.join(config.APPLICATION.workdir, "PACKAGE", "products")
583     src.ensure_path_exists(binpackage_path)
584     # Get the list of product installation to add to the archive
585     l_products_name = sorted(config.APPLICATION.products.keys())
586     l_product_info = src.product.get_products_infos(l_products_name,
587                                                     config)
588     # first loop on products : filter products, analyse properties,
589     # and store the information that will be used to create the archive in the second loop 
590     l_not_installed=[] # store not installed products for warning at the end
591     for prod_name, prod_info in l_product_info:
592         # ignore the native and fixed products for install directories
593         if (src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes"
594                 or src.product.product_is_native(prod_info) 
595                 or src.product.product_is_fixed(prod_info)
596                 or not src.product.product_compiles(prod_info)):
597             continue
598         if not src.product.check_installation(config, prod_info):
599             l_not_installed.append(prod_name)
600             continue  # product is not installed, we skip it
601         # prepare call to make_bin_archive
602         path_targz_prod = os.path.join(binpackage_path, prod_name + '-' + prod_info.version + "-" + config.VARS.dist + PACKAGE_EXT) 
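        # e.g. (illustrative): <workdir>/PACKAGE/products/BOOST-1.71.0-CO7.tar.gz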
603         targz_prod = tarfile.open(path_targz_prod, mode='w:gz')
604         bin_path = prod_info.install_dir
605         targz_prod.add(bin_path)
606         targz_prod.close()
607         # compute the MD5 checksum of the archive and store it in a .md5 file
608         import hashlib
609         with open(path_targz_prod,"rb") as f:
610             data = f.read() # read the archive content as bytes
611             readable_hash = hashlib.md5(data).hexdigest()
612             with open(path_targz_prod+".md5", "w") as md5sum:
613                md5sum.write("%s  %s" % (readable_hash, os.path.basename(path_targz_prod))) 
614             logger.write("   archive : %s   (md5sum = %s)\n" % (path_targz_prod, readable_hash))
615
616     return 0
617
618 def binary_package(config, logger, options, tmp_working_dir):
619     '''Prepare a dictionary that stores all the needed directories and files to
620        add in a binary package.
621     
622     :param config Config: The global configuration.
623     :param logger Logger: the logging instance
624     :param options OptResult: the options of the launched command
625     :param tmp_working_dir str: The temporary local directory containing some 
626                                 specific directories or files needed in the 
627                                 binary package
628     :return: the dictionary that stores all the needed directories and files to
629              add in a binary package.
630              {label : (path_on_local_machine, path_in_archive)}
631     :rtype: dict
632     '''
633
634     # Get the list of product installation to add to the archive
635     l_products_name = sorted(config.APPLICATION.products.keys())
636     l_product_info = src.product.get_products_infos(l_products_name,
637                                                     config)
638
639     # suppress compile time products for binaries-only archives
640     if not options.sources:
641         update_config(config, logger, "compile_time", "yes")
642
643     l_install_dir = []
644     l_source_dir = []
645     l_not_installed = []
646     l_sources_not_present = []
647     generate_mesa_launcher = False  # a flag to know if we generate a mesa launcher
648     if ("APPLICATION" in config  and
649         "properties"  in config.APPLICATION  and
650         "mesa_launcher_in_package"    in config.APPLICATION.properties  and
651         config.APPLICATION.properties.mesa_launcher_in_package == "yes") :
652             generate_mesa_launcher=True
653
654     # first loop on products : filter products, analyse properties,
655     # and store the information that will be used to create the archive in the second loop 
656     for prod_name, prod_info in l_product_info:
657         # skip product with property not_in_package set to yes
658         if src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes":
659             continue  
660
661         # Add the sources of the products that have the property 
662         # sources_in_package : "yes"
663         if src.get_property_in_product_cfg(prod_info,
664                                            "sources_in_package") == "yes":
665             if os.path.exists(prod_info.source_dir):
666                 l_source_dir.append((prod_name, prod_info.source_dir))
667             else:
668                 l_sources_not_present.append(prod_name)
669
670         # ignore the native and fixed products for install directories
671         if (src.product.product_is_native(prod_info) 
672                 or src.product.product_is_fixed(prod_info)
673                 or not src.product.product_compiles(prod_info)):
674             continue
675         # 
676         # products with single_dir property will be installed in the PRODUCTS directory of the archive
677         is_single_dir=(src.appli_test_property(config,"single_install_dir", "yes") and \
678                        src.product.product_test_property(prod_info,"single_install_dir", "yes"))
679         if src.product.check_installation(config, prod_info):
680             l_install_dir.append((prod_name, prod_info.name, prod_info.install_dir,
681                                   is_single_dir, prod_info.install_mode))
682         else:
683             l_not_installed.append(prod_name)
684         
685         # Add also the cpp generated modules (if any)
686         if src.product.product_is_cpp(prod_info):
687             # cpp module
688             for name_cpp in src.product.get_product_components(prod_info):
689                 install_dir = os.path.join(config.APPLICATION.workdir,
690                                            config.INTERNAL.config.install_dir,
691                                            name_cpp) 
692                 if os.path.exists(install_dir):
693                     l_install_dir.append((name_cpp, name_cpp, install_dir, False, "value"))
694                 else:
695                     l_not_installed.append(name_cpp)
696         
697     # check the name of the directory that could contain the binaries 
698     # from a previous detar
699     binaries_from_detar = os.path.join(
700                               config.APPLICATION.workdir,
701                               config.INTERNAL.config.binary_dir + config.VARS.dist)
702     if os.path.exists(binaries_from_detar):
703          logger.write("""
704 WARNING: existing binaries directory from a previous detar installation:
705          %s
706          To make a new package from this, you have to: 
707          1) install the binaries in the INSTALL directory with the script "install_bin.sh" 
708             (see the README file for more details)
709          2) or recompile everything in INSTALL with the "sat compile" command 
710             (this step is long, and requires some Linux packages to be installed 
711             on your system)\n
712 """ % binaries_from_detar)
713     
714     # Print warning or error if there are some missing products
715     if len(l_not_installed) > 0:
716         text_missing_prods = ""
717         for p_name in l_not_installed:
718             text_missing_prods += " - " + p_name + "\n"
719         if not options.force_creation:
720             msg = _("ERROR: there are missing product installations:")
721             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
722                                      text_missing_prods),
723                          1)
724             raise src.SatException(msg)
725         else:
726             msg = _("WARNING: there are missing product installations:")
727             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
728                                      text_missing_prods),
729                          1)
730
731     # Do the same for sources
732     if len(l_sources_not_present) > 0:
733         text_missing_prods = ""
734         for p_name in l_sources_not_present:
735             text_missing_prods += " - " + p_name + "\n"
736         if not options.force_creation:
737             msg = _("ERROR: there are missing product sources:")
738             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
739                                      text_missing_prods),
740                          1)
741             raise src.SatException(msg)
742         else:
743             msg = _("WARNING: there are missing product sources:")
744             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
745                                      text_missing_prods),
746                          1)
747  
748     # construct the name of the directory that will contain the binaries
749     if src.architecture.is_windows():
750         binaries_dir_name = config.INTERNAL.config.binary_dir
751     else:
752         binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
753     # construct the correlation table between the product names, their 
754     # actual install directories and their install directory in the archive
755     d_products = {}
756     for prod_name, prod_info_name, install_dir, is_single_dir, install_mode in l_install_dir:
757         prod_base_name=os.path.basename(install_dir)
758         if install_mode == "base":
759             # case of a product installed in base. 
760             # because the archive is in base:no mode, the name of the install dir is different inside the archive:
761             # we set it to the product name, or to PRODUCTS if single_install_dir is used
762             if is_single_dir:
763                 prod_base_name=config.INTERNAL.config.single_install_dir
764             else:
765                 prod_base_name=prod_info_name
766         path_in_archive = os.path.join(binaries_dir_name, prod_base_name)
767         d_products[prod_name + " (bin)"] = (install_dir, path_in_archive)
768         
769     for prod_name, source_dir in l_source_dir:
770         path_in_archive = os.path.join("SOURCES", prod_name)
771         d_products[prod_name + " (sources)"] = (source_dir, path_in_archive)
772
773     # for packages of SALOME applications including KERNEL, 
774     # we produce a salome launcher or a virtual application (depending on salome version)
775     if 'KERNEL' in config.APPLICATION.products:
776         VersionSalome = src.get_salome_version(config)
777         # Case where SALOME has the launcher that uses the SalomeContext API
778         if VersionSalome >= 730:
779             # create the relative launcher and add it to the files to add
780             launcher_name = src.get_launcher_name(config)
781             launcher_package = produce_relative_launcher(config,
782                                                  logger,
783                                                  tmp_working_dir,
784                                                  launcher_name,
785                                                  binaries_dir_name)
786             d_products["launcher"] = (launcher_package, launcher_name)
787
788             # if the application contains mesa products, we generate in addition to the 
789             # classical salome launcher a launcher using mesa and called mesa_salome 
790             # (the mesa launcher will be used for remote usage through ssh).
791             if generate_mesa_launcher:
792                 #if there is one : store the use_mesa property
793                 restore_use_mesa_option=None
794                 if ('properties' in config.APPLICATION and 
795                     'use_mesa' in config.APPLICATION.properties):
796                     restore_use_mesa_option = config.APPLICATION.properties.use_mesa
797
798                 # activate mesa property, and generate a mesa launcher
799                 src.activate_mesa_property(config)  #activate use_mesa property
800                 launcher_mesa_name="mesa_"+launcher_name
801                 launcher_package_mesa = produce_relative_launcher(config,
802                                                      logger,
803                                                      tmp_working_dir,
804                                                      launcher_mesa_name,
805                                                      binaries_dir_name)
806                 d_products["launcher (mesa)"] = (launcher_package_mesa, launcher_mesa_name)
807
808                 # if there was a use_mesa value, we restore it
809                 # else we set it to the default value "no"
810                 if restore_use_mesa_option != None:
811                     config.APPLICATION.properties.use_mesa=restore_use_mesa_option
812                 else:
813                     config.APPLICATION.properties.use_mesa="no"
814
815             if options.sources:
816                 # if we mix binaries and sources, we add a copy of the launcher, 
817                 # prefixed with "bin", in order to avoid clashes
818                 launcher_copy_name="bin"+launcher_name
819                 launcher_package_copy = produce_relative_launcher(config,
820                                                      logger,
821                                                      tmp_working_dir,
822                                                      launcher_copy_name,
823                                                      binaries_dir_name)
824                 d_products["launcher (copy)"] = (launcher_package_copy, launcher_copy_name)
825         else:
826             # Provide a script for the creation of an application EDF style
827             appli_script = product_appli_creation_script(config,
828                                                         logger,
829                                                         tmp_working_dir,
830                                                         binaries_dir_name)
831             
832             d_products["appli script"] = (appli_script, "create_appli.py")
833
834     # Put also the environment file
835     env_file = produce_relative_env_files(config,
836                                            logger,
837                                            tmp_working_dir,
838                                            binaries_dir_name)
839
840     if src.architecture.is_windows():
841       filename  = "env_launch.bat"
842     else:
843       filename  = "env_launch.sh"
844     d_products["environment file"] = (env_file, filename)      
845
846     # If option exe, produce an extra launcher based on specified exe
847     if options.exe:
848         exe_file = produce_relative_env_files(config,
849                                               logger,
850                                               tmp_working_dir,
851                                               binaries_dir_name,
852                                               options.exe)
853             
854         if src.architecture.is_windows():
855           filename  = os.path.basename(options.exe) + ".bat"
856         else:
857           filename  = os.path.basename(options.exe) + ".sh"
858         d_products["exe file"] = (exe_file, filename)      
859     
860
861     return d_products
862
863 def source_package(sat, config, logger, options, tmp_working_dir):
864     '''Prepare a dictionary that stores all the needed directories and files to
865        add in a source package.
866     
867     :param config Config: The global configuration.
868     :param logger Logger: the logging instance
869     :param options OptResult: the options of the launched command
870     :param tmp_working_dir str: The temporary local directory containing some 
871                                 specific directories or files needed in the 
872                                 source package
873     :return: the dictionary that stores all the needed directories and files to
874              add in a source package.
875              {label : (path_on_local_machine, path_in_archive)}
876     :rtype: dict
877     '''
878     
879     d_archives={}
880     # Get all the products that are prepared using an archive
881     # unless ftp mode is specified (in this case the user of the
882     # archive will get the sources through the ftp mode of sat prepare)
883     if not options.ftp:
884         logger.write("Find archive products ... ")
885         d_archives, l_pinfo_vcs = get_archives(config, logger)
886         logger.write("Done\n")
887
888     d_archives_vcs = {}
889     if not options.with_vcs and len(l_pinfo_vcs) > 0:
890         # Make archives with the products that are not prepared using an archive
891         # (git, cvs, svn, etc)
892         logger.write("Construct archives for vcs products ... ")
893         d_archives_vcs = get_archives_vcs(l_pinfo_vcs,
894                                           sat,
895                                           config,
896                                           logger,
897                                           tmp_working_dir)
898         logger.write("Done\n")
899
900     # Create a project
901     logger.write("Create the project ... ")
902     d_project = create_project_for_src_package(config,
903                                                tmp_working_dir,
904                                                options.with_vcs,
905                                                options.ftp)
906     logger.write("Done\n")
907     
908     # Add salomeTools
909     tmp_sat = add_salomeTools(config, tmp_working_dir)
910     d_sat = {"salomeTools" : (tmp_sat, "salomeTools")}
911     
912     # Add a sat symbolic link if not win
913     if not src.architecture.is_windows():
914         tmp_satlink_path = os.path.join(tmp_working_dir, 'sat')
915         try:
916             t = os.getcwd()
917         except:
918             # In the jobs, os.getcwd() can fail
919             t = config.LOCAL.workdir
920         os.chdir(tmp_working_dir)
921         if os.path.lexists(tmp_satlink_path):
922             os.remove(tmp_satlink_path)
923         os.symlink(os.path.join('salomeTools', 'sat'), 'sat')
924         os.chdir(t)
925         
926         d_sat["sat link"] = (tmp_satlink_path, "sat")
927     
928     d_source = src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
929     return d_source
930
931 def get_archives(config, logger):
932     '''Find all the products that are retrieved using an archive and all the
933        products that are retrieved from a vcs (git, cvs, svn) repository.
934     
935     :param config Config: The global configuration.
936     :param logger Logger: the logging instance
937     :return: the dictionary {name_product : 
938              (local path of its archive, path in the package of its archive )}
939              and the list of specific configuration corresponding to the vcs 
940              products
941     :rtype: (Dict, List)
942     '''
943     # Get the list of product information
944     l_products_name = config.APPLICATION.products.keys()
945     l_product_info = src.product.get_products_infos(l_products_name,
946                                                     config)
947     d_archives = {}
948     l_pinfo_vcs = []
949     for p_name, p_info in l_product_info:
950         # skip product with property not_in_package set to yes
951         if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
952             continue  
953         # ignore the native and fixed products
954         if (src.product.product_is_native(p_info) 
955                 or src.product.product_is_fixed(p_info)):
956             continue
957         if p_info.get_source == "archive":
958             archive_path = p_info.archive_info.archive_name
959             archive_name = os.path.basename(archive_path)
960             d_archives[p_name] = (archive_path,
961                                   os.path.join(ARCHIVE_DIR, archive_name))
962             if (src.appli_test_property(config,"pip", "yes") and 
963                 src.product.product_test_property(p_info,"pip", "yes")):
964                 # if pip mode is activated, and product is managed by pip
965                 pip_wheels_dir=os.path.join(config.LOCAL.archive_dir,"wheels")
966                 pip_wheel_pattern=os.path.join(pip_wheels_dir, 
967                     "%s-%s*" % (p_info.name, p_info.version))
968                 pip_wheel_path=glob.glob(pip_wheel_pattern)
969                 msg_pip_not_found="Error in get_archive, pip wheel for "\
970                                   "product %s-%s was not found in %s directory"
971                 msg_pip_two_or_more="Error in get_archive, several pip wheels for "\
972                                   "product %s-%s were found in %s directory"
973                 if len(pip_wheel_path)==0:
974                     raise src.SatException(msg_pip_not_found %\
975                         (p_info.name, p_info.version, pip_wheels_dir))
976                 if len(pip_wheel_path)>1:
977                     raise src.SatException(msg_pip_two_or_more %\
978                         (p_info.name, p_info.version, pip_wheels_dir))
979
980                 pip_wheel_name=os.path.basename(pip_wheel_path[0])
981                 d_archives[p_name+" (pip wheel)"]=(pip_wheel_path[0], 
982                     os.path.join(ARCHIVE_DIR, "wheels", pip_wheel_name))
983         else:
984             # this product is not managed by archive, 
985             # an archive of the vcs directory will be created by get_archives_vcs
986             l_pinfo_vcs.append((p_name, p_info)) 
987             
988     return d_archives, l_pinfo_vcs
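
# Illustrative return value of get_archives (the product names and local paths
# below are hypothetical):
#   d_archives  = {"boost": ("/data/ARCHIVES/boost-1.71.0.tar.gz",
#                            "ARCHIVES/boost-1.71.0.tar.gz")}
#   l_pinfo_vcs = [("MEDCOUPLING", <config of the MEDCOUPLING product>)]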
989
990 def add_salomeTools(config, tmp_working_dir):
991     '''Prepare a version of salomeTools that has a specific local.pyconf file 
992        configured for a source package.
993
994     :param config Config: The global configuration.
995     :param tmp_working_dir str: The temporary local directory containing some 
996                                 specific directories or files needed in the 
997                                 source package
998     :return: The path to the local salomeTools directory to add in the package
999     :rtype: str
1000     '''
1001     # Copy sat in the temporary working directory
1002     sat_tmp_path = src.Path(os.path.join(tmp_working_dir, "salomeTools"))
1003     sat_running_path = src.Path(config.VARS.salometoolsway)
1004     sat_running_path.copy(sat_tmp_path)
1005     
1006     # Update the local.pyconf file that contains the path to the project
1007     local_pyconf_name = "local.pyconf"
1008     local_pyconf_dir = os.path.join(tmp_working_dir, "salomeTools", "data")
1009     local_pyconf_file = os.path.join(local_pyconf_dir, local_pyconf_name)
1010     # Remove the .pyconf file in the root directory of salomeTools if there is
1011     # any. (For example when launching jobs, a pyconf file describing the jobs 
1012     # can be here and is not useful) 
1013     files_or_dir_SAT = os.listdir(os.path.join(tmp_working_dir, "salomeTools"))
1014     for file_or_dir in files_or_dir_SAT:
1015         if file_or_dir.endswith(".pyconf") or file_or_dir.endswith(".txt"):
1016             file_path = os.path.join(tmp_working_dir,
1017                                      "salomeTools",
1018                                      file_or_dir)
1019             os.remove(file_path)
1020     
1021     ff = open(local_pyconf_file, "w")
1022     ff.write(LOCAL_TEMPLATE)
1023     ff.close()
1024     
1025     return sat_tmp_path.path
1026
1027 def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir):
1028     '''For source packages that require all products to be retrieved as
1029        archives, one has to create an archive for each vcs product.
1030        So this method calls the clean and source commands of sat and then
1031        creates the archives.
1032
1033     :param l_pinfo_vcs List: The list of specific configuration corresponding to
1034                              each vcs product
1035     :param sat Sat: The Sat instance that can be called to clean and source the
1036                     products
1037     :param config Config: The global configuration.
1038     :param logger Logger: the logging instance
1039     :param tmp_working_dir str: The temporary local directory containing some 
1040                                 specific directories or files needed in the 
1041                                 source package
1042     :return: the dictionary that stores all the archives to add in the source 
1043              package. {label : (path_on_local_machine, path_in_archive)}
1044     :rtype: dict
1045     '''
1046     # clean the source directory of all the vcs products, then use the source 
1047     # command and thus construct an archive that will not contain the patches
1048     l_prod_names = [pn for pn, __ in l_pinfo_vcs]
1049     if False: # clean is dangerous in user/SOURCES, fixed in tmp_local_working_dir
1050       logger.write(_("\nclean sources\n"))
1051       args_clean = config.VARS.application
1052       args_clean += " --sources --products "
1053       args_clean += ",".join(l_prod_names)
1054       logger.write("WARNING: get_archives_vcs clean\n         '%s'\n" % args_clean, 1)
1055       sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)
1056     if True:
1057       # source
1058       logger.write(_("get sources\n"))
1059       args_source = config.VARS.application
1060       args_source += " --products "
1061       args_source += ",".join(l_prod_names)
1062       svgDir = sat.cfg.APPLICATION.workdir
1063       tmp_local_working_dir = os.path.join(sat.cfg.APPLICATION.workdir, "tmp_package")  # to avoid too many big files in /tmp
1064       sat.cfg.APPLICATION.workdir = tmp_local_working_dir
1065       # DBG.write("SSS sat config.APPLICATION.workdir", sat.cfg.APPLICATION, True)
1066       # DBG.write("sat config id", id(sat.cfg), True)
1067       # note: config does not have the same id() as the one used by sat.source()
1068       # sat.source(args_source, batch=True, verbose=5, logger_add_link = logger)
1069       import source
1070       source.run(args_source, sat, logger) #use this mode as runner.cfg reference
1071       
1072       # make the new archives
1073       d_archives_vcs = {}
1074       for pn, pinfo in l_pinfo_vcs:
1075           path_archive = make_archive(pn, pinfo, tmp_local_working_dir)
1076           logger.write("make archive vcs '%s'\n" % path_archive)
1077           d_archives_vcs[pn] = (path_archive,
1078                                 os.path.join(ARCHIVE_DIR, pn + ".tgz"))
1079       sat.cfg.APPLICATION.workdir = svgDir
1080       # DBG.write("END sat config", sat.cfg.APPLICATION, True)
1081     return d_archives_vcs
1082
1083 def make_bin_archive(prod_name, prod_info, where):
1084     '''Create a binary archive of a product from its install directory.
1085
1086     :param prod_name str: The name of the product.
1087     :param prod_info Config: The specific configuration corresponding to the 
1088                              product
1089     :param where str: The path of the directory where to put the resulting 
1090                       archive
1091     :return: The path of the resulting archive
1092     :rtype: str
1093     '''
1094     path_targz_prod = os.path.join(where, prod_name + PACKAGE_EXT)
1095     tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
1096     bin_path = prod_info.install_dir
1097     tar_prod.add(bin_path, arcname=prod_name)  # store the install dir under the product name in the archive
1098     tar_prod.close()
1099     return path_targz_prod       
1100
1101 def make_archive(prod_name, prod_info, where):
1102     '''Create an archive of a product by searching its source directory.
1103
1104     :param prod_name str: The name of the product.
1105     :param prod_info Config: The specific configuration corresponding to the 
1106                              product
1107     :param where str: The path of the directory where to put the resulting 
1108                       archive
1109     :return: The path of the resulting archive
1110     :rtype: str
1111     '''
1112     path_targz_prod = os.path.join(where, prod_name + PACKAGE_EXT)
1113     tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
1114     local_path = prod_info.source_dir
1115     if old_python:
1116         tar_prod.add(local_path,
1117                      arcname=prod_name,
1118                      exclude=exclude_VCS_and_extensions_26)
1119     else:
1120         tar_prod.add(local_path,
1121                      arcname=prod_name,
1122                      filter=exclude_VCS_and_extensions)
1123     tar_prod.close()
1124     return path_targz_prod       
1125
1126 def create_project_for_src_package(config, tmp_working_dir, with_vcs, with_ftp):
1127     '''Create a specific project for a source package.
1128
1129     :param config Config: The global configuration.
1130     :param tmp_working_dir str: The temporary local directory containing some 
1131                                 specific directories or files needed in the 
1132                                 source package
1133     :param with_vcs boolean: True if the package is with vcs products (not 
1134                              transformed into archive products)
1135     :param with_ftp boolean: True if the package uses ftp servers to get archives
1136     :return: The dictionary 
1137              {"project" : (produced project, project path in the archive)}
1138     :rtype: Dict
1139     '''
1140
1141     # Create in the working temporary directory the full project tree
1142     project_tmp_dir = os.path.join(tmp_working_dir, PROJECT_DIR)
1143     products_pyconf_tmp_dir = os.path.join(project_tmp_dir,
1144                                          "products")
1145     compil_scripts_tmp_dir = os.path.join(project_tmp_dir,
1146                                          "products",
1147                                          "compil_scripts")
1148     env_scripts_tmp_dir = os.path.join(project_tmp_dir,
1149                                          "products",
1150                                          "env_scripts")
1151     patches_tmp_dir = os.path.join(project_tmp_dir,
1152                                          "products",
1153                                          "patches")
1154     application_tmp_dir = os.path.join(project_tmp_dir,
1155                                          "applications")
1156     for directory in [project_tmp_dir,
1157                       compil_scripts_tmp_dir,
1158                       env_scripts_tmp_dir,
1159                       patches_tmp_dir,
1160                       application_tmp_dir]:
1161         src.ensure_path_exists(directory)
1162
1163     # Create the pyconf that contains the information of the project
1164     project_pyconf_name = "project.pyconf"        
1165     project_pyconf_file = os.path.join(project_tmp_dir, project_pyconf_name)
1166     ff = open(project_pyconf_file, "w")
1167     ff.write(PROJECT_TEMPLATE)
1168     if with_ftp and len(config.PATHS.ARCHIVEFTP) > 0:
1169         ftp_path='ARCHIVEFTP : "'+config.PATHS.ARCHIVEFTP[0]
1170         for ftpserver in config.PATHS.ARCHIVEFTP[1:]:
1171             ftp_path=ftp_path+":"+ftpserver
1172         ftp_path+='"'
1173         ff.write("# ftp servers where to search for prerequisite archives\n")
1174         ff.write(ftp_path)
1175     # add licence paths if any
1176     if len(config.PATHS.LICENCEPATH) > 0:  
1177         licence_path='LICENCEPATH : "'+config.PATHS.LICENCEPATH[0]
1178         for path in config.PATHS.LICENCEPATH[1:]:
1179             licence_path=licence_path+":"+path
1180         licence_path+='"'
1181         ff.write("\n# Where to search for licences\n")
1182         ff.write(licence_path)
1183         
1184
1185     ff.close()
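         # For illustration, with two ftp servers and one licence directory the
         # generated project.pyconf would then end with lines such as (values
         # below are hypothetical):
         #   ARCHIVEFTP : "ftp://server1/pub:ftp://server2/pub"
         #   LICENCEPATH : "/path/to/licences"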
1186     
1187     # Loop over the products to get their pyconf files and all their scripts
1188     # (compilation, environment, patches)
1189     # and create the pyconf file to add to the project
1190     lproducts_name = config.APPLICATION.products.keys()
1191     l_products = src.product.get_products_infos(lproducts_name, config)
1192     for p_name, p_info in l_products:
1193         # skip product with property not_in_package set to yes
1194         if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
1195             continue  
1196         find_product_scripts_and_pyconf(p_name,
1197                                         p_info,
1198                                         config,
1199                                         with_vcs,
1200                                         compil_scripts_tmp_dir,
1201                                         env_scripts_tmp_dir,
1202                                         patches_tmp_dir,
1203                                         products_pyconf_tmp_dir)
1204     
1205     # for the application pyconf, write the in-memory config directly
1206     # instead of searching for the original pyconf file, to avoid problems
1207     # with overwritten sections and the rm_products key
1208     write_application_pyconf(config, application_tmp_dir)
1209     
1210     d_project = {"project" : (project_tmp_dir, PROJECT_DIR )}
1211     return d_project
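     # For reference, the temporary project tree produced above looks like:
     #   PROJECT/
     #     project.pyconf
     #     products/
     #       <product>.pyconf          (one per product, written below)
     #       compil_scripts/
     #       env_scripts/
     #       patches/
     #     applications/
     #       <application>.pyconf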
1212
1213 def find_product_scripts_and_pyconf(p_name,
1214                                     p_info,
1215                                     config,
1216                                     with_vcs,
1217                                     compil_scripts_tmp_dir,
1218                                     env_scripts_tmp_dir,
1219                                     patches_tmp_dir,
1220                                     products_pyconf_tmp_dir):
1221     '''Create a specific pyconf file for a given product. Get its environment 
1222        script, its compilation script and patches and put it in the temporary
1223        working directory. This method is used in the source package in order to
1224        construct the specific project.
1225
1226     :param p_name str: The name of the product.
1227     :param p_info Config: The specific configuration corresponding to the 
1228                              product
1229     :param config Config: The global configuration.
1230     :param with_vcs boolean: True if the package keeps vcs products as such
1231                              (not transformed into archive products)
1232     :param compil_scripts_tmp_dir str: The path to the temporary compilation 
1233                                        scripts directory of the project.
1234     :param env_scripts_tmp_dir str: The path to the temporary environment script 
1235                                     directory of the project.
1236     :param patches_tmp_dir str: The path to the temporary patch scripts 
1237                                 directory of the project.
1238     :param products_pyconf_tmp_dir str: The path to the temporary product 
1239                                         scripts directory of the project.
1240     '''
1241     
1242     # read the pyconf of the product
1243     product_pyconf_cfg = src.pyconf.Config(p_info.from_file)
1244
1245     # find the compilation script if any
1246     if src.product.product_has_script(p_info):
1247         compil_script_path = src.Path(p_info.compil_script)
1248         compil_script_path.copy(compil_scripts_tmp_dir)
1249
1250     # find the environment script if any
1251     if src.product.product_has_env_script(p_info):
1252         env_script_path = src.Path(p_info.environ.env_script)
1253         env_script_path.copy(env_scripts_tmp_dir)
1254
1255     # find the patches if any
1256     if src.product.product_has_patches(p_info):
1257         patches = src.pyconf.Sequence()
1258         for patch_path in p_info.patches:
1259             p_path = src.Path(patch_path)
1260             p_path.copy(patches_tmp_dir)
1261             patches.append(os.path.basename(patch_path), "")
1262
1263     if (not with_vcs) and src.product.product_is_vcs(p_info):
1264         # in non-vcs mode, products fetched from a VCS are converted to archive mode
1265
1266         # depending upon the incremental mode, select impacted sections
1267         if "properties" in p_info and "incremental" in p_info.properties and\
1268             p_info.properties.incremental == "yes":
1269             sections = ["default", "default_win", p_info.section, p_info.section+"_win"]
1270         else:
1271             sections = [p_info.section]
1272         for section in sections:
1273             if section in product_pyconf_cfg and "get_source" in product_pyconf_cfg[section]:
1274                 DBG.write("sat package: set get_source mode to archive for product %s and section %s" %\
1275                           (p_name,section))
1276                 product_pyconf_cfg[section].get_source = "archive"
1277                 if not "archive_info" in product_pyconf_cfg[section]:
1278                     product_pyconf_cfg[section].addMapping("archive_info",
1279                                         src.pyconf.Mapping(product_pyconf_cfg),
1280                                         "")
1281                     product_pyconf_cfg[section].archive_info.archive_name =\
1282                         p_info.name + ".tgz"
1283     
1284     if (with_vcs) and src.product.product_is_vcs(p_info):
1285         # in vcs mode we must explicitly replace the git server url
1286         # (otherwise it will not be found later, because project files are not exported in archives)
1287         for section in product_pyconf_cfg:
1288             # in every section of the product pyconf, replace the git repo definition by its substituted value (found in p_info)
1289             if "git_info" in product_pyconf_cfg[section]:
1290                 for repo in product_pyconf_cfg[section].git_info:
1291                     if repo in p_info.git_info:
1292                         product_pyconf_cfg[section].git_info[repo] =  p_info.git_info[repo]
1293
1294     # write the pyconf file to the temporary project location
1295     product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir,
1296                                            p_name + ".pyconf")
1297     ff = open(product_tmp_pyconf_path, 'w')
1298     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1299     product_pyconf_cfg.__save__(ff, 1)
1300     ff.close()
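     # When a VCS product is switched to archive mode above, the section written
     # in the temporary pyconf ends up containing (sketch):
     #     get_source : "archive"
     #     archive_info :
     #     {
     #         archive_name : "<product name>.tgz"
     #     }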
1301
1302
1303 def write_application_pyconf(config, application_tmp_dir):
1304     '''Write the application pyconf file in the specific temporary 
1305        directory containing the specific project of a source package.
1306
1307     :param config Config: The global configuration.
1308     :param application_tmp_dir str: The path to the temporary application 
1309                                     scripts directory of the project.
1310     '''
1311     application_name = config.VARS.application
1312     # write the pyconf file to the temporary application location
1313     application_tmp_pyconf_path = os.path.join(application_tmp_dir,
1314                                                application_name + ".pyconf")
1315     with open(application_tmp_pyconf_path, 'w') as f:
1316         f.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1317         res = src.pyconf.Config()
1318         app = src.pyconf.deepCopyMapping(config.APPLICATION)
1319
1320         # set base mode to "no" for the archive
1321         app.base = "no"
1322
1323         # Change the workdir
1324         app.workdir = src.pyconf.Reference(
1325                                  app,
1326                                  src.pyconf.DOLLAR,
1327                                  'VARS.salometoolsway + $VARS.sep + ".."')
1328         res.addMapping("APPLICATION", app, "")
1329         res.__save__(f, evaluated=False)
1330     
1331
1332 def sat_package(config, tmp_working_dir, options, logger):
1333     '''Prepare a dictionary that stores all the needed directories and files to
1334        add in a salomeTool package.
1335     
1336     :param tmp_working_dir str: The temporary local working directory 
1337     :param options OptResult: the options of the launched command
1338     :return: the dictionary that stores all the needed directories and files to
1339              add in a salomeTool package.
1340              {label : (path_on_local_machine, path_in_archive)}
1341     :rtype: dict
1342     '''
1343     d_project = {}
1344
1345     # we include sat itself
1346     d_project["all_sat"]=(config.VARS.salometoolsway, "")
1347
1348     # and we overwrite local.pyconf with a clean version.
1349     local_pyconf_tmp_path = os.path.join(tmp_working_dir, "local.pyconf")
1350     local_file_path = os.path.join(config.VARS.datadir, "local.pyconf")
1351     local_cfg = src.pyconf.Config(local_file_path)
1352     local_cfg.PROJECTS.project_file_paths=src.pyconf.Sequence(local_cfg.PROJECTS)
1353     local_cfg.LOCAL["base"] = "default"
1354     local_cfg.LOCAL["workdir"] = "default"
1355     local_cfg.LOCAL["log_dir"] = "default"
1356     local_cfg.LOCAL["archive_dir"] = "default"
1357     local_cfg.LOCAL["VCS"] = "None"
1358     local_cfg.LOCAL["tag"] = src.get_salometool_version(config)
1359
1360     # if the archive contains a project, we write its relative path in local.pyconf
1361     if options.project:
1362         project_arch_path = os.path.join("projects", options.project, 
1363                                          os.path.basename(options.project_file_path))
1364         local_cfg.PROJECTS.project_file_paths.append(project_arch_path, "")
1365
1366     ff = open(local_pyconf_tmp_path, 'w')
1367     local_cfg.__save__(ff, 1)
1368     ff.close()
1369     d_project["local.pyconf"]=(local_pyconf_tmp_path, "data/local.pyconf")
1370     return d_project
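     # The dictionary returned by sat_package typically contains two entries,
     # e.g. (local paths are illustrative):
     #   {"all_sat"      : ("/path/to/salomeTools", ""),
     #    "local.pyconf" : ("<tmp_working_dir>/local.pyconf", "data/local.pyconf")}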
1371     
1372
1373 def project_package(config, name_project, project_file_path, ftp_mode, tmp_working_dir, embedded_in_sat, logger):
1374     '''Prepare a dictionary that stores all the needed directories and files to
1375        add in a project package.
1376     
1377     :param project_file_path str: The path to the local project.
1378     :param ftp_mode boolean: Do not embed archives, the archive will rely on ftp mode to retrieve them.
1379     :param tmp_working_dir str: The temporary local directory containing some 
1380                                 specific directories or files needed in the 
1381                                 project package
1382     :param embedded_in_sat boolean : the project package is embedded in a sat package
1383     :return: the dictionary that stores all the needed directories and files to
1384              add in a project package.
1385              {label : (path_on_local_machine, path_in_archive)}
1386     :rtype: dict
1387     '''
1388     d_project = {}
1389     # Read the project file and get the directories to add to the package
1390     
1391     try: 
1392       project_pyconf_cfg = config.PROJECTS.projects.__getattr__(name_project)
1393     except:
1394       logger.write("""
1395 WARNING: config.PROJECTS.projects.%s does not exist, trying to read it now from:\n%s\n""" % (name_project, project_file_path))
1396       project_pyconf_cfg = src.pyconf.Config(project_file_path)
1397       project_pyconf_cfg.PWD = os.path.dirname(project_file_path)
1398     
1399     paths = {"APPLICATIONPATH" : "applications",
1400              "PRODUCTPATH" : "products",
1401              "JOBPATH" : "jobs",
1402              "MACHINEPATH" : "machines"}
1403     if not ftp_mode:
1404         paths["ARCHIVEPATH"] = "archives"
1405
1406     # Loop over the project paths and add them
1407     project_file_name = os.path.basename(project_file_path)
1408     for path in paths:
1409         if path not in project_pyconf_cfg:
1410             continue
1411         if embedded_in_sat:
1412             dest_path = os.path.join("projects", name_project, paths[path])
1413             project_file_dest = os.path.join("projects", name_project, project_file_name)
1414         else:
1415             dest_path = paths[path]
1416             project_file_dest = project_file_name
1417
1418         # Add the directory to the files to add in the package
1419         d_project[path] = (project_pyconf_cfg[path], dest_path)
1420
1421         # Modify the value of the path in the package
1422         project_pyconf_cfg[path] = src.pyconf.Reference(
1423                                     project_pyconf_cfg,
1424                                     src.pyconf.DOLLAR,
1425                                     'project_path + "/' + paths[path] + '"')
1426     
1427     # Modify some values
1428     if "project_path" not in project_pyconf_cfg:
1429         project_pyconf_cfg.addMapping("project_path",
1430                                       src.pyconf.Mapping(project_pyconf_cfg),
1431                                       "")
1432     project_pyconf_cfg.project_path = src.pyconf.Reference(project_pyconf_cfg,
1433                                                            src.pyconf.DOLLAR,
1434                                                            'PWD')
1435     # we don't want to export these two fields
1436     project_pyconf_cfg.__delitem__("file_path")
1437     project_pyconf_cfg.__delitem__("PWD")
1438     if ftp_mode:
1439         project_pyconf_cfg.__delitem__("ARCHIVEPATH")
1440     
1441     # Write the project pyconf file
1442     project_pyconf_tmp_path = os.path.join(tmp_working_dir, project_file_name)
1443     ff = open(project_pyconf_tmp_path, 'w')
1444     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1445     project_pyconf_cfg.__save__(ff, 1)
1446     ff.close()
1447     d_project["Project hat file"] = (project_pyconf_tmp_path, project_file_dest)
1448     
1449     return d_project
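     # Example of the dictionary returned for a project named "salome" when it
     # is embedded in a sat package (local paths are illustrative):
     #   {"APPLICATIONPATH"  : ("/path/to/applications", "projects/salome/applications"),
     #    "PRODUCTPATH"      : ("/path/to/products",     "projects/salome/products"),
     #    "Project hat file" : ("<tmp>/salome.pyconf",   "projects/salome/salome.pyconf")}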
1450
1451 def add_readme(config, options, where):
1452     readme_path = os.path.join(where, "README")
1453     with codecs.open(readme_path, "w", 'utf-8') as f:
1454
1455         # templates for building the header
1456         readme_header="""
1457 # This package was generated with sat $version
1458 # Date: $date
1459 # User: $user
1460 # Distribution : $dist
1461
1462 In the following, $$ROOT represents the directory where you have installed 
1463 SALOME (the directory where this file is located).
1464
1465 """
1466         if src.architecture.is_windows():
1467             readme_header = readme_header.replace('$$ROOT','%ROOT%')
1468         readme_compilation_with_binaries="""
1469
1470 compilation based on the binaries used as prerequisites
1471 =======================================================
1472
1473 If you fail to compile the complete application (for example because
1474 you are not root on your system and cannot install missing packages), you
1475 may try a partial compilation based on the binaries.
1476 For that it is necessary to copy the binaries from BINARIES to INSTALL,
1477 and do some substitutions on cmake and .la files (replace the build directories
1478 with local paths).
1479 The procedure to do it is:
1480  1) Remove or rename INSTALL directory if it exists
1481  2) Execute the shell script install_bin.sh:
1482  > cd $ROOT
1483  > ./install_bin.sh
1484  3) Use salomeTools (as explained in the Sources section) and compile only the
1485     modules you need (with the -p option)
1486
1487 """
1488         readme_header_tpl=string.Template(readme_header)
1489         readme_template_path_bin = os.path.join(config.VARS.internal_dir,
1490                 "README_BIN.template")
1491         readme_template_path_bin_launcher = os.path.join(config.VARS.internal_dir,
1492                 "README_LAUNCHER.template")
1493         readme_template_path_bin_virtapp = os.path.join(config.VARS.internal_dir,
1494                 "README_BIN_VIRTUAL_APP.template")
1495         readme_template_path_src = os.path.join(config.VARS.internal_dir,
1496                 "README_SRC.template")
1497         readme_template_path_pro = os.path.join(config.VARS.internal_dir,
1498                 "README_PROJECT.template")
1499         readme_template_path_sat = os.path.join(config.VARS.internal_dir,
1500                 "README_SAT.template")
1501
1502         # prepare substitution dictionary
1503         d = dict()
1504         d['user'] = config.VARS.user
1505         d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
1506         d['version'] = src.get_salometool_version(config)
1507         d['dist'] = config.VARS.dist
1508         f.write(readme_header_tpl.substitute(d)) # write the general header (common)
1509
1510         if options.binaries or options.sources:
1511             d['application'] = config.VARS.application
1512             d['BINARIES']    = config.INTERNAL.config.binary_dir
1513             d['SEPARATOR'] = config.VARS.sep
1514             if src.architecture.is_windows():
1515                 d['operatingSystem'] = 'Windows'
1516                 d['PYTHON3'] = 'python3'
1517                 d['ROOT']    = '%ROOT%'
1518             else:
1519                 d['operatingSystem'] = 'Linux'
1520                 d['PYTHON3'] = ''
1521                 d['ROOT']    = '$ROOT'
1522             f.write("# Application: " + d['application'] + "\n")
1523             if 'KERNEL' in config.APPLICATION.products:
1524                 VersionSalome = src.get_salome_version(config)
1525                 # Case where SALOME has the launcher that uses the SalomeContext API
1526                 if VersionSalome >= 730:
1527                     d['launcher'] = config.APPLICATION.profile.launcher_name
1528                 else:
1529                     d['virtual_app'] = 'runAppli' # this info is not used now
1530
1531         # write the specific sections
1532         if options.binaries:
1533             f.write(src.template.substitute(readme_template_path_bin, d))
1534             if "virtual_app" in d:
1535                 f.write(src.template.substitute(readme_template_path_bin_virtapp, d))
1536             if "launcher" in d:
1537                 f.write(src.template.substitute(readme_template_path_bin_launcher, d))
1538
1539         if options.sources:
1540             f.write(src.template.substitute(readme_template_path_src, d))
1541
1542         if options.binaries and options.sources and not src.architecture.is_windows():
1543             f.write(readme_compilation_with_binaries)
1544
1545         if options.project:
1546             f.write(src.template.substitute(readme_template_path_pro, d))
1547
1548         if options.sat:
1549             f.write(src.template.substitute(readme_template_path_sat, d))
1550     
1551     return readme_path
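     # After substitution the generated README starts with a header such as
     # (version, date, user and distribution values below are illustrative):
     #   # This package was generated with sat 5.8.0
     #   # Date: 2024-01-01 12:00
     #   # User: jdoe
     #   # Distribution : FD37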
1552
1553 def update_config(config, logger,  prop, value):
1554     '''Remove from config.APPLICATION.products the products that have the property given as input.
1555     
1556     :param config Config: The global config.
1557     :param prop str: The property to filter
1558     :param value str: The value of the property to filter
1559     '''
1560     # if there is no APPLICATION (e.g. sat package -t): nothing to do
1561     if "APPLICATION" in config:
1562         l_product_to_remove = []
1563         for product_name in config.APPLICATION.products.keys():
1564             prod_cfg = src.product.get_product_config(config, product_name)
1565             if src.get_property_in_product_cfg(prod_cfg, prop) == value:
1566                 l_product_to_remove.append(product_name)
1567         for product_name in l_product_to_remove:
1568             config.APPLICATION.products.__delitem__(product_name)
1569             logger.write("Remove product %s with property %s\n" % (product_name, prop), 5)
1570
1571 def description():
1572     '''method that is called when salomeTools is called with --help option.
1573     
1574     :return: The text to display for the package command description.
1575     :rtype: str
1576     '''
1577     return _("""
1578 The package command creates a tar file archive of a product.
1579 There are four kinds of archive, which can be mixed:
1580
1581  1 - The binary archive. 
1582      It contains the product installation directories plus a launcher.
1583  2 - The sources archive. 
1584      It contains the product archives, a project (the application plus salomeTools).
1585  3 - The project archive. 
1586      It contains a project (give the project file path as argument).
1587  4 - The salomeTools archive. 
1588      It contains the salomeTools utility itself.
1589
1590 example:
1591  >> sat package SALOME-master --binaries --sources""")
1592   
1593 def run(args, runner, logger):
1594     '''method that is called when salomeTools is called with package parameter.
1595     '''
1596     
1597     # Parse the options
1598     (options, args) = parser.parse_args(args)
1599
1600     
1601     # Check that a type of package is called, and only one
1602     all_option_types = (options.binaries,
1603                         options.sources,
1604                         options.project not in ["", None],
1605                         options.sat,
1606                         options.bin_products)
1607
1608     # Check if no option for package type
1609     if all_option_types.count(True) == 0:
1610         msg = _("Error: Specify a type for the package\nUse one of the "
1611                 "following options: --binaries, --sources, --project,"
1612                 " --salometools or --bin_products")
1613         logger.write(src.printcolors.printcError(msg), 1)
1614         logger.write("\n", 1)
1615         return 1
1616     do_create_package = options.binaries or options.sources or options.project or options.sat 
1617
1618     if options.bin_products:
1619         ret = bin_products_archives(runner.cfg, logger)
1620         if ret!=0:
1621             return ret
1622     if not do_create_package:
1623         return 0
1624
1625     # continue to create a tar.gz package 
1626
1627     # The directory where to put the package if it is neither binary nor source
1628     package_default_path = runner.cfg.LOCAL.workdir
1629     # if the package contains binaries or sources:
1630     if options.binaries or options.sources or options.bin_products:
1631         # Check that the command has been called with an application
1632         src.check_config_has_application(runner.cfg)
1633
1634         # Display information
1635         logger.write(_("Packaging application %s\n") % src.printcolors.printcLabel(
1636                                                     runner.cfg.VARS.application), 1)
1637         
1638         # Get the default directory where to put the packages
1639         package_default_path = os.path.join(runner.cfg.APPLICATION.workdir, "PACKAGE")
1640         src.ensure_path_exists(package_default_path)
1641         
1642     # if the package contains a project:
1643     if options.project:
1644         # check that the project is visible by SAT
1645         projectNameFile = options.project + ".pyconf"
1646         foundProject = None
1647         for i in runner.cfg.PROJECTS.project_file_paths:
1648             baseName = os.path.basename(i)
1649             if baseName == projectNameFile:
1650                 foundProject = i
1651                 break
1652
1653         if foundProject is None:
1654             local_path = os.path.join(runner.cfg.VARS.salometoolsway, "data", "local.pyconf")
1655             msg = _("""ERROR: the project %(1)s is not visible by salomeTools.
1656 Known projects are:
1657 %(2)s
1658
1659 Please add it to the file:
1660 %(3)s""" % \
1661                     {"1": options.project, "2": "\n  ".join(runner.cfg.PROJECTS.project_file_paths), "3": local_path})
1662             logger.write(src.printcolors.printcError(msg), 1)
1663             logger.write("\n", 1)
1664             return 1
1665         else:
1666             options.project_file_path = foundProject
1667             src.printcolors.print_value(logger, "Project path", options.project_file_path, 2)
1668     
1669     # Remove the products that are filtered by the --without_properties option
1670     if options.without_properties:
1671         prop, value = options.without_properties
1672         update_config(runner.cfg, logger, prop, value)
1673
1674     # Remove from config the products that have the not_in_package property
1675     update_config(runner.cfg, logger, "not_in_package", "yes")
1676
1677     # get the name of the archive or build it
1678     if options.name:
1679         if os.path.basename(options.name) == options.name:
1680             # only a name (not a path)
1681             archive_name = options.name           
1682             dir_name = package_default_path
1683         else:
1684             archive_name = os.path.basename(options.name)
1685             dir_name = os.path.dirname(options.name)
1686         
1687         # strip the extension
1688         if archive_name[-len(".tgz"):] == ".tgz":
1689             archive_name = archive_name[:-len(".tgz")]
1690         if archive_name[-len(".tar.gz"):] == ".tar.gz":
1691             archive_name = archive_name[:-len(".tar.gz")]
1692         
1693     else:
1694         archive_name=""
1695         dir_name = package_default_path
1696         if options.binaries or options.sources:
1697             archive_name = runner.cfg.APPLICATION.name
1698
1699         if options.binaries:
1700             archive_name += "-"+runner.cfg.VARS.dist
1701             
1702         if options.sources:
1703             archive_name += "-SRC"
1704             if options.with_vcs:
1705                 archive_name += "-VCS"
1706
1707         if options.sat:
1708             archive_name += ("salomeTools_" + src.get_salometool_version(runner.cfg))
1709
1710         if options.project:
1711             if options.sat:
1712                 archive_name += "_" 
1713             archive_name += ("satproject_" + options.project)
1714  
1715         if len(archive_name)==0: # no option worked 
1716             msg = _("Error: Cannot name the archive\n"
1717                     " check if at least one of the following options was "
1718                     "selected : --binaries, --sources, --project or"
1719                     " --salometools")
1720             logger.write(src.printcolors.printcError(msg), 1)
1721             logger.write("\n", 1)
1722             return 1
1723  
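     # With the default naming scheme the archive name typically ends up as,
     # for instance (application name and dist value below are illustrative):
     #   SALOME-master-FD37.tar.gz      for --binaries
     #   SALOME-master-SRC.tar.gz       for --sources
     #   SALOME-master-SRC-VCS.tar.gz   for --sources --with_vcs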
1724     path_targz = os.path.join(dir_name, archive_name + PACKAGE_EXT)
1725     
1726     src.printcolors.print_value(logger, "Package path", path_targz, 2)
1727
1728     # Create a working directory for all files that are produced during the
1729     # package creation and that will be removed at the end of the command
1730     tmp_working_dir = os.path.join(runner.cfg.VARS.tmp_root, runner.cfg.VARS.datehour)
1731     src.ensure_path_exists(tmp_working_dir)
1732     logger.write("\n", 5)
1733     logger.write(_("The temporary working directory: %s\n") % tmp_working_dir, 5)
1734     
1735     logger.write("\n", 3)
1736
1737     msg = _("Preparation of files to add to the archive")
1738     logger.write(src.printcolors.printcLabel(msg), 2)
1739     logger.write("\n", 2)
1740     
1741     d_files_to_add={}  # content of the archive
1742
1743     # a dict to hold paths that will need to be substituted for user recompilations
1744     d_paths_to_substitute={}  
1745
1746     if options.binaries:
1747         d_bin_files_to_add = binary_package(runner.cfg,
1748                                             logger,
1749                                             options,
1750                                             tmp_working_dir)
1751         # for all binary directories, store the substitution that will be required
1752         # for extra compilations
1753         for key in d_bin_files_to_add:
1754             if key.endswith("(bin)"):
1755                 source_dir = d_bin_files_to_add[key][0]
1756                 path_in_archive = d_bin_files_to_add[key][1].replace(
1757                    runner.cfg.INTERNAL.config.binary_dir + runner.cfg.VARS.dist,
1758                    runner.cfg.INTERNAL.config.install_dir)
1759                 if os.path.basename(source_dir)==os.path.basename(path_in_archive):
1760                     # if basename is the same we will just substitute the dirname 
1761                     d_paths_to_substitute[os.path.dirname(source_dir)]=\
1762                         os.path.dirname(path_in_archive)
1763                 else:
1764                     d_paths_to_substitute[source_dir]=path_in_archive
1765
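             # d_paths_to_substitute then maps each local binary directory to
             # its path inside the archive (the binary prefix, e.g.
             # BINARIES-<dist>, being replaced by the install directory);
             # install_bin.sh later uses these pairs to fix absolute paths in
             # cmake and .la files.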
1766         d_files_to_add.update(d_bin_files_to_add)
1767     if options.sources:
1768         d_files_to_add.update(source_package(runner,
1769                                         runner.cfg,
1770                                         logger, 
1771                                         options,
1772                                         tmp_working_dir))
1773         if options.binaries:
1774             # for archives with binaries and sources we provide a shell script able to
1775             # install the binaries for compilation
1776             file_install_bin=produce_install_bin_file(runner.cfg,logger,
1777                                                       tmp_working_dir,
1778                                                       d_paths_to_substitute,
1779                                                       "install_bin.sh")
1780             d_files_to_add.update({"install_bin" : (file_install_bin, "install_bin.sh")})
1781             logger.write("substitutions that need to be done later : \n", 5)
1782             logger.write(str(d_paths_to_substitute), 5)
1783             logger.write("\n", 5)
1784     else:
1785         # the --salometools option is not considered when --sources is selected, as
1786         # the source package already embeds salomeTools
1787         if options.sat:
1788             d_files_to_add.update(sat_package(runner.cfg, tmp_working_dir, 
1789                                   options, logger))
1790         
1791     if options.project:
1792         DBG.write("config for package %s" % options.project, runner.cfg)
1793         d_files_to_add.update(project_package(runner.cfg, options.project, options.project_file_path, options.ftp, tmp_working_dir, options.sat, logger))
1794
1795     if not(d_files_to_add):
1796         msg = _("Error: Empty dictionnary to build the archive!\n")
1797         logger.write(src.printcolors.printcError(msg), 1)
1798         logger.write("\n", 1)
1799         return 1
1800
1801     # Add the README file in the package
1802     local_readme_tmp_path = add_readme(runner.cfg, options, tmp_working_dir)
1803     d_files_to_add["README"] = (local_readme_tmp_path, "README")
1804
1805     # Add the additional files of option add_files
1806     if options.add_files:
1807         for file_path in options.add_files:
1808             if not os.path.exists(file_path):
1809                 msg = _("WARNING: the file %s is not accessible.\n" % file_path)
1810                 continue
1811             file_name = os.path.basename(file_path)
1812             d_files_to_add[file_name] = (file_path, file_name)
1813
1814     logger.write("\n", 2)
1815     logger.write(src.printcolors.printcLabel(_("Actually do the package")), 2)
1816     logger.write("\n", 2)
1817     logger.write("\nfiles and directories to add:\n%s\n\n" % PP.pformat(d_files_to_add), 5)
1818
1819     res = 0
1820     try:
1821         # Create the tarfile object
1822         tar = tarfile.open(path_targz, mode='w:gz')
1823         
1824         # get the filtering function if needed
1825         if old_python:
1826             filter_function = exclude_VCS_and_extensions_26
1827         else:
1828             filter_function = exclude_VCS_and_extensions
1829
1830         # Add the files to the tarfile object
1831         res = add_files(tar, archive_name, d_files_to_add, logger, f_exclude=filter_function)
1832         tar.close()
1833     except KeyboardInterrupt:
1834         logger.write(src.printcolors.printcError("\nERROR: forced interruption\n"), 1)
1835         logger.write(_("Removing the temporary working directory '%s'... ") % tmp_working_dir, 1)
1836         # remove the working directory
1837         shutil.rmtree(tmp_working_dir)
1838         logger.write(_("OK"), 1)
1839         logger.write(_("\n"), 1)
1840         return 1
1841     
1842     # case where there is no application, e.g. packaging only sat with 'sat package -t'
1843     try:
1844         app = runner.cfg.APPLICATION
1845     except:
1846         app = None
1847
1848     # unconditionally remove the tmp_local_working_dir
1849     if app is not None:
1850         tmp_local_working_dir = os.path.join(app.workdir, "tmp_package")
1851         if os.path.isdir(tmp_local_working_dir):
1852             shutil.rmtree(tmp_local_working_dir)
1853
1854     # remove the tmp directory, unless the user is registered as a developer
1855     if os.path.isdir(tmp_working_dir) and (not DBG.isDeveloper()):
1856         shutil.rmtree(tmp_working_dir)
1857     
1858     # Print again the path of the package
1859     logger.write("\n", 2)
1860     src.printcolors.print_value(logger, "Package path", path_targz, 2)
1861     
1862     return res