[tools/sat.git] / commands / package.py  (option bin_products part 1)
1 #!/usr/bin/env python
2 #-*- coding:utf-8 -*-
3 #  Copyright (C) 2010-2012  CEA/DEN
4 #
5 #  This library is free software; you can redistribute it and/or
6 #  modify it under the terms of the GNU Lesser General Public
7 #  License as published by the Free Software Foundation; either
8 #  version 2.1 of the License.
9 #
10 #  This library is distributed in the hope that it will be useful,
11 #  but WITHOUT ANY WARRANTY; without even the implied warranty of
12 #  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13 #  Lesser General Public License for more details.
14 #
15 #  You should have received a copy of the GNU Lesser General Public
16 #  License along with this library; if not, write to the Free Software
17 #  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
18
19 import os
20 import stat
21 import shutil
22 import datetime
23 import tarfile
24 import codecs
25 import string
26 import glob
27 import pprint as PP
28 import sys
29 import src
30
31 from application import get_SALOME_modules
32 import src.debug as DBG
33
34 old_python = sys.version_info[0] == 2 and sys.version_info[1] <= 6
35
36 BINARY = "binary"
37 SOURCE = "Source"
38 PROJECT = "Project"
39 SAT = "Sat"
40
41 ARCHIVE_DIR = "ARCHIVES"
42 PROJECT_DIR = "PROJECT"
43
44 IGNORED_DIRS = [".git", ".svn"]
45 IGNORED_EXTENSIONS = []
46
47 PACKAGE_EXT=".tar.gz" # the extension we use for the packages
48
49 PROJECT_TEMPLATE = """#!/usr/bin/env python
50 #-*- coding:utf-8 -*-
51
52 # The path to the archive root directory
53 root_path : $PWD + "/../"
54 # path to the PROJECT
55 project_path : $PWD + "/"
56
57 # Where to search the archives of the products
58 ARCHIVEPATH : $root_path + "ARCHIVES"
59 # Where to search the pyconf of the applications
60 APPLICATIONPATH : $project_path + "applications/"
61 # Where to search the pyconf of the products
62 PRODUCTPATH : $project_path + "products/"
63 # Where to search the pyconf of the jobs of the project
64 JOBPATH : $project_path + "jobs/"
65 # Where to search the pyconf of the machines of the project
66 MACHINEPATH : $project_path + "machines/"
67 """
68
69 LOCAL_TEMPLATE = ("""#!/usr/bin/env python
70 #-*- coding:utf-8 -*-
71
72   LOCAL :
73   {
74     base : 'default'
75     workdir : 'default'
76     log_dir : 'default'
77     archive_dir : 'default'
78     VCS : 'unknown'
79     tag : 'unknown'
80   }
81
82 PROJECTS :
83 {
84 project_file_paths : [$VARS.salometoolsway + $VARS.sep + \"..\" + $VARS.sep"""
85 """ + \"""" + PROJECT_DIR + """\" + $VARS.sep + "project.pyconf"]
86 }
87 """)
88
89 # Define all possible options for the package command: sat package <options>
90 parser = src.options.Options()
91 parser.add_option('b', 'binaries', 'boolean', 'binaries',
92     _('Optional: Produce a binary package.'), False)
93 parser.add_option('f', 'force_creation', 'boolean', 'force_creation',
94     _('Optional: Only binary package: produce the archive even if '
95       'there are some missing products.'), False)
96 parser.add_option('s', 'sources', 'boolean', 'sources',
97     _('Optional: Produce a compilable archive of the sources of the '
98       'application.'), False)
99 parser.add_option('', 'bin_products', 'boolean', 'bin_products',
100     _('Optional: Create binary archives for all products.'), False)
101 parser.add_option('', 'with_vcs', 'boolean', 'with_vcs',
102     _('Optional: Do not make archives for products in VCS mode (git, cvs, svn). '
103       'sat prepare will use VCS mode instead to retrieve them.'),
104     False)
105 parser.add_option('', 'ftp', 'boolean', 'ftp',
106     _('Optional: Do not embed archives for products in archive mode. '
107     'sat prepare will use ftp instead to retrieve them.'),
108     False)
109 parser.add_option('e', 'exe', 'string', 'exe',
110     _('Optional: Produce an extra launcher based upon the exe given as argument.'), "")
111 parser.add_option('p', 'project', 'string', 'project',
112     _('Optional: Produce an archive that contains a project.'), "")
113 parser.add_option('t', 'salometools', 'boolean', 'sat',
114     _('Optional: Produce an archive that contains salomeTools.'), False)
115 parser.add_option('n', 'name', 'string', 'name',
116     _('Optional: The name or full path of the archive.'), None)
117 parser.add_option('', 'add_files', 'list2', 'add_files',
118     _('Optional: The list of additional files to add to the archive.'), [])
119 parser.add_option('', 'without_properties', 'properties', 'without_properties',
120     _('Optional: Filter the products by their properties.\n\tSyntax: '
121       '--without_properties <property>:<value>'))
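
# Illustrative command lines for this command (the application name
# "SALOME-X.Y.Z" below is hypothetical):
#   ./sat package SALOME-X.Y.Z --binaries                  # binary archive
#   ./sat package SALOME-X.Y.Z --sources --with_vcs        # source archive, keep VCS products
#   ./sat package SALOME-X.Y.Z -b -f -n /tmp/my_archive    # force creation, custom name/path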
122
123
124 def add_files(tar, name_archive, d_content, logger, f_exclude=None):
125     '''Create an archive containing all directories and files that are given in
126        the d_content argument.
127     
128     :param tar tarfile: The tarfile instance used to make the archive.
129     :param name_archive str: The name of the archive to make.
130     :param d_content dict: The dictionary that contains all directories and files
131                            to add in the archive.
132                            d_content[label] = 
133                                         (path_on_local_machine, path_in_archive)
134     :param logger Logger: the logging instance
135     :param f_exclude Function: the function that filters out files or directories
136     :return: 0 if success, 1 if not.
137     :rtype: int
138     '''
139     # get the max length of the names in order to align the display
140     max_len = len(max(d_content.keys(), key=len))
141     
142     success = 0
143     # loop over each directory or file stored in the d_content dictionary
144     names = sorted(d_content.keys())
145     DBG.write("add tar names", names)
146
147     # used to avoid duplications (for pip install in python, or single_install_dir cases)
148     already_added=set() 
149     for name in names:
150         # display information
151         len_points = max_len - len(name) + 3
152         local_path, archive_path = d_content[name]
153         in_archive = os.path.join(name_archive, archive_path)
154         logger.write(name + " " + len_points * "." + " "+ in_archive + " ", 3)
155         # Get the local path and the path in archive 
156         # of the directory or file to add
157         # Add it in the archive
158         try:
159             key=local_path+"->"+in_archive
160             if key not in already_added:
161                 if old_python:
162                     tar.add(local_path,
163                                  arcname=in_archive,
164                                  exclude=exclude_VCS_and_extensions_26)
165                 else:
166                     tar.add(local_path,
167                                  arcname=in_archive,
168                                  filter=exclude_VCS_and_extensions)
169                 already_added.add(key)
170             logger.write(src.printcolors.printcSuccess(_("OK")), 3)
171         except Exception as e:
172             logger.write(src.printcolors.printcError(_("KO ")), 3)
173             logger.write(str(e), 3)
174             success = 1
175         logger.write("\n", 3)
176     return success
177
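# A minimal sketch of how add_files is typically driven (labels and paths below
# are hypothetical, not taken from a real application):
#
#   import tarfile
#   d_content = {
#       "KERNEL (bin)": ("/home/user/APPLI/INSTALL/KERNEL", "BINARIES-XX/KERNEL"),
#       "launcher":     ("/tmp/tmp_package/salome",         "salome"),
#   }
#   with tarfile.open("MyAppli.tar.gz", mode="w:gz") as tar:
#       add_files(tar, "MyAppli", d_content, logger)   # logger: a sat logger instance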
178
179 def exclude_VCS_and_extensions_26(filename):
180     ''' The function used to exclude the VCS repositories (like .git) and the
181         ignored extensions from the package (only for python 2.6)
182
183     :param filename Str: The filename to exclude (or not).
184     :return: True if the file has to be excluded
185     :rtype: Boolean
186     '''
187     for dir_name in IGNORED_DIRS:
188         if dir_name in filename:
189             return True
190     for extension in IGNORED_EXTENSIONS:
191         if filename.endswith(extension):
192             return True
193     return False
194
195 def exclude_VCS_and_extensions(tarinfo):
196     ''' The function used to exclude the VCS repositories (like .git) and the
197         ignored extensions from the package
198
199     :param tarinfo TarInfo: The tar member to exclude (or not).
200     :return: None if the member has to be excluded, tarinfo otherwise
201     :rtype: tarinfo or None
202     '''
203     filename = tarinfo.name
204     for dir_name in IGNORED_DIRS:
205         if dir_name in filename:
206             return None
207     for extension in IGNORED_EXTENSIONS:
208         if filename.endswith(extension):
209             return None
210     return tarinfo
211
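# Note on the two callbacks above: tarfile's filter= callable receives a TarInfo
# and must return it to keep the member, or None to drop it; the older exclude=
# callback (python 2.6) receives a file name and returns True to drop it, e.g.:
#   tar.add(local_path, arcname=path_in_archive,
#           filter=exclude_VCS_and_extensions)   # .git / .svn entries are skipped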
212 def produce_relative_launcher(config,
213                               logger,
214                               file_dir,
215                               file_name,
216                               binaries_dir_name):
217     '''Create a specific SALOME launcher for the binary package. This launcher 
218        uses relative paths.
219     
220     :param config Config: The global configuration.
221     :param logger Logger: the logging instance
222     :param file_dir str: the directory where to put the launcher
223     :param file_name str: The launcher name
224     :param binaries_dir_name str: the name of the directory where the binaries
225                                   are, in the archive.
226     :return: the path of the produced launcher
227     :rtype: str
228     '''
229     
230     # set base mode to "no" for the archive - save current mode to restore it at the end
231     if "base" in config.APPLICATION:
232         base_setting=config.APPLICATION.base 
233     else:
234         base_setting="maybe"
235     config.APPLICATION.base="no"
236
237     # get KERNEL installation path 
238     kernel_info = src.product.get_product_config(config, "KERNEL")
239     kernel_base_name=os.path.basename(kernel_info.install_dir)
240     if kernel_info.install_mode == "base":
241         # case of kernel installed in base. the kernel install dir name is different in the archive
242         kernel_base_name=os.path.basename(os.path.dirname(kernel_info.install_dir))
243     
244     kernel_root_dir = os.path.join(binaries_dir_name, kernel_base_name)
245
246     # set kernel bin dir (considering fhs property)
247     kernel_cfg = src.product.get_product_config(config, "KERNEL")
248     if src.get_property_in_product_cfg(kernel_cfg, "fhs"):
249         bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin") 
250     else:
251         bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin","salome") 
252
253     # check if the application contains an application module
254     # check also if the application has a distene product, 
255     # in this case get its licence file name
256     l_product_info = src.product.get_products_infos(config.APPLICATION.products.keys(), config)
257     salome_application_name="Not defined" 
258     distene_licence_file_name=False
259     for prod_name, prod_info in l_product_info:
260         # look for a "salome application" and a distene product
261         if src.get_property_in_product_cfg(prod_info, "is_distene") == "yes":
262             distene_licence_file_name = src.product.product_has_licence(prod_info, 
263                                             config.PATHS.LICENCEPATH) 
264         if src.get_property_in_product_cfg(prod_info, "is_salome_application") == "yes":
265             salome_application_name=prod_info.name
266
267     # if the application contains an application module, we set ABSOLUTE_APPLI_PATH to it
268     # if not we set it to KERNEL_INSTALL_DIR, which is sufficient, except for salome test
269     if salome_application_name == "Not defined":
270         app_root_dir=kernel_root_dir
271     else:
272         app_root_dir=os.path.join(binaries_dir_name, salome_application_name)
273
274     additional_env={}
275     additional_env['sat_bin_kernel_install_dir'] = "out_dir_Path + " +\
276                                                    config.VARS.sep + bin_kernel_install_dir
277     if "python3" in config.APPLICATION and config.APPLICATION.python3 == "yes":
278         additional_env['sat_python_version'] = 3
279     else:
280         additional_env['sat_python_version'] = 2
281
282     additional_env['ABSOLUTE_APPLI_PATH'] = "out_dir_Path" + config.VARS.sep + app_root_dir
283
284     # create an environment file writer
285     writer = src.environment.FileEnvWriter(config,
286                                            logger,
287                                            file_dir,
288                                            src_root=None,
289                                            env_info=None)
290     
291     filepath = os.path.join(file_dir, file_name)
292     # Write
293     writer.write_env_file(filepath,
294                           False,  # for launch
295                           "cfgForPy",
296                           additional_env=additional_env,
297                           no_path_init="False",
298                           for_package = binaries_dir_name)
299     
300     # Little hack to put out_dir_Path outside the strings
301     src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' )
302     src.replace_in_file(filepath, "r'out_dir_Path + ", "out_dir_Path + r'" )
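    # e.g. a generated line  r"out_dir_Path/bin/salome"  becomes
    #      out_dir_Path + r"/bin/salome"   (illustrative path)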
303     
304     # A hack to put a call to a file for distene licence.
305     # It does nothing to an application that has no distene product
306     if distene_licence_file_name:
307         logger.write("Application has a distene licence file! We use it in package launcher", 5)
308         hack_for_distene_licence(filepath, distene_licence_file_name)
309        
310     # change the rights in order to make the file executable for everybody
311     os.chmod(filepath,
312              stat.S_IRUSR |
313              stat.S_IRGRP |
314              stat.S_IROTH |
315              stat.S_IWUSR |
316              stat.S_IXUSR |
317              stat.S_IXGRP |
318              stat.S_IXOTH)
319
320     # restore the modified setting to its initial value
321     config.APPLICATION.base=base_setting
322
323     return filepath
324
325 def hack_for_distene_licence(filepath, licence_file):
326     '''Replace the distene licence env variable by a call to a file.
327     
328     :param filepath Str: The path to the launcher to modify.
329     '''  
330     shutil.move(filepath, filepath + "_old")
331     fileout= filepath
332     filein = filepath + "_old"
333     fin = open(filein, "r")
334     fout = open(fileout, "w")
335     text = fin.readlines()
336     # Find the Distene section
337     num_line = -1
338     for i,line in enumerate(text):
339         if "# Set DISTENE License" in line:
340             num_line = i
341             break
342     if num_line == -1:
343         # No distene product, there is nothing to do
344         fin.close()
345         for line in text:
346             fout.write(line)
347         fout.close()
348         return
349     del text[num_line +1]
350     del text[num_line +1]
351     text_to_insert ="""    try:
352         distene_licence_file=r"%s"
353         if sys.version_info[0] >= 3 and sys.version_info[1] >= 5:
354             import importlib.util
355             spec_dist = importlib.util.spec_from_file_location("distene_licence", distene_licence_file)
356             distene=importlib.util.module_from_spec(spec_dist)
357             spec_dist.loader.exec_module(distene)
358         else:
359             import imp
360             distene = imp.load_source('distene_licence', distene_licence_file)
361         distene.set_distene_variables(context)
362     except:
363         pass\n"""  % licence_file
364     text.insert(num_line + 1, text_to_insert)
365     for line in text:
366         fout.write(line)
367     fin.close()    
368     fout.close()
369     return
370     
371 def produce_relative_env_files(config,
372                               logger,
373                               file_dir,
374                               binaries_dir_name,
375                               exe_name=None):
376     '''Create some specific environment files for the binary package. These 
377        files use relative paths.
378     
379     :param config Config: The global configuration.
380     :param logger Logger: the logging instance
381     :param file_dir str: the directory where to put the files
382     :param binaries_dir_name str: the name of the directory where the binaries
383                                   are, in the archive.
384     :param exe_name str: if given, generate a launcher that executes exe_name
385     :return: the path of the produced environment file
386     :rtype: str
387     '''  
388
389     # set base mode to "no" for the archive - save current mode to restore it at the end
390     if "base" in config.APPLICATION:
391         base_setting=config.APPLICATION.base 
392     else:
393         base_setting="maybe"
394     config.APPLICATION.base="no"
395
396     # create an environment file writer
397     writer = src.environment.FileEnvWriter(config,
398                                            logger,
399                                            file_dir,
400                                            src_root=None)
401     
402     if src.architecture.is_windows():
403       shell = "bat"
404       filename  = "env_launch.bat"
405     else:
406       shell = "bash"
407       filename  = "env_launch.sh"
408
409     if exe_name:
410         filename=os.path.basename(exe_name)
411
412     # Write
413     filepath = writer.write_env_file(filename,
414                           False, # for launch
415                           shell,
416                           for_package = binaries_dir_name)
417
418     # Little hack to put out_dir_Path as environment variable
419     if src.architecture.is_windows() :
420       src.replace_in_file(filepath, '"out_dir_Path', '"%out_dir_Path%' )
421       src.replace_in_file(filepath, '=out_dir_Path', '=%out_dir_Path%' )
422       src.replace_in_file(filepath, ';out_dir_Path', ';%out_dir_Path%' )
423     else:
424       src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )
425       src.replace_in_file(filepath, ':out_dir_Path', ':${out_dir_Path}' )
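    # e.g. PATH="out_dir_Path/bin:$PATH" becomes PATH="${out_dir_Path}/bin:$PATH"
    # on linux, and %out_dir_Path% is used on windows (illustrative variable)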
426
427     if exe_name:
428         if src.architecture.is_windows():
429             cmd="\n\nrem Launch exe with user arguments\n%s " % exe_name + "%*"
430         else:
431             cmd='\n\n# Launch exe with user arguments\n%s "$*"' % exe_name
432         with open(filepath, "a") as exe_launcher:
433             exe_launcher.write(cmd)
434
435     # change the rights in order to make the file executable for everybody
436     os.chmod(filepath,
437              stat.S_IRUSR |
438              stat.S_IRGRP |
439              stat.S_IROTH |
440              stat.S_IWUSR |
441              stat.S_IXUSR |
442              stat.S_IXGRP |
443              stat.S_IXOTH)
444     
445     # restore the modified setting to its initial value
446     config.APPLICATION.base=base_setting
447
448     return filepath
449
450 def produce_install_bin_file(config,
451                              logger,
452                              file_dir,
453                              d_sub,
454                              file_name):
455     '''Create a bash shell script which does substitutions in the BINARIES dir
456        in order to use it for extra compilations.
457     
458     :param config Config: The global configuration.
459     :param logger Logger: the logging instance
460     :param file_dir str: the directory where to put the files
461     :param d_sub dict: the dictionary that contains the substitutions to be done
462     :param file_name str: the name of the install script file
463     :return: the produced file
464     :rtype: str
465     '''  
466     # Write
467     filepath = os.path.join(file_dir, file_name)
468     # open the file and write into it
469     # use codec utf-8 as sat variables are in unicode
470     with codecs.open(filepath, "w", 'utf-8') as installbin_file:
471         installbin_template_path = os.path.join(config.VARS.internal_dir,
472                                         "INSTALL_BIN.template")
473         
474         # build the name of the directory that will contain the binaries
475         binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
476         # build the substitution loop
477         loop_cmd = "for f in $(grep -RIl"
478         for key in d_sub:
479             loop_cmd += " -e "+ key
480         loop_cmd += ' ' + config.INTERNAL.config.install_dir +\
481                     '); do\n     sed -i "\n'
482         for key in d_sub:
483             loop_cmd += "        s?" + key + "?$(pwd)/" + d_sub[key] + "?g\n"
484         loop_cmd += '            " $f\ndone'
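        # with a hypothetical d_sub = {"/old/prefix": "BINARIES-XX/KERNEL"} and an
        # install dir named "INSTALL", the generated snippet would look like:
        #   for f in $(grep -RIl -e /old/prefix INSTALL); do
        #        sed -i "
        #           s?/old/prefix?$(pwd)/BINARIES-XX/KERNEL?g
        #               " $f
        #   done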
485
486         d={}
487         d["BINARIES_DIR"] = binaries_dir_name
488         d["SUBSTITUTION_LOOP"]=loop_cmd
489         d["INSTALL_DIR"]=config.INTERNAL.config.install_dir
490         
491         # substitute the template and write it in file
492         content=src.template.substitute(installbin_template_path, d)
493         installbin_file.write(content)
494         # change the rights in order to make the file executable for everybody
495         os.chmod(filepath,
496                  stat.S_IRUSR |
497                  stat.S_IRGRP |
498                  stat.S_IROTH |
499                  stat.S_IWUSR |
500                  stat.S_IXUSR |
501                  stat.S_IXGRP |
502                  stat.S_IXOTH)
503     
504     return filepath
505
506 def product_appli_creation_script(config,
507                                   logger,
508                                   file_dir,
509                                   binaries_dir_name):
510     '''Create a script that can produce an application (EDF style) in the binary
511        package.
512     
513     :param config Config: The global configuration.
514     :param logger Logger: the logging instance
515     :param file_dir str: the directory where to put the file
516     :param binaries_dir_name str: the name of the directory where the binaries
517                                   are, in the archive.
518     :return: the path of the produced script file
519     :rtype: Str
520     '''
521     template_name = "create_appli.py.for_bin_packages.template"
522     template_path = os.path.join(config.VARS.internal_dir, template_name)
523     text_to_fill = open(template_path, "r").read()
524     text_to_fill = text_to_fill.replace("TO BE FILLED 1",
525                                         '"' + binaries_dir_name + '"')
526     
527     text_to_add = ""
528     for product_name in get_SALOME_modules(config):
529         product_info = src.product.get_product_config(config, product_name)
530        
531         if src.product.product_is_smesh_plugin(product_info):
532             continue
533
534         if 'install_dir' in product_info and bool(product_info.install_dir):
535             if src.product.product_is_cpp(product_info):
536                 # cpp module
537                 for cpp_name in src.product.get_product_components(product_info):
538                     line_to_add = ("<module name=\"" + 
539                                    cpp_name + 
540                                    "\" gui=\"yes\" path=\"''' + "
541                                    "os.path.join(dir_bin_name, \"" + 
542                                    cpp_name + "\") + '''\"/>")
543             else:
544                 # regular module
545                 line_to_add = ("<module name=\"" + 
546                                product_name + 
547                                "\" gui=\"yes\" path=\"''' + "
548                                "os.path.join(dir_bin_name, \"" + 
549                                product_name + "\") + '''\"/>")
550             text_to_add += line_to_add + "\n"
551     
552     filled_text = text_to_fill.replace("TO BE FILLED 2", text_to_add)
553     
554     tmp_file_path = os.path.join(file_dir, "create_appli.py")
555     ff = open(tmp_file_path, "w")
556     ff.write(filled_text)
557     ff.close()
558     
559     # change the rights in order to make the file executable for everybody
560     os.chmod(tmp_file_path,
561              stat.S_IRUSR |
562              stat.S_IRGRP |
563              stat.S_IROTH |
564              stat.S_IWUSR |
565              stat.S_IXUSR |
566              stat.S_IXGRP |
567              stat.S_IXOTH)
568     
569     return tmp_file_path
570
571 def bin_products_archives(config):
572     '''Prepare binary packages for all products
573     :param config Config: The global configuration.
574     :return: the error status
575     :rtype: int
576     '''
577
578     print ("CNC bin_products_archives!!")
579     # Get the list of product installation to add to the archive
580     l_products_name = sorted(config.APPLICATION.products.keys())
581     l_product_info = src.product.get_products_infos(l_products_name,
582                                                     config)
583     # first loop on products : filter products, analyse properties,
584     # and store the information that will be used to create the archive in the second loop 
585     for prod_name, prod_info in l_product_info:
586         # ignore the native and fixed products for install directories
587         if (src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes"
588                 or src.product.product_is_native(prod_info) 
589                 or src.product.product_is_fixed(prod_info)
590                 or not src.product.product_compiles(prod_info)):
591             continue
592         print ("CNC produce bin archive for ", prod_name)
593     return 0
594
595 def binary_package(config, logger, options, tmp_working_dir):
596     '''Prepare a dictionary that stores all the needed directories and files to
597        add in a binary package.
598     
599     :param config Config: The global configuration.
600     :param logger Logger: the logging instance
601     :param options OptResult: the options of the launched command
602     :param tmp_working_dir str: The temporary local directory containing some 
603                                 specific directories or files needed in the 
604                                 binary package
605     :return: the dictionary that stores all the needed directories and files to
606              add in a binary package.
607              {label : (path_on_local_machine, path_in_archive)}
608     :rtype: dict
609     '''
610
611     # Get the list of product installation to add to the archive
612     l_products_name = sorted(config.APPLICATION.products.keys())
613     l_product_info = src.product.get_products_infos(l_products_name,
614                                                     config)
615
616     # remove compile-time products for binaries-only archives
617     if not options.sources:
618         update_config(config, logger, "compile_time", "yes")
619
620     l_install_dir = []
621     l_source_dir = []
622     l_not_installed = []
623     l_sources_not_present = []
624     generate_mesa_launcher = False  # a flag to know if we generate a mesa launcher
625     if ("APPLICATION" in config  and
626         "properties"  in config.APPLICATION  and
627         "mesa_launcher_in_package"    in config.APPLICATION.properties  and
628         config.APPLICATION.properties.mesa_launcher_in_package == "yes") :
629             generate_mesa_launcher=True
630
631     # first loop on products : filter products, analyse properties,
632     # and store the information that will be used to create the archive in the second loop 
633     for prod_name, prod_info in l_product_info:
634         # skip product with property not_in_package set to yes
635         if src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes":
636             continue  
637
638         # Add the sources of the products that have the property 
639         # sources_in_package : "yes"
640         if src.get_property_in_product_cfg(prod_info,
641                                            "sources_in_package") == "yes":
642             if os.path.exists(prod_info.source_dir):
643                 l_source_dir.append((prod_name, prod_info.source_dir))
644             else:
645                 l_sources_not_present.append(prod_name)
646
647         # ignore the native and fixed products for install directories
648         if (src.product.product_is_native(prod_info) 
649                 or src.product.product_is_fixed(prod_info)
650                 or not src.product.product_compiles(prod_info)):
651             continue
652         # 
653         # products with single_dir property will be installed in the PRODUCTS directory of the archive
654         is_single_dir=(src.appli_test_property(config,"single_install_dir", "yes") and \
655                        src.product.product_test_property(prod_info,"single_install_dir", "yes"))
656         if src.product.check_installation(config, prod_info):
657             l_install_dir.append((prod_name, prod_info.name, prod_info.install_dir,
658                                   is_single_dir, prod_info.install_mode))
659         else:
660             l_not_installed.append(prod_name)
661         
662         # Add also the cpp generated modules (if any)
663         if src.product.product_is_cpp(prod_info):
664             # cpp module
665             for name_cpp in src.product.get_product_components(prod_info):
666                 install_dir = os.path.join(config.APPLICATION.workdir,
667                                            config.INTERNAL.config.install_dir,
668                                            name_cpp) 
669                 if os.path.exists(install_dir):
670                     l_install_dir.append((name_cpp, name_cpp, install_dir, False, "value"))
671                 else:
672                     l_not_installed.append(name_cpp)
673         
674     # check the name of the directory that could contain the binaries
675     # from a previous detar
676     binaries_from_detar = os.path.join(
677                               config.APPLICATION.workdir,
678                               config.INTERNAL.config.binary_dir + config.VARS.dist)
679     if os.path.exists(binaries_from_detar):
680          logger.write("""
681 WARNING: existing binaries directory from previous detar installation:
682          %s
683          To make a new package from this, you have to either:
684          1) install the binaries in the INSTALL directory with the script "install_bin.sh"
685             (see the README file for more details), or
686          2) recompile everything in INSTALL with the "sat compile" command
687             (this step is long, and requires some linux packages to be installed
688             on your system)\n
689 """ % binaries_from_detar)
690     
691     # Print warning or error if there are some missing products
692     if len(l_not_installed) > 0:
693         text_missing_prods = ""
694         for p_name in l_not_installed:
695             text_missing_prods += " - " + p_name + "\n"
696         if not options.force_creation:
697             msg = _("ERROR: there are missing product installations:")
698             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
699                                      text_missing_prods),
700                          1)
701             raise src.SatException(msg)
702         else:
703             msg = _("WARNING: there are missing product installations:")
704             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
705                                      text_missing_prods),
706                          1)
707
708     # Do the same for sources
709     if len(l_sources_not_present) > 0:
710         text_missing_prods = ""
711         for p_name in l_sources_not_present:
712             text_missing_prods += " - " + p_name + "\n"
713         if not options.force_creation:
714             msg = _("ERROR: there are missing product sources:")
715             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
716                                      text_missing_prods),
717                          1)
718             raise src.SatException(msg)
719         else:
720             msg = _("WARNING: there are missing product sources:")
721             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
722                                      text_missing_prods),
723                          1)
724  
725     # construct the name of the directory that will contain the binaries
726     if src.architecture.is_windows():
727         binaries_dir_name = config.INTERNAL.config.binary_dir
728     else:
729         binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
730     # construct the correlation table between the product names, their
731     # actual install directories and their install directories in the archive
732     d_products = {}
733     for prod_name, prod_info_name, install_dir, is_single_dir, install_mode in l_install_dir:
734         prod_base_name=os.path.basename(install_dir)
735         if install_mode == "base":
736             # case of a product installed in base.
737             # because the archive is in base:no mode, the name of the install dir is different inside the archive:
738             # we set it to the product name, or to the single install dir (PRODUCTS) in the single-dir case
739             if is_single_dir:
740                 prod_base_name=config.INTERNAL.config.single_install_dir
741             else:
742                 prod_base_name=prod_info_name
743         path_in_archive = os.path.join(binaries_dir_name, prod_base_name)
744         d_products[prod_name + " (bin)"] = (install_dir, path_in_archive)
745         
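    # e.g. d_products["KERNEL (bin)"] =
    #          ("<workdir>/INSTALL/KERNEL", "BINARIES-XX/KERNEL")   (illustrative)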
746     for prod_name, source_dir in l_source_dir:
747         path_in_archive = os.path.join("SOURCES", prod_name)
748         d_products[prod_name + " (sources)"] = (source_dir, path_in_archive)
749
750     # for packages of SALOME applications including KERNEL, 
751     # we produce a salome launcher or a virtual application (depending on salome version)
752     if 'KERNEL' in config.APPLICATION.products:
753         VersionSalome = src.get_salome_version(config)
754         # Case where SALOME has the launcher that uses the SalomeContext API
755         if VersionSalome >= 730:
756             # create the relative launcher and add it to the files to add
757             launcher_name = src.get_launcher_name(config)
758             launcher_package = produce_relative_launcher(config,
759                                                  logger,
760                                                  tmp_working_dir,
761                                                  launcher_name,
762                                                  binaries_dir_name)
763             d_products["launcher"] = (launcher_package, launcher_name)
764
765             # if the application contains mesa products, we generate in addition to the 
766             # classical salome launcher a launcher using mesa and called mesa_salome 
767             # (the mesa launcher will be used for remote usage through ssh).
768             if generate_mesa_launcher:
769                 #if there is one : store the use_mesa property
770                 restore_use_mesa_option=None
771                 if ('properties' in config.APPLICATION and 
772                     'use_mesa' in config.APPLICATION.properties):
773                     restore_use_mesa_option = config.APPLICATION.properties.use_mesa
774
775                 # activate mesa property, and generate a mesa launcher
776                 src.activate_mesa_property(config)  #activate use_mesa property
777                 launcher_mesa_name="mesa_"+launcher_name
778                 launcher_package_mesa = produce_relative_launcher(config,
779                                                      logger,
780                                                      tmp_working_dir,
781                                                      launcher_mesa_name,
782                                                      binaries_dir_name)
783                 d_products["launcher (mesa)"] = (launcher_package_mesa, launcher_mesa_name)
784
785                 # if there was a use_mesa value, we restore it
786                 # else we set it to the default value "no"
787                 if restore_use_mesa_option != None:
788                     config.APPLICATION.properties.use_mesa=restore_use_mesa_option
789                 else:
790                     config.APPLICATION.properties.use_mesa="no"
791
792             if options.sources:
793                 # if we mix binaries and sources, we add a copy of the launcher,
794                 # prefixed with "bin", in order to avoid clashes
795                 d_products["launcher (copy)"] = (launcher_package, "bin"+launcher_name)
796         else:
797             # Provide a script for the creation of an application EDF style
798             appli_script = product_appli_creation_script(config,
799                                                         logger,
800                                                         tmp_working_dir,
801                                                         binaries_dir_name)
802             
803             d_products["appli script"] = (appli_script, "create_appli.py")
804
805     # Put also the environment file
806     env_file = produce_relative_env_files(config,
807                                            logger,
808                                            tmp_working_dir,
809                                            binaries_dir_name)
810
811     if src.architecture.is_windows():
812       filename  = "env_launch.bat"
813     else:
814       filename  = "env_launch.sh"
815     d_products["environment file"] = (env_file, filename)      
816
817     # If option exe, produce an extra launcher based on specified exe
818     if options.exe:
819         exe_file = produce_relative_env_files(config,
820                                               logger,
821                                               tmp_working_dir,
822                                               binaries_dir_name,
823                                               options.exe)
824             
825         if src.architecture.is_windows():
826           filename  = os.path.basename(options.exe) + ".bat"
827         else:
828           filename  = os.path.basename(options.exe) + ".sh"
829         d_products["exe file"] = (exe_file, filename)      
830     
831
832     return d_products
833
834 def source_package(sat, config, logger, options, tmp_working_dir):
835     '''Prepare a dictionary that stores all the needed directories and files to
836        add in a source package.
837     
838     :param config Config: The global configuration.
839     :param logger Logger: the logging instance
840     :param options OptResult: the options of the launched command
841     :param tmp_working_dir str: The temporary local directory containing some 
842                                 specific directories or files needed in the 
843                                 source package
844     :return: the dictionary that stores all the needed directories and files to
845              add in a source package.
846              {label : (path_on_local_machine, path_in_archive)}
847     :rtype: dict
848     '''
849     
850     d_archives={}; l_pinfo_vcs = []  # l_pinfo_vcs must exist even when --ftp skips get_archives
851     # Get all the products that are prepared using an archive,
852     # unless ftp mode is specified (in this case the user of the
853     # archive will get the sources through the ftp mode of sat prepare)
854     if not options.ftp:
855         logger.write("Find archive products ... ")
856         d_archives, l_pinfo_vcs = get_archives(config, logger)
857         logger.write("Done\n")
858
859     d_archives_vcs = {}
860     if not options.with_vcs and len(l_pinfo_vcs) > 0:
861         # Make archives with the products that are not prepared using an archive
862         # (git, cvs, svn, etc)
863         logger.write("Construct archives for vcs products ... ")
864         d_archives_vcs = get_archives_vcs(l_pinfo_vcs,
865                                           sat,
866                                           config,
867                                           logger,
868                                           tmp_working_dir)
869         logger.write("Done\n")
870
871     # Create a project
872     logger.write("Create the project ... ")
873     d_project = create_project_for_src_package(config,
874                                                tmp_working_dir,
875                                                options.with_vcs,
876                                                options.ftp)
877     logger.write("Done\n")
878     
879     # Add salomeTools
880     tmp_sat = add_salomeTools(config, tmp_working_dir)
881     d_sat = {"salomeTools" : (tmp_sat, "salomeTools")}
882     
883     # Add a sat symbolic link if not win
884     if not src.architecture.is_windows():
885         tmp_satlink_path = os.path.join(tmp_working_dir, 'sat')
886         try:
887             t = os.getcwd()
888         except:
889             # In the jobs, os.getcwd() can fail
890             t = config.LOCAL.workdir
891         os.chdir(tmp_working_dir)
892         if os.path.lexists(tmp_satlink_path):
893             os.remove(tmp_satlink_path)
894         os.symlink(os.path.join('salomeTools', 'sat'), 'sat')
895         os.chdir(t)
896         
897         d_sat["sat link"] = (tmp_satlink_path, "sat")
898     
899     d_source = src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
900     return d_source
901
902 def get_archives(config, logger):
903     '''Find all the products that are retrieved using an archive and all the
904        products that are retrieved from a vcs (git, cvs, svn) repository.
905     
906     :param config Config: The global configuration.
907     :param logger Logger: the logging instance
908     :return: the dictionary {name_product : 
909              (local path of its archive, path in the package of its archive )}
910              and the list of specific configuration corresponding to the vcs 
911              products
912     :rtype: (Dict, List)
913     '''
914     # Get the list of product information
915     l_products_name = config.APPLICATION.products.keys()
916     l_product_info = src.product.get_products_infos(l_products_name,
917                                                     config)
918     d_archives = {}
919     l_pinfo_vcs = []
920     for p_name, p_info in l_product_info:
921         # skip product with property not_in_package set to yes
922         if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
923             continue  
924         # ignore the native and fixed products
925         if (src.product.product_is_native(p_info) 
926                 or src.product.product_is_fixed(p_info)):
927             continue
928         if p_info.get_source == "archive":
929             archive_path = p_info.archive_info.archive_name
930             archive_name = os.path.basename(archive_path)
931             d_archives[p_name] = (archive_path,
932                                   os.path.join(ARCHIVE_DIR, archive_name))
933             if (src.appli_test_property(config,"pip", "yes") and 
934                 src.product.product_test_property(p_info,"pip", "yes")):
935                 # if pip mode is activated, and product is managed by pip
936                 pip_wheels_dir=os.path.join(config.LOCAL.archive_dir,"wheels")
937                 pip_wheel_pattern=os.path.join(pip_wheels_dir, 
938                     "%s-%s*" % (p_info.name, p_info.version))
939                 pip_wheel_path=glob.glob(pip_wheel_pattern)
940                 msg_pip_not_found="Error in get_archive, pip wheel for "\
941                                   "product %s-%s was not found in %s directory"
942                 msg_pip_two_or_more="Error in get_archive, several pip wheels for "\
943                                   "product %s-%s were found in %s directory"
944                 if len(pip_wheel_path)==0:
945                     raise src.SatException(msg_pip_not_found %\
946                         (p_info.name, p_info.version, pip_wheels_dir))
947                 if len(pip_wheel_path)>1:
948                     raise src.SatException(msg_pip_two_or_more %\
949                         (p_info.name, p_info.version, pip_wheels_dir))
950
951                 pip_wheel_name=os.path.basename(pip_wheel_path[0])
952                 d_archives[p_name+" (pip wheel)"]=(pip_wheel_path[0], 
953                     os.path.join(ARCHIVE_DIR, "wheels", pip_wheel_name))
954         else:
955             # this product is not managed by archive, 
956             # an archive of the vcs directory will be created by get_archives_vcs
957             l_pinfo_vcs.append((p_name, p_info)) 
958             
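    # e.g. d_archives["boost"] = ("<archive_dir>/boost-1.71.0.tar.gz",
    #                             "ARCHIVES/boost-1.71.0.tar.gz")   (hypothetical entry)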
959     return d_archives, l_pinfo_vcs
960
961 def add_salomeTools(config, tmp_working_dir):
962     '''Prepare a version of salomeTools that has a specific local.pyconf file 
963        configured for a source package.
964
965     :param config Config: The global configuration.
966     :param tmp_working_dir str: The temporary local directory containing some 
967                                 specific directories or files needed in the 
968                                 source package
969     :return: The path to the local salomeTools directory to add in the package
970     :rtype: str
971     '''
972     # Copy sat in the temporary working directory
973     sat_tmp_path = src.Path(os.path.join(tmp_working_dir, "salomeTools"))
974     sat_running_path = src.Path(config.VARS.salometoolsway)
975     sat_running_path.copy(sat_tmp_path)
976     
977     # Update the local.pyconf file that contains the path to the project
978     local_pyconf_name = "local.pyconf"
979     local_pyconf_dir = os.path.join(tmp_working_dir, "salomeTools", "data")
980     local_pyconf_file = os.path.join(local_pyconf_dir, local_pyconf_name)
981     # Remove the .pyconf files in the root directory of salomeTools if there are
982     # any. (For example when launching jobs, a pyconf file describing the jobs
983     # can be here and is not useful)
984     files_or_dir_SAT = os.listdir(os.path.join(tmp_working_dir, "salomeTools"))
985     for file_or_dir in files_or_dir_SAT:
986         if file_or_dir.endswith(".pyconf") or file_or_dir.endswith(".txt"):
987             file_path = os.path.join(tmp_working_dir,
988                                      "salomeTools",
989                                      file_or_dir)
990             os.remove(file_path)
991     
992     ff = open(local_pyconf_file, "w")
993     ff.write(LOCAL_TEMPLATE)
994     ff.close()
995     
996     return sat_tmp_path.path
997
998 def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir):
999     '''For source packages that require all products to be retrieved as
1000        archives, one has to create archives for the vcs products.
1001        So this method calls the clean and source commands of sat and then creates
1002        the archives.
1003
1004     :param l_pinfo_vcs List: The list of specific configuration corresponding to
1005                              each vcs product
1006     :param sat Sat: The Sat instance that can be called to clean and source the
1007                     products
1008     :param config Config: The global configuration.
1009     :param logger Logger: the logging instance
1010     :param tmp_working_dir str: The temporary local directory containing some 
1011                                 specific directories or files needed in the 
1012                                 source package
1013     :return: the dictionary that stores all the archives to add in the source 
1014              package. {label : (path_on_local_machine, path_in_archive)}
1015     :rtype: dict
1016     '''
1017     # clean the source directory of all the vcs products, then use the source 
1018     # command and thus construct an archive that will not contain the patches
1019     l_prod_names = [pn for pn, __ in l_pinfo_vcs]
1020     if False: # clean is dangerous in user/SOURCES, fixed in tmp_local_working_dir
1021       logger.write(_("\nclean sources\n"))
1022       args_clean = config.VARS.application
1023       args_clean += " --sources --products "
1024       args_clean += ",".join(l_prod_names)
1025       logger.write("WARNING: get_archives_vcs clean\n         '%s'\n" % args_clean, 1)
1026       sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)
1027     if True:
1028       # source
1029       logger.write(_("get sources\n"))
1030       args_source = config.VARS.application
1031       args_source += " --products "
1032       args_source += ",".join(l_prod_names)
1033       svgDir = sat.cfg.APPLICATION.workdir
1034       tmp_local_working_dir = os.path.join(sat.cfg.APPLICATION.workdir, "tmp_package")  # to avoid too many big files in /tmp
1035       sat.cfg.APPLICATION.workdir = tmp_local_working_dir
1036       # DBG.write("SSS sat config.APPLICATION.workdir", sat.cfg.APPLICATION, True)
1037       # DBG.write("sat config id", id(sat.cfg), True)
1038       # note: config is not the same id() as for sat.source()
1039       # sat.source(args_source, batch=True, verbose=5, logger_add_link = logger)
1040       import source
1041       source.run(args_source, sat, logger) #use this mode as runner.cfg reference
1042       
1043       # make the new archives
1044       d_archives_vcs = {}
1045       for pn, pinfo in l_pinfo_vcs:
1046           path_archive = make_archive(pn, pinfo, tmp_local_working_dir)
1047           logger.write("make archive vcs '%s'\n" % path_archive)
1048           d_archives_vcs[pn] = (path_archive,
1049                                 os.path.join(ARCHIVE_DIR, pn + ".tgz"))
1050       sat.cfg.APPLICATION.workdir = svgDir
1051       # DBG.write("END sat config", sat.cfg.APPLICATION, True)
1052     return d_archives_vcs
1053
1054 def make_archive(prod_name, prod_info, where):
1055     '''Create an archive of a product by searching its source directory.
1056
1057     :param prod_name str: The name of the product.
1058     :param prod_info Config: The specific configuration corresponding to the 
1059                              product
1060     :param where str: The path of the directory where to put the resulting
1061                       archive
1062     :return: The path of the resulting archive
1063     :rtype: str
1064     '''
1065     path_targz_prod = os.path.join(where, prod_name + PACKAGE_EXT)
1066     tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
1067     local_path = prod_info.source_dir
1068     if old_python:
1069         tar_prod.add(local_path,
1070                      arcname=prod_name,
1071                      exclude=exclude_VCS_and_extensions_26)
1072     else:
1073         tar_prod.add(local_path,
1074                      arcname=prod_name,
1075                      filter=exclude_VCS_and_extensions)
1076     tar_prod.close()
1077     return path_targz_prod       
1078
1079 def create_project_for_src_package(config, tmp_working_dir, with_vcs, with_ftp):
1080     '''Create a specific project for a source package.
1081
1082     :param config Config: The global configuration.
1083     :param tmp_working_dir str: The temporary local directory containing some 
1084                                 specific directories or files needed in the 
1085                                 source package
1086     :param with_vcs boolean: True if the package is with vcs products (not 
1087                              transformed into archive products)
1088     :param with_ftp boolean: True if the package uses ftp servers to get archives
1089     :return: The dictionary 
1090              {"project" : (produced project, project path in the archive)}
1091     :rtype: Dict
1092     '''
1093
1094     # Create in the working temporary directory the full project tree
1095     project_tmp_dir = os.path.join(tmp_working_dir, PROJECT_DIR)
1096     products_pyconf_tmp_dir = os.path.join(project_tmp_dir,
1097                                          "products")
1098     compil_scripts_tmp_dir = os.path.join(project_tmp_dir,
1099                                          "products",
1100                                          "compil_scripts")
1101     env_scripts_tmp_dir = os.path.join(project_tmp_dir,
1102                                          "products",
1103                                          "env_scripts")
1104     patches_tmp_dir = os.path.join(project_tmp_dir,
1105                                          "products",
1106                                          "patches")
1107     application_tmp_dir = os.path.join(project_tmp_dir,
1108                                          "applications")
1109     for directory in [project_tmp_dir,
1110                       compil_scripts_tmp_dir,
1111                       env_scripts_tmp_dir,
1112                       patches_tmp_dir,
1113                       application_tmp_dir]:
1114         src.ensure_path_exists(directory)
1115
1116     # Create the pyconf that contains the information of the project
1117     project_pyconf_name = "project.pyconf"        
1118     project_pyconf_file = os.path.join(project_tmp_dir, project_pyconf_name)
1119     ff = open(project_pyconf_file, "w")
1120     ff.write(PROJECT_TEMPLATE)
1121     if with_ftp and len(config.PATHS.ARCHIVEFTP) > 0:
1122         ftp_path='ARCHIVEFTP : "'+config.PATHS.ARCHIVEFTP[0]
1123         for ftpserver in config.PATHS.ARCHIVEFTP[1:]:
1124             ftp_path=ftp_path+":"+ftpserver
1125         ftp_path+='"'
1126         ff.write("# ftp servers where to search for prerequisite archives\n")
1127         ff.write(ftp_path)
1128     # add licence paths if any
1129     if len(config.PATHS.LICENCEPATH) > 0:  
1130         licence_path='LICENCEPATH : "'+config.PATHS.LICENCEPATH[0]
1131         for path in config.PATHS.LICENCEPATH[1:]:
1132             licence_path=licence_path+":"+path
1133         licence_path+='"'
1134         ff.write("\n# Where to search for licences\n")
1135         ff.write(licence_path)
1136         
1137
1138     ff.close()
1139     
1140     # Loop over the products to get their pyconf and all the scripts
1141     # (compilation, environment, patches)
1142     # and create the pyconf file to add to the project
1143     lproducts_name = config.APPLICATION.products.keys()
1144     l_products = src.product.get_products_infos(lproducts_name, config)
1145     for p_name, p_info in l_products:
1146         # skip product with property not_in_package set to yes
1147         if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
1148             continue  
1149         find_product_scripts_and_pyconf(p_name,
1150                                         p_info,
1151                                         config,
1152                                         with_vcs,
1153                                         compil_scripts_tmp_dir,
1154                                         env_scripts_tmp_dir,
1155                                         patches_tmp_dir,
1156                                         products_pyconf_tmp_dir)
1157     
1158     # for the application pyconf, we write the config directly
1159     # (we do not search for the original pyconf file,
1160     # to avoid problems with overwrite sections and the rm_products key)
1161     write_application_pyconf(config, application_tmp_dir)
1162     
1163     d_project = {"project" : (project_tmp_dir, PROJECT_DIR )}
1164     return d_project
1165
1166 def find_product_scripts_and_pyconf(p_name,
1167                                     p_info,
1168                                     config,
1169                                     with_vcs,
1170                                     compil_scripts_tmp_dir,
1171                                     env_scripts_tmp_dir,
1172                                     patches_tmp_dir,
1173                                     products_pyconf_tmp_dir):
1174     '''Create a specific pyconf file for a given product. Get its environment 
1175        script, its compilation script and patches and put them in the temporary
1176        working directory. This method is used in the source package in order to
1177        construct the specific project.
1178
1179     :param p_name str: The name of the product.
1180     :param p_info Config: The specific configuration corresponding to the 
1181                              product
1182     :param config Config: The global configuration.
1183     :param with_vcs boolean: True if the package is with vcs products (not 
1184                              transformed into archive products)
1185     :param compil_scripts_tmp_dir str: The path to the temporary compilation 
1186                                        scripts directory of the project.
1187     :param env_scripts_tmp_dir str: The path to the temporary environment script 
1188                                     directory of the project.
1189     :param patches_tmp_dir str: The path to the temporary patch scripts 
1190                                 directory of the project.
1191     :param products_pyconf_tmp_dir str: The path to the temporary product 
1192                                         scripts directory of the project.
1193     '''
1194     
1195     # read the pyconf of the product
1196     product_pyconf_cfg = src.pyconf.Config(p_info.from_file)
1197
1198     # find the compilation script if any
1199     if src.product.product_has_script(p_info):
1200         compil_script_path = src.Path(p_info.compil_script)
1201         compil_script_path.copy(compil_scripts_tmp_dir)
1202
1203     # find the environment script if any
1204     if src.product.product_has_env_script(p_info):
1205         env_script_path = src.Path(p_info.environ.env_script)
1206         env_script_path.copy(env_scripts_tmp_dir)
1207
1208     # find the patches if any
1209     if src.product.product_has_patches(p_info):
1210         patches = src.pyconf.Sequence()
1211         for patch_path in p_info.patches:
1212             p_path = src.Path(patch_path)
1213             p_path.copy(patches_tmp_dir)
1214             patches.append(os.path.basename(patch_path), "")
1215
1216     if (not with_vcs) and src.product.product_is_vcs(p_info):
1217         # in non-VCS mode, if the product is not in archive mode, switch it to archive mode.
1218
1219         # depending upon the incremental mode, select impacted sections
1220         if "properties" in p_info and "incremental" in p_info.properties and\
1221             p_info.properties.incremental == "yes":
1222             sections = ["default", "default_win", p_info.section, p_info.section+"_win"]
1223         else:
1224             sections = [p_info.section]
1225         for section in sections:
1226             if section in product_pyconf_cfg and "get_source" in product_pyconf_cfg[section]:
1227                 DBG.write("sat package set archive mode to archive for product %s and section %s" %\
1228                           (p_name,section))
1229                 product_pyconf_cfg[section].get_source = "archive"
1230                 if not "archive_info" in product_pyconf_cfg[section]:
1231                     product_pyconf_cfg[section].addMapping("archive_info",
1232                                         src.pyconf.Mapping(product_pyconf_cfg),
1233                                         "")
1234                     product_pyconf_cfg[section].archive_info.archive_name =\
1235                         p_info.name + ".tgz"
1236     
1237     if (with_vcs) and src.product.product_is_vcs(p_info):
1238         # in VCS mode we must explicitly replace the git server url
1239         # (otherwise it will not be found later, because project files are not exported in archives)
1240         for section in product_pyconf_cfg:
1241             # replace, in all sections of the product pyconf, the git repo definition by its substituted value (found in p_info)
1242             if "git_info" in product_pyconf_cfg[section]:
1243                 for repo in product_pyconf_cfg[section].git_info:
1244                     if repo in p_info.git_info:
1245                         product_pyconf_cfg[section].git_info[repo] =  p_info.git_info[repo]
1246
1247     # write the pyconf file to the temporary project location
1248     product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir,
1249                                            p_name + ".pyconf")
1250     ff = open(product_tmp_pyconf_path, 'w')
1251     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1252     product_pyconf_cfg.__save__(ff, 1)
1253     ff.close()
1254
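# Illustration (hypothetical product name; the exact layout depends on the
# product pyconf): after the non-VCS transformation performed above, an
# impacted section of the exported product pyconf reads roughly:
#
#   default :
#   {
#     get_source : "archive"
#     archive_info :
#     {
#       archive_name : "MYPRODUCT.tgz"
#     }
#   }
#
# so that the product is retrieved as an archive instead of being cloned from
# its original repository when the source package is rebuilt.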
1255
1256 def write_application_pyconf(config, application_tmp_dir):
1257     '''Write the application pyconf file in the specific temporary 
1258        directory containing the specific project of a source package.
1259
1260     :param config Config: The global configuration.
1261     :param application_tmp_dir str: The path to the temporary application 
1262                                     scripts directory of the project.
1263     '''
1264     application_name = config.VARS.application
1265     # write the pyconf file to the temporary application location
1266     application_tmp_pyconf_path = os.path.join(application_tmp_dir,
1267                                                application_name + ".pyconf")
1268     with open(application_tmp_pyconf_path, 'w') as f:
1269         f.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1270         res = src.pyconf.Config()
1271         app = src.pyconf.deepCopyMapping(config.APPLICATION)
1272
1273         # set base mode to "no" for the archive
1274         app.base = "no"
1275
1276         # Change the workdir
1277         app.workdir = src.pyconf.Reference(
1278                                  app,
1279                                  src.pyconf.DOLLAR,
1280                                  'VARS.salometoolsway + $VARS.sep + ".."')
1281         res.addMapping("APPLICATION", app, "")
1282         res.__save__(f, evaluated=False)
1283     
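# Note (illustration only): res.__save__(f, evaluated=False) keeps the Reference
# built above unevaluated, so the application pyconf shipped in the archive
# contains a line equivalent to:
#
#   workdir : $VARS.salometoolsway + $VARS.sep + ".."
#
# i.e. the work directory is resolved on the target machine, relative to the
# unpacked salomeTools, and is not hard-coded to the machine that built the
# package.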
1284
1285 def sat_package(config, tmp_working_dir, options, logger):
1286     '''Prepare a dictionary that stores all the needed directories and files to
1287        add in a salomeTool package.
1288     
1289     :param tmp_working_dir str: The temporary local working directory 
1290     :param options OptResult: the options of the launched command
1291     :return: the dictionary that stores all the needed directories and files to
1292              add in a salomeTool package.
1293              {label : (path_on_local_machine, path_in_archive)}
1294     :rtype: dict
1295     '''
1296     d_project = {}
1297
1298     # we include sat itself
1299     d_project["all_sat"]=(config.VARS.salometoolsway, "")
1300
1301     # and we overwrite local.pyconf with a clean version.
1302     local_pyconf_tmp_path = os.path.join(tmp_working_dir, "local.pyconf")
1303     local_file_path = os.path.join(config.VARS.datadir, "local.pyconf")
1304     local_cfg = src.pyconf.Config(local_file_path)
1305     local_cfg.PROJECTS.project_file_paths=src.pyconf.Sequence(local_cfg.PROJECTS)
1306     local_cfg.LOCAL["base"] = "default"
1307     local_cfg.LOCAL["workdir"] = "default"
1308     local_cfg.LOCAL["log_dir"] = "default"
1309     local_cfg.LOCAL["archive_dir"] = "default"
1310     local_cfg.LOCAL["VCS"] = "None"
1311     local_cfg.LOCAL["tag"] = src.get_salometool_version(config)
1312
1313     # if the archive contains a project, we write its relative path in local.pyconf
1314     if options.project:
1315         project_arch_path = os.path.join("projects", options.project, 
1316                                          os.path.basename(options.project_file_path))
1317         local_cfg.PROJECTS.project_file_paths.append(project_arch_path, "")
1318
1319     ff = open(local_pyconf_tmp_path, 'w')
1320     local_cfg.__save__(ff, 1)
1321     ff.close()
1322     d_project["local.pyconf"]=(local_pyconf_tmp_path, "data/local.pyconf")
1323     return d_project
1324     
1325
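# Illustration (approximate rendering): with the assignments above, the
# local.pyconf shipped as data/local.pyconf in the sat package contains a
# LOCAL section along these lines:
#
#   LOCAL :
#   {
#     base : 'default'
#     workdir : 'default'
#     log_dir : 'default'
#     archive_dir : 'default'
#     VCS : 'None'
#     tag : '<the sat version returned by src.get_salometool_version>'
#   }
#
# plus, when --project is given, the relative path of the embedded project file
# appended to PROJECTS.project_file_paths.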
1326 def project_package(config, name_project, project_file_path, ftp_mode, tmp_working_dir, embedded_in_sat, logger):
1327     '''Prepare a dictionary that stores all the needed directories and files to
1328        add in a project package.
1329     
1330     :param project_file_path str: The path to the local project.
1331     :param ftp_mode boolean: If True, do not embed the archives; the package will rely on ftp to retrieve them.
1332     :param tmp_working_dir str: The temporary local directory containing some 
1333                                 specific directories or files needed in the 
1334                                 project package
1335     :param embedded_in_sat boolean : the project package is embedded in a sat package
1336     :return: the dictionary that stores all the needed directories and files to
1337              add in a project package.
1338              {label : (path_on_local_machine, path_in_archive)}
1339     :rtype: dict
1340     '''
1341     d_project = {}
1342     # Read the project file and get the directories to add to the package
1343     
1344     try: 
1345       project_pyconf_cfg = config.PROJECTS.projects.__getattr__(name_project)
1346     except:
1347       logger.write("""
1348 WARNING: nonexistent config.PROJECTS.projects.%s, trying to read it now from:\n%s\n""" % (name_project, project_file_path))
1349       project_pyconf_cfg = src.pyconf.Config(project_file_path)
1350       project_pyconf_cfg.PWD = os.path.dirname(project_file_path)
1351     
1352     paths = {"APPLICATIONPATH" : "applications",
1353              "PRODUCTPATH" : "products",
1354              "JOBPATH" : "jobs",
1355              "MACHINEPATH" : "machines"}
1356     if not ftp_mode:
1357         paths["ARCHIVEPATH"] = "archives"
1358
1359     # Loop over the project paths and add them
1360     project_file_name = os.path.basename(project_file_path)
1361     for path in paths:
1362         if path not in project_pyconf_cfg:
1363             continue
1364         if embedded_in_sat:
1365             dest_path = os.path.join("projects", name_project, paths[path])
1366             project_file_dest = os.path.join("projects", name_project, project_file_name)
1367         else:
1368             dest_path = paths[path]
1369             project_file_dest = project_file_name
1370
1371         # Add the directory to the files to add in the package
1372         d_project[path] = (project_pyconf_cfg[path], dest_path)
1373
1374         # Modify the value of the path in the package
1375         project_pyconf_cfg[path] = src.pyconf.Reference(
1376                                     project_pyconf_cfg,
1377                                     src.pyconf.DOLLAR,
1378                                     'project_path + "/' + paths[path] + '"')
1379     
1380     # Modify some values
1381     if "project_path" not in project_pyconf_cfg:
1382         project_pyconf_cfg.addMapping("project_path",
1383                                       src.pyconf.Mapping(project_pyconf_cfg),
1384                                       "")
1385     project_pyconf_cfg.project_path = src.pyconf.Reference(project_pyconf_cfg,
1386                                                            src.pyconf.DOLLAR,
1387                                                            'PWD')
1388     # we don't want to export these two fields
1389     project_pyconf_cfg.__delitem__("file_path")
1390     project_pyconf_cfg.__delitem__("PWD")
1391     if ftp_mode:
1392         project_pyconf_cfg.__delitem__("ARCHIVEPATH")
1393     
1394     # Write the project pyconf file
1395     project_pyconf_tmp_path = os.path.join(tmp_working_dir, project_file_name)
1396     ff = open(project_pyconf_tmp_path, 'w')
1397     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1398     project_pyconf_cfg.__save__(ff, 1)
1399     ff.close()
1400     d_project["Project hat file"] = (project_pyconf_tmp_path, project_file_dest)
1401     
1402     return d_project
1403
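# Sketch (approximate rendering): in the exported project pyconf, each path kept
# in the loop above is rewritten as a reference relative to the unpacked
# archive, for instance:
#
#   project_path : $PWD
#   APPLICATIONPATH : $project_path + "/applications"
#   PRODUCTPATH : $project_path + "/products"
#
# while the original absolute directories are copied into the archive under the
# corresponding destination paths (below projects/<name_project> when the
# project is embedded in a sat package).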
1404 def add_readme(config, options, where):
1405     readme_path = os.path.join(where, "README")
1406     with codecs.open(readme_path, "w", 'utf-8') as f:
1407
1408         # templates for building the header
1409         readme_header="""
1410 # This package was generated with sat $version
1411 # Date: $date
1412 # User: $user
1413 # Distribution : $dist
1414
1415 In the following, $$ROOT represents the directory where you have installed 
1416 SALOME (the directory where this file is located).
1417
1418 """
1419         if src.architecture.is_windows():
1420             readme_header = readme_header.replace('$$ROOT','%ROOT%')
1421         readme_compilation_with_binaries="""
1422
1423 compilation based on the binaries used as prerequisites
1424 =======================================================
1425
1426 If you fail to compile the complete application (for example because
1427 you are not root on your system and cannot install missing packages), you
1428 may try a partial compilation based on the binaries.
1429 For that, copy the binaries from BINARIES to INSTALL,
1430 and perform some substitutions in the cmake and .la files (replace the build
1431 directories with local paths).
1432 The procedure is:
1433  1) Remove or rename INSTALL directory if it exists
1434  2) Execute the shell script install_bin.sh:
1435  > cd $ROOT
1436  > ./install_bin.sh
1437  3) Use SalomeTool (as explained in the Sources section) and compile only the
1438     modules you need (with the -p option)
1439
1440 """
1441         readme_header_tpl=string.Template(readme_header)
1442         readme_template_path_bin = os.path.join(config.VARS.internal_dir,
1443                 "README_BIN.template")
1444         readme_template_path_bin_launcher = os.path.join(config.VARS.internal_dir,
1445                 "README_LAUNCHER.template")
1446         readme_template_path_bin_virtapp = os.path.join(config.VARS.internal_dir,
1447                 "README_BIN_VIRTUAL_APP.template")
1448         readme_template_path_src = os.path.join(config.VARS.internal_dir,
1449                 "README_SRC.template")
1450         readme_template_path_pro = os.path.join(config.VARS.internal_dir,
1451                 "README_PROJECT.template")
1452         readme_template_path_sat = os.path.join(config.VARS.internal_dir,
1453                 "README_SAT.template")
1454
1455         # prepare substitution dictionary
1456         d = dict()
1457         d['user'] = config.VARS.user
1458         d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
1459         d['version'] = src.get_salometool_version(config)
1460         d['dist'] = config.VARS.dist
1461         f.write(readme_header_tpl.substitute(d)) # write the general header (common)
1462
1463         if options.binaries or options.sources:
1464             d['application'] = config.VARS.application
1465             d['BINARIES']    = config.INTERNAL.config.binary_dir
1466             d['SEPARATOR'] = config.VARS.sep
1467             if src.architecture.is_windows():
1468                 d['operatingSystem'] = 'Windows'
1469                 d['PYTHON3'] = 'python3'
1470                 d['ROOT']    = '%ROOT%'
1471             else:
1472                 d['operatingSystem'] = 'Linux'
1473                 d['PYTHON3'] = ''
1474                 d['ROOT']    = '$ROOT'
1475             f.write("# Application: " + d['application'] + "\n")
1476             if 'KERNEL' in config.APPLICATION.products:
1477                 VersionSalome = src.get_salome_version(config)
1478                 # Case where SALOME has the launcher that uses the SalomeContext API
1479                 if VersionSalome >= 730:
1480                     d['launcher'] = config.APPLICATION.profile.launcher_name
1481                 else:
1482                     d['virtual_app'] = 'runAppli' # this info is not used for now
1483
1484         # write the specific sections
1485         if options.binaries:
1486             f.write(src.template.substitute(readme_template_path_bin, d))
1487             if "virtual_app" in d:
1488                 f.write(src.template.substitute(readme_template_path_bin_virtapp, d))
1489             if "launcher" in d:
1490                 f.write(src.template.substitute(readme_template_path_bin_launcher, d))
1491
1492         if options.sources:
1493             f.write(src.template.substitute(readme_template_path_src, d))
1494
1495         if options.binaries and options.sources and not src.architecture.is_windows():
1496             f.write(readme_compilation_with_binaries)
1497
1498         if options.project:
1499             f.write(src.template.substitute(readme_template_path_pro, d))
1500
1501         if options.sat:
1502             f.write(src.template.substitute(readme_template_path_sat, d))
1503     
1504     return readme_path
1505
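# Illustrative sketch (not part of the original module and never called by
# sat): the README header above is rendered with the standard string.Template
# mechanism; the values below are made-up placeholders.
def _example_readme_header():
    tpl = string.Template("# This package was generated with sat $version\n"
                          "# Date: $date\n")
    # substitute() raises KeyError for missing keys, so add_readme() only uses
    # a template once the corresponding keys are present in its dictionary d.
    return tpl.substitute({"version": "X.Y.Z", "date": "2024-01-01 00:00"})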
1506 def update_config(config, logger,  prop, value):
1507     '''Remove from config.APPLICATION.products the products that have the property given as input.
1508     
1509     :param config Config: The global config.
1510     :param prop str: The property to filter
1511     :param value str: The value of the property to filter
1512     '''
1513     # if there is no APPLICATION (e.g. sat package -t): nothing to do
1514     if "APPLICATION" in config:
1515         l_product_to_remove = []
1516         for product_name in config.APPLICATION.products.keys():
1517             prod_cfg = src.product.get_product_config(config, product_name)
1518             if src.get_property_in_product_cfg(prod_cfg, prop) == value:
1519                 l_product_to_remove.append(product_name)
1520         for product_name in l_product_to_remove:
1521             config.APPLICATION.products.__delitem__(product_name)
1522             logger.write("Remove product %s with property %s\n" % (product_name, prop), 5)
1523
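# Usage sketch (illustrative; the property name below is only an example): the
# --without_properties option is forwarded here by run(), so a call such as
#
#   update_config(runner.cfg, logger, "is_SALOME_module", "yes")
#
# removes from config.APPLICATION.products every product whose pyconf declares
# that property with that value, before the archive content is collected.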
1524 def description():
1525     '''method that is called when salomeTools is called with --help option.
1526     
1527     :return: The text to display for the package command description.
1528     :rtype: str
1529     '''
1530     return _("""
1531 The package command creates a tar file archive of a product.
1532 There are four kinds of archive, which can be mixed:
1533
1534  1 - The binary archive. 
1535      It contains the product installation directories plus a launcher.
1536  2 - The sources archive. 
1537      It contains the product archives and a project (the application plus salomeTools).
1538  3 - The project archive. 
1539      It contains a project (give the project file path as argument).
1540  4 - The salomeTools archive. 
1541      It contains the salomeTools utility itself.
1542
1543 example:
1544  >> sat package SALOME-master --binaries --sources""")
1545   
1546 def run(args, runner, logger):
1547     '''method that is called when salomeTools is called with package parameter.
1548     '''
1549     
1550     # Parse the options
1551     (options, args) = parser.parse_args(args)
1552
1553     
1554     # Check that a type of package is called, and only one
1555     all_option_types = (options.binaries,
1556                         options.sources,
1557                         options.project not in ["", None],
1558                         options.sat,
1559                         options.bin_products)
1560
1561     # Check if no option for package type
1562     if all_option_types.count(True) == 0:
1563         msg = _("Error: Specify a type for the package\nUse one of the "
1564                 "following options: --binaries, --sources, --project,"
1565                 " --salometools or --bin_products")
1566         logger.write(src.printcolors.printcError(msg), 1)
1567         logger.write("\n", 1)
1568         return 1
1569     do_create_package = options.binaries or options.sources or options.project or options.sat 
1570
1571     if options.bin_products:
1572         ret = bin_products_archives(runner.cfg)
1573         if ret!=0:
1574             return ret
1575     if not do_create_package:
1576         return 0
1577
1578     # continue to create a tar.gz package 
1579
1580     # The repository where to put the package if not Binary or Source
1581     package_default_path = runner.cfg.LOCAL.workdir
1582     # if the package contains binaries or sources:
1583     if options.binaries or options.sources or options.bin_products:
1584         # Check that the command has been called with an application
1585         src.check_config_has_application(runner.cfg)
1586
1587         # Display information
1588         logger.write(_("Packaging application %s\n") % src.printcolors.printcLabel(
1589                                                     runner.cfg.VARS.application), 1)
1590         
1591         # Get the default directory where to put the packages
1592         package_default_path = os.path.join(runner.cfg.APPLICATION.workdir, "PACKAGE")
1593         src.ensure_path_exists(package_default_path)
1594         
1595     # if the package contains a project:
1596     if options.project:
1597         # check that the project is visible to SAT
1598         projectNameFile = options.project + ".pyconf"
1599         foundProject = None
1600         for i in runner.cfg.PROJECTS.project_file_paths:
1601             baseName = os.path.basename(i)
1602             if baseName == projectNameFile:
1603                 foundProject = i
1604                 break
1605
1606         if foundProject is None:
1607             local_path = os.path.join(runner.cfg.VARS.salometoolsway, "data", "local.pyconf")
1608             msg = _("""ERROR: the project %(1)s is not visible to salomeTools.
1609 known projects are:
1610 %(2)s
1611
1612 Please add it to the file:
1613 %(3)s""" % \
1614                     {"1": options.project, "2": "\n  ".join(runner.cfg.PROJECTS.project_file_paths), "3": local_path})
1615             logger.write(src.printcolors.printcError(msg), 1)
1616             logger.write("\n", 1)
1617             return 1
1618         else:
1619             options.project_file_path = foundProject
1620             src.printcolors.print_value(logger, "Project path", options.project_file_path, 2)
1621     
1622     # Remove the products that are filtered by the --without_properties option
1623     if options.without_properties:
1624         prop, value = options.without_properties
1625         update_config(runner.cfg, logger, prop, value)
1626
1627     # Remove from config the products that have the not_in_package property
1628     update_config(runner.cfg, logger, "not_in_package", "yes")
1629
1630     # get the name of the archive or build it
1631     if options.name:
1632         if os.path.basename(options.name) == options.name:
1633             # only a name (not a path)
1634             archive_name = options.name           
1635             dir_name = package_default_path
1636         else:
1637             archive_name = os.path.basename(options.name)
1638             dir_name = os.path.dirname(options.name)
1639         
1640         # strip the archive extension if present
1641         if archive_name[-len(".tgz"):] == ".tgz":
1642             archive_name = archive_name[:-len(".tgz")]
1643         if archive_name[-len(".tar.gz"):] == ".tar.gz":
1644             archive_name = archive_name[:-len(".tar.gz")]
1645         
1646     else:
1647         archive_name=""
1648         dir_name = package_default_path
1649         if options.binaries or options.sources:
1650             archive_name = runner.cfg.APPLICATION.name
1651
1652         if options.binaries:
1653             archive_name += "-"+runner.cfg.VARS.dist
1654             
1655         if options.sources:
1656             archive_name += "-SRC"
1657             if options.with_vcs:
1658                 archive_name += "-VCS"
1659
1660         if options.sat:
1661             archive_name += ("salomeTools_" + src.get_salometool_version(runner.cfg))
1662
1663         if options.project:
1664             if options.sat:
1665                 archive_name += "_" 
1666             archive_name += ("satproject_" + options.project)
1667  
1668         if len(archive_name)==0: # no option worked 
1669             msg = _("Error: Cannot name the archive\n"
1670                     " check if at least one of the following options was "
1671                     "selected : --binaries, --sources, --project or"
1672                     " --salometools")
1673             logger.write(src.printcolors.printcError(msg), 1)
1674             logger.write("\n", 1)
1675             return 1
1676  
1677     path_targz = os.path.join(dir_name, archive_name + PACKAGE_EXT)
1678     
1679     src.printcolors.print_value(logger, "Package path", path_targz, 2)
1680
1681     # Create a working directory for all files that are produced during the
1682     # package creation and that will be removed at the end of the command
1683     tmp_working_dir = os.path.join(runner.cfg.VARS.tmp_root, runner.cfg.VARS.datehour)
1684     src.ensure_path_exists(tmp_working_dir)
1685     logger.write("\n", 5)
1686     logger.write(_("The temporary working directory: %s\n" % tmp_working_dir),5)
1687     
1688     logger.write("\n", 3)
1689
1690     msg = _("Preparation of files to add to the archive")
1691     logger.write(src.printcolors.printcLabel(msg), 2)
1692     logger.write("\n", 2)
1693     
1694     d_files_to_add={}  # content of the archive
1695
1696     # a dict to hold the paths that will need to be substituted for user recompilations
1697     d_paths_to_substitute={}  
1698
1699     if options.binaries:
1700         d_bin_files_to_add = binary_package(runner.cfg,
1701                                             logger,
1702                                             options,
1703                                             tmp_working_dir)
1704         # for all binary dirs, store the substitutions that will be required
1705         # for extra compilations
1706         for key in d_bin_files_to_add:
1707             if key.endswith("(bin)"):
1708                 source_dir = d_bin_files_to_add[key][0]
1709                 path_in_archive = d_bin_files_to_add[key][1].replace(
1710                    runner.cfg.INTERNAL.config.binary_dir + runner.cfg.VARS.dist,
1711                    runner.cfg.INTERNAL.config.install_dir)
1712                 if os.path.basename(source_dir)==os.path.basename(path_in_archive):
1713                     # if basename is the same we will just substitute the dirname 
1714                     d_paths_to_substitute[os.path.dirname(source_dir)]=\
1715                         os.path.dirname(path_in_archive)
1716                 else:
1717                     d_paths_to_substitute[source_dir]=path_in_archive
1718
1719         d_files_to_add.update(d_bin_files_to_add)
1720     if options.sources:
1721         d_files_to_add.update(source_package(runner,
1722                                         runner.cfg,
1723                                         logger, 
1724                                         options,
1725                                         tmp_working_dir))
1726         if options.binaries:
1727             # for archives with binaries and sources, we provide a shell script
1728             # able to install the binaries for compilation
1729             file_install_bin=produce_install_bin_file(runner.cfg,logger,
1730                                                       tmp_working_dir,
1731                                                       d_paths_to_substitute,
1732                                                       "install_bin.sh")
1733             d_files_to_add.update({"install_bin" : (file_install_bin, "install_bin.sh")})
1734             logger.write("substitutions that need to be done later : \n", 5)
1735             logger.write(str(d_paths_to_substitute), 5)
1736             logger.write("\n", 5)
1737     else:
1738         # the --salometools option is not considered when --sources is selected,
1739         # as that option already brings salomeTools!
1740         if options.sat:
1741             d_files_to_add.update(sat_package(runner.cfg, tmp_working_dir, 
1742                                   options, logger))
1743         
1744     if options.project:
1745         DBG.write("config for package %s" % options.project, runner.cfg)
1746         d_files_to_add.update(project_package(runner.cfg, options.project, options.project_file_path, options.ftp, tmp_working_dir, options.sat, logger))
1747
1748     if not(d_files_to_add):
1749         msg = _("Error: Empty dictionary to build the archive!\n")
1750         logger.write(src.printcolors.printcError(msg), 1)
1751         logger.write("\n", 1)
1752         return 1
1753
1754     # Add the README file in the package
1755     local_readme_tmp_path = add_readme(runner.cfg, options, tmp_working_dir)
1756     d_files_to_add["README"] = (local_readme_tmp_path, "README")
1757
1758     # Add the additional files of option add_files
1759     if options.add_files:
1760         for file_path in options.add_files:
1761             if not os.path.exists(file_path):
1762                 logger.write(_("WARNING: the file %s is not accessible.\n" % file_path), 1)
1763                 continue
1764             file_name = os.path.basename(file_path)
1765             d_files_to_add[file_name] = (file_path, file_name)
1766
1767     logger.write("\n", 2)
1768     logger.write(src.printcolors.printcLabel(_("Actually do the package")), 2)
1769     logger.write("\n", 2)
1770     logger.write("\nfiles and directories to add:\n%s\n\n" % PP.pformat(d_files_to_add), 5)
1771
1772     res = 0
1773     try:
1774         # Create the tarfile object
1775         tar = tarfile.open(path_targz, mode='w:gz')
1776         
1777         # get the filtering function if needed
1778         if old_python:
1779             filter_function = exclude_VCS_and_extensions_26
1780         else:
1781             filter_function = exclude_VCS_and_extensions
1782
1783         # Add the files to the tarfile object
1784         res = add_files(tar, archive_name, d_files_to_add, logger, f_exclude=filter_function)
1785         tar.close()
1786     except KeyboardInterrupt:
1787         logger.write(src.printcolors.printcError("\nERROR: forced interruption\n"), 1)
1788         logger.write(_("Removing the temporary working directory '%s'... ") % tmp_working_dir, 1)
1789         # remove the working directory
1790         shutil.rmtree(tmp_working_dir)
1791         logger.write(_("OK"), 1)
1792         logger.write(_("\n"), 1)
1793         return 1
1794     
1795     # case with no application: only sat itself is packaged, as with 'sat package -t'
1796     try:
1797         app = runner.cfg.APPLICATION
1798     except:
1799         app = None
1800
1801     # unconditionally remove the tmp_local_working_dir
1802     if app is not None:
1803         tmp_local_working_dir = os.path.join(app.workdir, "tmp_package")
1804         if os.path.isdir(tmp_local_working_dir):
1805             shutil.rmtree(tmp_local_working_dir)
1806
1807     # remove the tmp directory, unless the user is registered as a developer
1808     if os.path.isdir(tmp_working_dir) and (not DBG.isDeveloper()):
1809         shutil.rmtree(tmp_working_dir)
1810     
1811     # Print again the path of the package
1812     logger.write("\n", 2)
1813     src.printcolors.print_value(logger, "Package path", path_targz, 2)
1814     
1815     return res
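# Illustrative note (derived from the naming logic in run() above; the
# application name is an example): when --name is not given, the archive name
# is built from the selected options, e.g. for an application named SALOME-xx:
#
#   --binaries             ->  SALOME-xx-<VARS.dist>.tar.gz
#   --sources              ->  SALOME-xx-SRC.tar.gz
#   --sources --with_vcs   ->  SALOME-xx-SRC-VCS.tar.gz
#   --salometools          ->  salomeTools_<sat version>.tar.gz
#
# and the archive is written under <APPLICATION.workdir>/PACKAGE when binaries
# or sources are packaged, otherwise under LOCAL.workdir.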