1 #!/usr/bin/env python
2 #-*- coding:utf-8 -*-
3 #  Copyright (C) 2010-2012  CEA/DEN
4 #
5 #  This library is free software; you can redistribute it and/or
6 #  modify it under the terms of the GNU Lesser General Public
7 #  License as published by the Free Software Foundation; either
8 #  version 2.1 of the License.
9 #
10 #  This library is distributed in the hope that it will be useful,
11 #  but WITHOUT ANY WARRANTY; without even the implied warranty of
12 #  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13 #  Lesser General Public License for more details.
14 #
15 #  You should have received a copy of the GNU Lesser General Public
16 #  License along with this library; if not, write to the Free Software
17 #  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
18
19 import os
20 import stat
21 import shutil
22 import datetime
23 import tarfile
24 import codecs
25 import string
26 import glob
27 import pprint as PP
28
29 import src
30
31 from application import get_SALOME_modules
32 import src.debug as DBG
33
34 BINARY = "binary"
35 SOURCE = "Source"
36 PROJECT = "Project"
37 SAT = "Sat"
38
39 ARCHIVE_DIR = "ARCHIVES"
40 PROJECT_DIR = "PROJECT"
41
42 IGNORED_DIRS = [".git", ".svn"]
43 IGNORED_EXTENSIONS = []
44
45 PACKAGE_EXT=".tar.gz" # the extension we use for the packages
46
47 PROJECT_TEMPLATE = """#!/usr/bin/env python
48 #-*- coding:utf-8 -*-
49
50 # The path to the archive root directory
51 root_path : $PWD + "/../"
52 # path to the PROJECT
53 project_path : $PWD + "/"
54
55 # Where to search the archives of the products
56 ARCHIVEPATH : $root_path + "ARCHIVES"
57 # Where to search the pyconf of the applications
58 APPLICATIONPATH : $project_path + "applications/"
59 # Where to search the pyconf of the products
60 PRODUCTPATH : $project_path + "products/"
61 # Where to search the pyconf of the jobs of the project
62 JOBPATH : $project_path + "jobs/"
63 # Where to search the pyconf of the machines of the project
64 MACHINEPATH : $project_path + "machines/"
65 """
66
67 LOCAL_TEMPLATE = ("""#!/usr/bin/env python
68 #-*- coding:utf-8 -*-
69
70   LOCAL :
71   {
72     base : 'default'
73     workdir : 'default'
74     log_dir : 'default'
75     archive_dir : 'default'
76     VCS : 'unknown'
77     tag : 'unknown'
78   }
79
80 PROJECTS :
81 {
82 project_file_paths : [$VARS.salometoolsway + $VARS.sep + \"..\" + $VARS.sep"""
83 """ + \"""" + PROJECT_DIR + """\" + $VARS.sep + "project.pyconf"]
84 }
85 """)
86
87 # Define all possible options for the package command:  sat package <options>
88 parser = src.options.Options()
89 parser.add_option('b', 'binaries', 'boolean', 'binaries',
90     _('Optional: Produce a binary package.'), False)
91 parser.add_option('f', 'force_creation', 'boolean', 'force_creation',
92     _('Optional: Only binary package: produce the archive even if '
93       'there are some missing products.'), False)
94 parser.add_option('s', 'sources', 'boolean', 'sources',
95     _('Optional: Produce a compilable archive of the sources of the '
96       'application.'), False)
97 parser.add_option('', 'with_vcs', 'boolean', 'with_vcs',
98     _('Optional: Do not make archive for products in VCS mode (git, cvs, svn). ' 
99       'Sat prepare will use VCS mode instead to retrieve them'),
100     False)
101 parser.add_option('', 'ftp', 'boolean', 'ftp',
102     _('Optional: Do not embed archives for products in archive mode. ' 
103       'Sat prepare will use ftp instead to retrieve them'),
104     False)
105 parser.add_option('p', 'project', 'string', 'project',
106     _('Optional: Produce an archive that contains a project.'), "")
107 parser.add_option('t', 'salometools', 'boolean', 'sat',
108     _('Optional: Produce an archive that contains salomeTools.'), False)
109 parser.add_option('n', 'name', 'string', 'name',
110     _('Optional: The name or full path of the archive.'), None)
111 parser.add_option('', 'add_files', 'list2', 'add_files',
112     _('Optional: The list of additional files to add to the archive.'), [])
113 parser.add_option('', 'without_properties', 'properties', 'without_properties',
114     _('Optional: Filter the products by their properties.\n\tSyntax: '
115       '--without_properties <property>:<value>'))
116
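# Illustrative invocations (sketch; "SALOME-x.y.z", the project name and the archive
# path below are placeholders, not values defined in this file):
#   sat package SALOME-x.y.z --binaries                    # binary package
#   sat package SALOME-x.y.z --sources --with_vcs          # source package, keep vcs products
#   sat package --project my_project --salometools         # project + salomeTools package
#   sat package SALOME-x.y.z -b -n /tmp/my_archive         # custom archive name/path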
117
118 def add_files(tar, name_archive, d_content, logger, f_exclude=None):
119     '''Create an archive containing all directories and files that are given in
120        the d_content argument.
121     
122     :param tar tarfile: The tarfile instance used to make the archive.
123     :param name_archive str: The name of the archive to make.
124     :param d_content dict: The dictionary that contains all directories and files
125                            to add to the archive.
126                            d_content[label] = 
127                                         (path_on_local_machine, path_in_archive)
128     :param logger Logger: the logging instance
129     :param f_exclude Function: the function used to filter out unwanted entries (or None)
130     :return: 0 if success, 1 if not.
131     :rtype: int
132     '''
133     # get the max length of the labels in order to align the display
134     max_len = len(max(d_content.keys(), key=len))
135     
136     success = 0
137     # loop over each directory or file stored in the d_content dictionary
138     names = sorted(d_content.keys())
139     DBG.write("add tar names", names)
140
141     # used to avoid duplications (for pip install in python, or single_install_dir cases)
142     already_added=set() 
143     for name in names:
144         # display information
145         len_points = max_len - len(name) + 3
146         local_path, archive_path = d_content[name]
147         in_archive = os.path.join(name_archive, archive_path)
148         logger.write(name + " " + len_points * "." + " "+ in_archive + " ", 3)
149         # Add the directory or file to the archive
150         # (its local path and its path in the archive were read above),
151         # unless the same local path / archive path pair was already added
152         try:
153             key=local_path+"->"+in_archive
154             if key not in already_added:
155                 tar.add(local_path, arcname=in_archive, filter=f_exclude)
156                 already_added.add(key)
157             logger.write(src.printcolors.printcSuccess(_("OK")), 3)
158         except Exception as e:
159             logger.write(src.printcolors.printcError(_("KO ")), 3)
160             logger.write(str(e), 3)
161             success = 1
162         logger.write("\n", 3)
163     return success
164
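# Minimal usage sketch for add_files above (illustrative only; the archive name,
# the paths and the logger are placeholders/assumptions):
#   with tarfile.open("MYAPP" + PACKAGE_EXT, mode='w:gz') as tar:
#       d_content = {"KERNEL (bin)": ("/local/INSTALL/KERNEL", "BINARIES-XX/KERNEL")}
#       res = add_files(tar, "MYAPP", d_content, logger,
#                       f_exclude=exclude_VCS_and_extensions)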
165 def exclude_VCS_and_extensions(tarinfo):
166     ''' The function used to exclude the VCS repositories (like .git) and the
167         ignored extensions from the package
168
169     :param tarinfo TarInfo: The tar entry to exclude (or not).
170     :return: None if the entry has to be excluded, the tarinfo otherwise
171     :rtype: tarinfo or None
172     '''
173     filename = tarinfo.name
174     for dir_name in IGNORED_DIRS:
175         if dir_name in filename:
176             return None
177     for extension in IGNORED_EXTENSIONS:
178         if filename.endswith(extension):
179             return None
180     return tarinfo
181
182 def produce_relative_launcher(config,
183                               logger,
184                               file_dir,
185                               file_name,
186                               binaries_dir_name):
187     '''Create a specific SALOME launcher for the binary package. This launcher 
188        uses relative paths.
189     
190     :param config Config: The global configuration.
191     :param logger Logger: the logging instance
192     :param file_dir str: the directory where to put the launcher
193     :param file_name str: The launcher name
194     :param binaries_dir_name str: the name of the directory where the binaries
195                                   are stored in the archive.
196     :return: the path of the produced launcher
197     :rtype: str
198     '''
199     
200     # get KERNEL installation path 
201     kernel_info = src.product.get_product_config(config, "KERNEL")
202     kernel_base_name=os.path.basename(kernel_info.install_dir)
203     if kernel_base_name.startswith("config"):
204         # case of kernel installed in base. We remove "config-i"
205         kernel_base_name=os.path.basename(os.path.dirname(kernel_info.install_dir))
206     
207     kernel_root_dir = os.path.join(binaries_dir_name, kernel_base_name)
208
209     # set kernel bin dir (considering fhs property)
210     kernel_cfg = src.product.get_product_config(config, "KERNEL")
211     if src.get_property_in_product_cfg(kernel_cfg, "fhs"):
212         bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin") 
213     else:
214         bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin","salome") 
215
216     # check if the application contains an application module
217     # check also if the application has a distene product, 
218     # in this case get its licence file name
219     l_product_info = src.product.get_products_infos(config.APPLICATION.products.keys(), config)
220     salome_application_name="Not defined" 
221     distene_licence_file_name=False
222     for prod_name, prod_info in l_product_info:
223         # look for a "salome application" and a distene product
224         if src.get_property_in_product_cfg(prod_info, "is_distene") == "yes":
225             distene_licence_file_name = src.product.product_has_licence(prod_info, 
226                                             config.PATHS.LICENCEPATH) 
227         if src.get_property_in_product_cfg(prod_info, "is_salome_application") == "yes":
228             salome_application_name=prod_info.name
229
230     # if the application contains an application module, we set ABSOLUTE_APPLI_PATH to it
231     # if not we set it to KERNEL_INSTALL_DIR, which is sufficient, except for salome test
232     if salome_application_name == "Not defined":
233         app_root_dir=kernel_root_dir
234     else:
235         app_root_dir=os.path.join(binaries_dir_name, salome_application_name)
236
237     additional_env={}
238     additional_env['sat_bin_kernel_install_dir'] = "out_dir_Path + " +\
239                                                    config.VARS.sep + bin_kernel_install_dir
240     if "python3" in config.APPLICATION and config.APPLICATION.python3 == "yes":
241         additional_env['sat_python_version'] = 3
242     else:
243         additional_env['sat_python_version'] = 2
244
245     additional_env['ABSOLUTE_APPLI_PATH'] = "out_dir_Path" + config.VARS.sep + app_root_dir
246
247     # create an environment file writer
248     writer = src.environment.FileEnvWriter(config,
249                                            logger,
250                                            file_dir,
251                                            src_root=None,
252                                            env_info=None)
253     
254     filepath = os.path.join(file_dir, file_name)
255     # Write
256     writer.write_env_file(filepath,
257                           False,  # for launch
258                           "cfgForPy",
259                           additional_env=additional_env,
260                           no_path_init="False",
261                           for_package = binaries_dir_name)
262     
263     # Little hack to put out_dir_Path outside the strings
264     src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' )
265     src.replace_in_file(filepath, "r'out_dir_Path + ", "out_dir_Path + r'" )
266     
267     # A hack to put a call to a file for distene licence.
268     # It does nothing to an application that has no distene product
269     if distene_licence_file_name:
270         logger.write("Application has a distene licence file! We use it in package launcher", 5)
271         hack_for_distene_licence(filepath, distene_licence_file_name)
272        
273     # change the rights in order to make the file executable for everybody
274     os.chmod(filepath,
275              stat.S_IRUSR |
276              stat.S_IRGRP |
277              stat.S_IROTH |
278              stat.S_IWUSR |
279              stat.S_IXUSR |
280              stat.S_IXGRP |
281              stat.S_IXOTH)
282
283     return filepath
284
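# Typical call of produce_relative_launcher (sketch; "salome" is a placeholder, the
# real launcher name comes from src.get_launcher_name(config) in binary_package below):
#   binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
#   launcher = produce_relative_launcher(config, logger, tmp_working_dir,
#                                        "salome", binaries_dir_name)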
285 def hack_for_distene_licence(filepath, licence_file):
286     '''Replace the distene licence env variable by a call to a file.
287     
288     :param filepath Str: The path to the launcher to modify.
289     '''  
290     shutil.move(filepath, filepath + "_old")
291     fileout= filepath
292     filein = filepath + "_old"
293     fin = open(filein, "r")
294     fout = open(fileout, "w")
295     text = fin.readlines()
296     # Find the Distene section
297     num_line = -1
298     for i,line in enumerate(text):
299         if "# Set DISTENE License" in line:
300             num_line = i
301             break
302     if num_line == -1:
303         # No distene product, there is nothing to do
304         fin.close()
305         for line in text:
306             fout.write(line)
307         fout.close()
308         return
309     del text[num_line +1]
310     del text[num_line +1]
311     text_to_insert ="""    try:
312         distene_licence_file=r"%s"
313         if sys.version_info[0] >= 3 and sys.version_info[1] >= 5:
314             import importlib.util
315             spec_dist = importlib.util.spec_from_file_location("distene_licence", distene_licence_file)
316             distene=importlib.util.module_from_spec(spec_dist)
317             spec_dist.loader.exec_module(distene)
318         else:
319             import imp
320             distene = imp.load_source('distene_licence', distene_licence_file)
321         distene.set_distene_variables(context)
322     except:
323         pass\n"""  % licence_file
324     text.insert(num_line + 1, text_to_insert)
325     for line in text:
326         fout.write(line)
327     fin.close()    
328     fout.close()
329     return
330     
331 def produce_relative_env_files(config,
332                               logger,
333                               file_dir,
334                               binaries_dir_name):
335     '''Create some specific environment files for the binary package. These 
336        files use relative paths.
337     
338     :param config Config: The global configuration.
339     :param logger Logger: the logging instance
340     :param file_dir str: the directory where to put the files
341     :param binaries_dir_name str: the name of the directory where the binaries
342                                   are stored in the archive.
343     :return: the path of the produced environment file
344     :rtype: str
345     '''  
346     # create an environment file writer
347     writer = src.environment.FileEnvWriter(config,
348                                            logger,
349                                            file_dir,
350                                            src_root=None)
351     
352     if src.architecture.is_windows():
353       shell = "bat"
354       filename  = "env_launch.bat"
355     else:
356       shell = "bash"
357       filename  = "env_launch.sh"
358
359     # Write
360     filepath = writer.write_env_file(filename,
361                           False, # for launch
362                           shell,
363                           for_package = binaries_dir_name)
364
365     # Little hack to put out_dir_Path as environment variable
366     if src.architecture.is_windows() :
367       src.replace_in_file(filepath, '"out_dir_Path', '"%out_dir_Path%' )
368       src.replace_in_file(filepath, '=out_dir_Path', '=%out_dir_Path%' )
369       src.replace_in_file(filepath, ';out_dir_Path', ';%out_dir_Path%' )
370     else:
371       src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )
372       src.replace_in_file(filepath, ':out_dir_Path', ':${out_dir_Path}' )
373
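    # Illustrative effect of the substitutions above (the exact content of the
    # generated file depends on FileEnvWriter): a bash line such as
    #   export PATH="out_dir_Path/BINARIES-XX/KERNEL/bin:$PATH"
    # becomes
    #   export PATH="${out_dir_Path}/BINARIES-XX/KERNEL/bin:$PATH"
    # so the file can be sourced from any extraction directory.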
374     # change the rights in order to make the file executable for everybody
375     os.chmod(filepath,
376              stat.S_IRUSR |
377              stat.S_IRGRP |
378              stat.S_IROTH |
379              stat.S_IWUSR |
380              stat.S_IXUSR |
381              stat.S_IXGRP |
382              stat.S_IXOTH)
383     
384     return filepath
385
386 def produce_install_bin_file(config,
387                              logger,
388                              file_dir,
389                              d_sub,
390                              file_name):
391     '''Create a bash shell script which does substitutions in the BINARIES dir 
392        in order to use it for extra compilations.
393     
394     :param config Config: The global configuration.
395     :param logger Logger: the logging instance
396     :param file_dir str: the directory where to put the files
397     :param d_sub dict: the dictionary that contains the substitutions to be done
398     :param file_name str: the name of the install script file
399     :return: the produced file
400     :rtype: str
401     '''  
402     # Write
403     filepath = os.path.join(file_dir, file_name)
404     # open the file and write into it
405     # use codec utf-8 as sat variables are in unicode
406     with codecs.open(filepath, "w", 'utf-8') as installbin_file:
407         installbin_template_path = os.path.join(config.VARS.internal_dir,
408                                         "INSTALL_BIN.template")
409         
410         # build the name of the directory that will contain the binaries
411         binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
412         # build the substitution loop
413         loop_cmd = "for f in $(grep -RIl"
414         for key in d_sub:
415             loop_cmd += " -e "+ key
416         loop_cmd += ' ' + config.INTERNAL.config.install_dir +\
417                     '); do\n     sed -i "\n'
418         for key in d_sub:
419             loop_cmd += "        s?" + key + "?$(pwd)/" + d_sub[key] + "?g\n"
420         loop_cmd += '            " $f\ndone'
421
422         d={}
423         d["BINARIES_DIR"] = binaries_dir_name
424         d["SUBSTITUTION_LOOP"]=loop_cmd
425         d["INSTALL_DIR"]=config.INTERNAL.config.install_dir
426         
427         # substitute the template and write it in file
428         content=src.template.substitute(installbin_template_path, d)
429         installbin_file.write(content)
430         # change the rights in order to make the file executable for everybody
431         os.chmod(filepath,
432                  stat.S_IRUSR |
433                  stat.S_IRGRP |
434                  stat.S_IROTH |
435                  stat.S_IWUSR |
436                  stat.S_IXUSR |
437                  stat.S_IXGRP |
438                  stat.S_IXOTH)
439     
440     return filepath
441
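# Rough shape of the SUBSTITUTION_LOOP generated above for a single d_sub entry
# (illustrative; "INSTALL" stands for config.INTERNAL.config.install_dir and the
# d_sub entry is assumed to map an original absolute path to a relative one,
# e.g. {"/old/prefix/INSTALL": "INSTALL"}):
#   for f in $(grep -RIl -e /old/prefix/INSTALL INSTALL); do
#        sed -i "
#           s?/old/prefix/INSTALL?$(pwd)/INSTALL?g
#               " $f
#   done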
442 def product_appli_creation_script(config,
443                                   logger,
444                                   file_dir,
445                                   binaries_dir_name):
446     '''Create a script that can produce an application (EDF style) in the binary
447        package.
448     
449     :param config Config: The global configuration.
450     :param logger Logger: the logging instance
451     :param file_dir str: the directory where to put the file
452     :param binaries_dir_name str: the name of the directory where the binaries
453                                   are stored in the archive.
454     :return: the path of the produced script file
455     :rtype: Str
456     '''
457     template_name = "create_appli.py.for_bin_packages.template"
458     template_path = os.path.join(config.VARS.internal_dir, template_name)
459     text_to_fill = open(template_path, "r").read()
460     text_to_fill = text_to_fill.replace("TO BE FILLED 1",
461                                         '"' + binaries_dir_name + '"')
462     
463     text_to_add = ""
464     for product_name in get_SALOME_modules(config):
465         product_info = src.product.get_product_config(config, product_name)
466        
467         if src.product.product_is_smesh_plugin(product_info):
468             continue
469
470         if 'install_dir' in product_info and bool(product_info.install_dir):
471             if src.product.product_is_cpp(product_info):
472                 # cpp module
473                 for cpp_name in src.product.get_product_components(product_info):
474                     line_to_add = ("<module name=\"" + 
475                                    cpp_name + 
476                                    "\" gui=\"yes\" path=\"''' + "
477                                    "os.path.join(dir_bin_name, \"" + 
478                                    cpp_name + "\") + '''\"/>")
479             else:
480                 # regular module
481                 line_to_add = ("<module name=\"" + 
482                                product_name + 
483                                "\" gui=\"yes\" path=\"''' + "
484                                "os.path.join(dir_bin_name, \"" + 
485                                product_name + "\") + '''\"/>")
486             text_to_add += line_to_add + "\n"
487     
488     filled_text = text_to_fill.replace("TO BE FILLED 2", text_to_add)
489     
490     tmp_file_path = os.path.join(file_dir, "create_appli.py")
491     ff = open(tmp_file_path, "w")
492     ff.write(filled_text)
493     ff.close()
494     
495     # change the rights in order to make the file executable for everybody
496     os.chmod(tmp_file_path,
497              stat.S_IRUSR |
498              stat.S_IRGRP |
499              stat.S_IROTH |
500              stat.S_IWUSR |
501              stat.S_IXUSR |
502              stat.S_IXGRP |
503              stat.S_IXOTH)
504     
505     return tmp_file_path
506
507 def binary_package(config, logger, options, tmp_working_dir):
508     '''Prepare a dictionary that stores all the needed directories and files to
509        add in a binary package.
510     
511     :param config Config: The global configuration.
512     :param logger Logger: the logging instance
513     :param options OptResult: the options of the launched command
514     :param tmp_working_dir str: The temporary local directory containing some 
515                                 specific directories or files needed in the 
516                                 binary package
517     :return: the dictionary that stores all the needed directories and files to
518              add in a binary package.
519              {label : (path_on_local_machine, path_in_archive)}
520     :rtype: dict
521     '''
522
523     # Get the list of product installations to add to the archive
524     l_products_name = sorted(config.APPLICATION.products.keys())
525     l_product_info = src.product.get_products_infos(l_products_name,
526                                                     config)
527     l_install_dir = []
528     l_source_dir = []
529     l_not_installed = []
530     l_sources_not_present = []
531     generate_mesa_launcher = False  # a flag to know if we generate a mesa launcher
532     if ("APPLICATION" in config  and
533         "properties"  in config.APPLICATION  and
534         "mesa_launcher_in_package"    in config.APPLICATION.properties  and
535         config.APPLICATION.properties.mesa_launcher_in_package == "yes") :
536             generate_mesa_launcher=True
537
538     for prod_name, prod_info in l_product_info:
539         # skip product with property not_in_package set to yes
540         if src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes":
541             continue  
542
543         # Add the sources of the products that have the property 
544         # sources_in_package : "yes"
545         if src.get_property_in_product_cfg(prod_info,
546                                            "sources_in_package") == "yes":
547             if os.path.exists(prod_info.source_dir):
548                 l_source_dir.append((prod_name, prod_info.source_dir))
549             else:
550                 l_sources_not_present.append(prod_name)
551
552         # ignore the native and fixed products for install directories
553         if (src.product.product_is_native(prod_info) 
554                 or src.product.product_is_fixed(prod_info)
555                 or not src.product.product_compiles(prod_info)):
556             continue
557         if src.product.check_installation(config, prod_info):
558             l_install_dir.append((prod_name, prod_info.install_dir))
559         else:
560             l_not_installed.append(prod_name)
561         
562         # Add also the cpp generated modules (if any)
563         if src.product.product_is_cpp(prod_info):
564             # cpp module
565             for name_cpp in src.product.get_product_components(prod_info):
566                 install_dir = os.path.join(config.APPLICATION.workdir,
567                                            config.INTERNAL.config.install_dir,
568                                            name_cpp) 
569                 if os.path.exists(install_dir):
570                     l_install_dir.append((name_cpp, install_dir))
571                 else:
572                     l_not_installed.append(name_cpp)
573         
574     # check the name of the directory that could contain the binaries 
575     # from a previous detar
576     binaries_from_detar = os.path.join(
577                               config.APPLICATION.workdir,
578                               config.INTERNAL.config.binary_dir + config.VARS.dist)
579     if os.path.exists(binaries_from_detar):
580          logger.write("""
581 WARNING: existing binaries directory from previous detar installation:
582          %s
583          To make new package from this, you have to: 
584          1) install binaries in INSTALL directory with the script "install_bin.sh" 
585             see README file for more details
586          2) or recompile everything in INSTALL with "sat compile" command 
587             this step is long, and requires some linux packages to be installed 
588             on your system\n
589 """ % binaries_from_detar)
590     
591     # Print warning or error if there are some missing products
592     if len(l_not_installed) > 0:
593         text_missing_prods = ""
594         for p_name in l_not_installed:
595             text_missing_prods += " - " + p_name + "\n"
596         if not options.force_creation:
597             msg = _("ERROR: there are missing product installations:")
598             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
599                                      text_missing_prods),
600                          1)
601             raise src.SatException(msg)
602         else:
603             msg = _("WARNING: there are missing product installations:")
604             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
605                                      text_missing_prods),
606                          1)
607
608     # Do the same for sources
609     if len(l_sources_not_present) > 0:
610         text_missing_prods = ""
611         for p_name in l_sources_not_present:
612             text_missing_prods += " - " + p_name + "\n"
613         if not options.force_creation:
614             msg = _("ERROR: there are missing product sources:")
615             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
616                                      text_missing_prods),
617                          1)
618             raise src.SatException(msg)
619         else:
620             msg = _("WARNING: there are missing product sources:")
621             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
622                                      text_missing_prods),
623                          1)
624  
625     # construct the name of the directory that will contain the binaries
626     if src.architecture.is_windows():
627         binaries_dir_name = config.INTERNAL.config.binary_dir
628     else:
629         binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
630     # construct the correlation table between the product names, their 
631     # actual install directories and their install directory in the archive
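    # e.g. (illustrative placeholders):
    #   d_products["KERNEL (bin)"] = ("<workdir>/INSTALL/KERNEL", "BINARIES-XX/KERNEL")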
632     d_products = {}
633     for prod_name, install_dir in l_install_dir:
634         prod_base_name=os.path.basename(install_dir)
635         if prod_base_name.startswith("config"):
636             # case of a product installed in base. We remove "config-i"
637             prod_base_name=os.path.basename(os.path.dirname(install_dir))
638         path_in_archive = os.path.join(binaries_dir_name, prod_base_name)
639         d_products[prod_name + " (bin)"] = (install_dir, path_in_archive)
640         
641     for prod_name, source_dir in l_source_dir:
642         path_in_archive = os.path.join("SOURCES", prod_name)
643         d_products[prod_name + " (sources)"] = (source_dir, path_in_archive)
644
645     # for packages of SALOME applications including KERNEL, 
646     # we produce a salome launcher or a virtual application (depending on salome version)
647     if 'KERNEL' in config.APPLICATION.products:
648         VersionSalome = src.get_salome_version(config)
649         # Case where SALOME has the launcher that uses the SalomeContext API
650         if VersionSalome >= 730:
651             # create the relative launcher and add it to the files to add
652             launcher_name = src.get_launcher_name(config)
653             launcher_package = produce_relative_launcher(config,
654                                                  logger,
655                                                  tmp_working_dir,
656                                                  launcher_name,
657                                                  binaries_dir_name)
658             d_products["launcher"] = (launcher_package, launcher_name)
659
660             # if the application contains mesa products, we generate in addition to the 
661             # classical salome launcher a launcher using mesa and called mesa_salome 
662             # (the mesa launcher will be used for remote usage through ssh).
663             if generate_mesa_launcher:
664                 #if there is one : store the use_mesa property
665                 restore_use_mesa_option=None
666                 if ('properties' in config.APPLICATION and 
667                     'use_mesa' in config.APPLICATION.properties):
668                     restore_use_mesa_option = config.APPLICATION.properties.use_mesa
669
670                 # activate mesa property, and generate a mesa launcher
671                 src.activate_mesa_property(config)  #activate use_mesa property
672                 launcher_mesa_name="mesa_"+launcher_name
673                 launcher_package_mesa = produce_relative_launcher(config,
674                                                      logger,
675                                                      tmp_working_dir,
676                                                      launcher_mesa_name,
677                                                      binaries_dir_name)
678                 d_products["launcher (mesa)"] = (launcher_package_mesa, launcher_mesa_name)
679
680                 # if there was a use_mesa value, we restore it
681                 # else we set it to the default value "no"
682                 if restore_use_mesa_option != None:
683                     config.APPLICATION.properties.use_mesa=restore_use_mesa_option
684                 else:
685                     config.APPLICATION.properties.use_mesa="no"
686
687             if options.sources:
688                 # if we mix binaries and sources, we add a copy of the launcher, 
689                 # prefixed with "bin", in order to avoid clashes
690                 d_products["launcher (copy)"] = (launcher_package, "bin"+launcher_name)
691         else:
692             # Provide a script for the creation of an application EDF style
693             appli_script = product_appli_creation_script(config,
694                                                         logger,
695                                                         tmp_working_dir,
696                                                         binaries_dir_name)
697             
698             d_products["appli script"] = (appli_script, "create_appli.py")
699
700     # Put also the environment file
701     env_file = produce_relative_env_files(config,
702                                            logger,
703                                            tmp_working_dir,
704                                            binaries_dir_name)
705
706     if src.architecture.is_windows():
707       filename  = "env_launch.bat"
708     else:
709       filename  = "env_launch.sh"
710     d_products["environment file"] = (env_file, filename)      
711     return d_products
712
713 def source_package(sat, config, logger, options, tmp_working_dir):
714     '''Prepare a dictionary that stores all the needed directories and files to
715        add in a source package.
716     
717     :param config Config: The global configuration.
718     :param logger Logger: the logging instance
719     :param options OptResult: the options of the launched command
720     :param tmp_working_dir str: The temporary local directory containing some 
721                                 specific directories or files needed in the 
722                                 source package
723     :return: the dictionary that stores all the needed directories and files to
724              add in a source package.
725              {label : (path_on_local_machine, path_in_archive)}
726     :rtype: dict
727     '''
728     
729     d_archives={}
730     # Get all the products that are prepared using an archive
731     # unless ftp mode is specified (in this case the user of the
732     # archive will get the sources through the ftp mode of sat prepare)
733     if not options.ftp:
734         logger.write("Find archive products ... ")
735         d_archives, l_pinfo_vcs = get_archives(config, logger)
736         logger.write("Done\n")
737
738     d_archives_vcs = {}
739     if not options.ftp and not options.with_vcs and len(l_pinfo_vcs) > 0:
740         # Make archives with the products that are not prepared using an archive
741         # (git, cvs, svn, etc)
742         logger.write("Construct archives for vcs products ... ")
743         d_archives_vcs = get_archives_vcs(l_pinfo_vcs,
744                                           sat,
745                                           config,
746                                           logger,
747                                           tmp_working_dir)
748         logger.write("Done\n")
749
750     # Create a project
751     logger.write("Create the project ... ")
752     d_project = create_project_for_src_package(config,
753                                                tmp_working_dir,
754                                                options.with_vcs,
755                                                options.ftp)
756     logger.write("Done\n")
757     
758     # Add salomeTools
759     tmp_sat = add_salomeTools(config, tmp_working_dir)
760     d_sat = {"salomeTools" : (tmp_sat, "salomeTools")}
761     
762     # Add a sat symbolic link if not win
763     if not src.architecture.is_windows():
764         tmp_satlink_path = os.path.join(tmp_working_dir, 'sat')
765         try:
766             t = os.getcwd()
767         except:
768             # In the jobs, os.getcwd() can fail
769             t = config.LOCAL.workdir
770         os.chdir(tmp_working_dir)
771         if os.path.lexists(tmp_satlink_path):
772             os.remove(tmp_satlink_path)
773         os.symlink(os.path.join('salomeTools', 'sat'), 'sat')
774         os.chdir(t)
775         
776         d_sat["sat link"] = (tmp_satlink_path, "sat")
777     
778     d_source = src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
779     return d_source
780
781 def get_archives(config, logger):
782     '''Find all the products that are retrieved using an archive and all the
783        products that are retrieved using a vcs (git, cvs, svn) repository.
784     
785     :param config Config: The global configuration.
786     :param logger Logger: the logging instance
787     :return: the dictionary {name_product : 
788              (local path of its archive, path in the package of its archive )}
789              and the list of specific configuration corresponding to the vcs 
790              products
791     :rtype: (Dict, List)
792     '''
793     # Get the list of product information
794     l_products_name = config.APPLICATION.products.keys()
795     l_product_info = src.product.get_products_infos(l_products_name,
796                                                     config)
797     d_archives = {}
798     l_pinfo_vcs = []
799     for p_name, p_info in l_product_info:
800         # skip product with property not_in_package set to yes
801         if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
802             continue  
803         # ignore the native and fixed products
804         if (src.product.product_is_native(p_info) 
805                 or src.product.product_is_fixed(p_info)):
806             continue
807         if p_info.get_source == "archive":
808             archive_path = p_info.archive_info.archive_name
809             archive_name = os.path.basename(archive_path)
810             d_archives[p_name] = (archive_path,
811                                   os.path.join(ARCHIVE_DIR, archive_name))
812             if (src.appli_test_property(config,"pip", "yes") and 
813                 src.product.product_test_property(p_info,"pip", "yes")):
814                 # if pip mode is activated, and product is managed by pip
815                 pip_wheels_dir=os.path.join(config.LOCAL.archive_dir,"wheels")
816                 pip_wheel_pattern=os.path.join(pip_wheels_dir, 
817                     "%s-%s*" % (p_info.name, p_info.version))
818                 pip_wheel_path=glob.glob(pip_wheel_pattern)
819                 msg_pip_not_found="Error in get_archive, pip wheel for "\
820                                   "product %s-%s was not found in %s directory"
821                 msg_pip_two_or_more="Error in get_archive, several pip wheels for "\
822                                   "product %s-%s were found in %s directory"
823                 if len(pip_wheel_path)==0:
824                     raise src.SatException(msg_pip_not_found %\
825                         (p_info.name, p_info.version, pip_wheels_dir))
826                 if len(pip_wheel_path)>1:
827                     raise src.SatException(msg_pip_two_or_more %\
828                         (p_info.name, p_info.version, pip_wheels_dir))
829
830                 pip_wheel_name=os.path.basename(pip_wheel_path[0])
831                 d_archives[p_name+" (pip wheel)"]=(pip_wheel_path[0], 
832                     os.path.join(ARCHIVE_DIR, "wheels", pip_wheel_name))
833         else:
834             # this product is not managed by archive, 
835             # an archive of the vcs directory will be created by get_archive_vcs
836             l_pinfo_vcs.append((p_name, p_info)) 
837             
838     return d_archives, l_pinfo_vcs
839
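# Shape of the values returned by get_archives (illustrative example; the paths and
# the product names are placeholders):
#   d_archives  = {"boost": ("/data/ARCHIVES/boost-1.71.0.tar.gz",
#                            "ARCHIVES/boost-1.71.0.tar.gz")}
#   l_pinfo_vcs = [("KERNEL", <KERNEL product config>), ...]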
840 def add_salomeTools(config, tmp_working_dir):
841     '''Prepare a version of salomeTools that has a specific local.pyconf file 
842        configured for a source package.
843
844     :param config Config: The global configuration.
845     :param tmp_working_dir str: The temporary local directory containing some 
846                                 specific directories or files needed in the 
847                                 source package
848     :return: The path to the local salomeTools directory to add in the package
849     :rtype: str
850     '''
851     # Copy sat in the temporary working directory
852     sat_tmp_path = src.Path(os.path.join(tmp_working_dir, "salomeTools"))
853     sat_running_path = src.Path(config.VARS.salometoolsway)
854     sat_running_path.copy(sat_tmp_path)
855     
856     # Update the local.pyconf file that contains the path to the project
857     local_pyconf_name = "local.pyconf"
858     local_pyconf_dir = os.path.join(tmp_working_dir, "salomeTools", "data")
859     local_pyconf_file = os.path.join(local_pyconf_dir, local_pyconf_name)
860     # Remove the .pyconf and .txt files in the root directory of salomeTools if
861     # there are any. (For example when launching jobs, a pyconf file describing 
862     # the jobs can be here and is not useful) 
863     files_or_dir_SAT = os.listdir(os.path.join(tmp_working_dir, "salomeTools"))
864     for file_or_dir in files_or_dir_SAT:
865         if file_or_dir.endswith(".pyconf") or file_or_dir.endswith(".txt"):
866             file_path = os.path.join(tmp_working_dir,
867                                      "salomeTools",
868                                      file_or_dir)
869             os.remove(file_path)
870     
871     ff = open(local_pyconf_file, "w")
872     ff.write(LOCAL_TEMPLATE)
873     ff.close()
874     
875     return sat_tmp_path.path
876
877 def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir):
878     '''For source packages that require that all products are retrieved using an 
879        archive, one has to create archives for the vcs products.
880        So this method calls the clean and source commands of sat and then creates
881        the archives.
882
883     :param l_pinfo_vcs List: The list of specific configuration corresponding to
884                              each vcs product
885     :param sat Sat: The Sat instance that can be called to clean and source the
886                     products
887     :param config Config: The global configuration.
888     :param logger Logger: the logging instance
889     :param tmp_working_dir str: The temporary local directory containing some 
890                                 specific directories or files needed in the 
891                                 source package
892     :return: the dictionary that stores all the archives to add in the source 
893              package. {label : (path_on_local_machine, path_in_archive)}
894     :rtype: dict
895     '''
896     # clean the source directory of all the vcs products, then use the source 
897     # command and thus construct an archive that will not contain the patches
898     l_prod_names = [pn for pn, __ in l_pinfo_vcs]
899     if False: # clean is dangerous in user/SOURCES, fixed in tmp_local_working_dir
900       logger.write(_("\nclean sources\n"))
901       args_clean = config.VARS.application
902       args_clean += " --sources --products "
903       args_clean += ",".join(l_prod_names)
904       logger.write("WARNING: get_archives_vcs clean\n         '%s'\n" % args_clean, 1)
905       sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)
906     if True:
907       # source
908       logger.write(_("get sources\n"))
909       args_source = config.VARS.application
910       args_source += " --products "
911       args_source += ",".join(l_prod_names)
912       svgDir = sat.cfg.APPLICATION.workdir
913       tmp_local_working_dir = os.path.join(sat.cfg.APPLICATION.workdir, "tmp_package")  # to avoid too many big files in /tmp
914       sat.cfg.APPLICATION.workdir = tmp_local_working_dir
915       # DBG.write("SSS sat config.APPLICATION.workdir", sat.cfg.APPLICATION, True)
916       # DBG.write("sat config id", id(sat.cfg), True)
917       # note: config does not have the same id() as the one used by sat.source()
918       # sat.source(args_source, batch=True, verbose=5, logger_add_link = logger)
919       import source
920       source.run(args_source, sat, logger) #use this mode as runner.cfg reference
921       
922       # make the new archives
923       d_archives_vcs = {}
924       for pn, pinfo in l_pinfo_vcs:
925           path_archive = make_archive(pn, pinfo, tmp_local_working_dir)
926           logger.write("make archive vcs '%s'\n" % path_archive)
927           d_archives_vcs[pn] = (path_archive,
928                                 os.path.join(ARCHIVE_DIR, pn + ".tgz"))
929       sat.cfg.APPLICATION.workdir = svgDir
930       # DBG.write("END sat config", sat.cfg.APPLICATION, True)
931     return d_archives_vcs
932
933 def make_archive(prod_name, prod_info, where):
934     '''Create an archive of a product from its source directory.
935
936     :param prod_name str: The name of the product.
937     :param prod_info Config: The specific configuration corresponding to the 
938                              product
939     :param where str: The path of the directory where to put the resulting 
940                       archive
941     :return: The path of the resulting archive
942     :rtype: str
943     '''
944     path_targz_prod = os.path.join(where, prod_name + PACKAGE_EXT)
945     tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
946     local_path = prod_info.source_dir
947     tar_prod.add(local_path,
948                  arcname=prod_name,
949                  filter=exclude_VCS_and_extensions)
950     tar_prod.close()
951     return path_targz_prod       
952
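# Minimal usage sketch for make_archive (illustrative; the product name, its config
# and the destination directory are placeholders):
#   path = make_archive("KERNEL", kernel_info, "/tmp/tmp_package")
#   # -> "/tmp/tmp_package/KERNEL.tar.gz", built from kernel_info.source_dir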
953 def create_project_for_src_package(config, tmp_working_dir, with_vcs, with_ftp):
954     '''Create a specific project for a source package.
955
956     :param config Config: The global configuration.
957     :param tmp_working_dir str: The temporary local directory containing some 
958                                 specific directories or files needed in the 
959                                 source package
960     :param with_vcs boolean: True if the package is with vcs products (not 
961                              transformed into archive products)
962     :param with_ftp boolean: True if the package uses ftp servers to get archives
963     :return: The dictionary 
964              {"project" : (produced project, project path in the archive)}
965     :rtype: Dict
966     '''
967
968     # Create in the working temporary directory the full project tree
969     project_tmp_dir = os.path.join(tmp_working_dir, PROJECT_DIR)
970     products_pyconf_tmp_dir = os.path.join(project_tmp_dir,
971                                          "products")
972     compil_scripts_tmp_dir = os.path.join(project_tmp_dir,
973                                          "products",
974                                          "compil_scripts")
975     env_scripts_tmp_dir = os.path.join(project_tmp_dir,
976                                          "products",
977                                          "env_scripts")
978     patches_tmp_dir = os.path.join(project_tmp_dir,
979                                          "products",
980                                          "patches")
981     application_tmp_dir = os.path.join(project_tmp_dir,
982                                          "applications")
983     for directory in [project_tmp_dir,
984                       compil_scripts_tmp_dir,
985                       env_scripts_tmp_dir,
986                       patches_tmp_dir,
987                       application_tmp_dir]:
988         src.ensure_path_exists(directory)
989
990     # Create the pyconf that contains the information of the project
991     project_pyconf_name = "project.pyconf"        
992     project_pyconf_file = os.path.join(project_tmp_dir, project_pyconf_name)
993     ff = open(project_pyconf_file, "w")
994     ff.write(PROJECT_TEMPLATE)
995     if with_ftp and len(config.PATHS.ARCHIVEFTP) > 0:
996         ftp_path='ARCHIVEFTP : "'+config.PATHS.ARCHIVEFTP[0]
997         for ftpserver in config.PATHS.ARCHIVEFTP[1:]:
998             ftp_path=ftp_path+":"+ftpserver
999         ftp_path+='"'
1000         ff.write("# ftp servers where to search for prerequisite archives\n")
1001         ff.write(ftp_path)
1002     # add licence paths if any
1003     if len(config.PATHS.LICENCEPATH) > 0:  
1004         licence_path='LICENCEPATH : "'+config.PATHS.LICENCEPATH[0]
1005         for path in config.PATHS.LICENCEPATH[1:]:
1006             licence_path=licence_path+":"+path
1007         licence_path+='"'
1008         ff.write("\n# Where to search for licences\n")
1009         ff.write(licence_path)
1010         
1011
1012     ff.close()
1013     
1014     # Loop over the products to get their pyconf and all the scripts 
1015     # (compilation, environment, patches)
1016     # and create the pyconf file to add to the project
1017     lproducts_name = config.APPLICATION.products.keys()
1018     l_products = src.product.get_products_infos(lproducts_name, config)
1019     for p_name, p_info in l_products:
1020         # skip product with property not_in_package set to yes
1021         if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
1022             continue  
1023         find_product_scripts_and_pyconf(p_name,
1024                                         p_info,
1025                                         config,
1026                                         with_vcs,
1027                                         compil_scripts_tmp_dir,
1028                                         env_scripts_tmp_dir,
1029                                         patches_tmp_dir,
1030                                         products_pyconf_tmp_dir)
1031     
1032     # for the application pyconf, we write the in-memory config directly;
1033     # we do not reuse the original pyconf file, in order to avoid problems
1034     # with overwrite sections and the rm_products key
1035     write_application_pyconf(config, application_tmp_dir)
1036     
1037     d_project = {"project" : (project_tmp_dir, PROJECT_DIR )}
1038     return d_project
1039
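# Layout of the project produced by create_project_for_src_package inside the
# archive (for information):
#   PROJECT/
#     project.pyconf
#     applications/<application>.pyconf
#     products/<product>.pyconf
#     products/compil_scripts/  products/env_scripts/  products/patches/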
1040 def find_product_scripts_and_pyconf(p_name,
1041                                     p_info,
1042                                     config,
1043                                     with_vcs,
1044                                     compil_scripts_tmp_dir,
1045                                     env_scripts_tmp_dir,
1046                                     patches_tmp_dir,
1047                                     products_pyconf_tmp_dir):
1048     '''Create a specific pyconf file for a given product. Get its environment 
1049        script, its compilation script and patches and put it in the temporary
1050        working directory. This method is used in the source package in order to
1051        construct the specific project.
1052
1053     :param p_name str: The name of the product.
1054     :param p_info Config: The specific configuration corresponding to the 
1055                              product
1056     :param config Config: The global configuration.
1057     :param with_vcs boolean: True if the package is with vcs products (not 
1058                              transformed into archive products)
1059     :param compil_scripts_tmp_dir str: The path to the temporary compilation 
1060                                        scripts directory of the project.
1061     :param env_scripts_tmp_dir str: The path to the temporary environment script 
1062                                     directory of the project.
1063     :param patches_tmp_dir str: The path to the temporary patch scripts 
1064                                 directory of the project.
1065     :param products_pyconf_tmp_dir str: The path to the temporary product 
1066                                         scripts directory of the project.
1067     '''
1068     
1069     # read the pyconf of the product
1070     product_pyconf_cfg = src.pyconf.Config(p_info.from_file)
1071
1072     # find the compilation script if any
1073     if src.product.product_has_script(p_info):
1074         compil_script_path = src.Path(p_info.compil_script)
1075         compil_script_path.copy(compil_scripts_tmp_dir)
1076
1077     # find the environment script if any
1078     if src.product.product_has_env_script(p_info):
1079         env_script_path = src.Path(p_info.environ.env_script)
1080         env_script_path.copy(env_scripts_tmp_dir)
1081
1082     # find the patches if any
1083     if src.product.product_has_patches(p_info):
1084         patches = src.pyconf.Sequence()
1085         for patch_path in p_info.patches:
1086             p_path = src.Path(patch_path)
1087             p_path.copy(patches_tmp_dir)
1088             patches.append(os.path.basename(patch_path), "")
1089
1090     if (not with_vcs) and src.product.product_is_vcs(p_info):
1091         # in non vcs mode, if the product is not in archive mode, make it become archive.
1092
1093         # depending upon the incremental mode, select impacted sections
1094         if "properties" in p_info and "incremental" in p_info.properties and\
1095             p_info.properties.incremental == "yes":
1096             sections = ["default", "default_win", p_info.section, p_info.section+"_win"]
1097         else:
1098             sections = [p_info.section]
1099         for section in sections:
1100             if section in product_pyconf_cfg and "get_source" in product_pyconf_cfg[section]:
1101                 DBG.write("sat package set archive mode to archive for product %s and section %s" %\
1102                           (p_name,section))
1103                 product_pyconf_cfg[section].get_source = "archive"
1104                 if not "archive_info" in product_pyconf_cfg[section]:
1105                     product_pyconf_cfg[section].addMapping("archive_info",
1106                                         src.pyconf.Mapping(product_pyconf_cfg),
1107                                         "")
1108                     product_pyconf_cfg[section].archive_info.archive_name =\
1109                         p_info.name + ".tgz"
1110     
1111     # write the pyconf file to the temporary project location
1112     product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir,
1113                                            p_name + ".pyconf")
1114     ff = open(product_tmp_pyconf_path, 'w')
1115     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1116     product_pyconf_cfg.__save__(ff, 1)
1117     ff.close()
1118
1119
1120 def write_application_pyconf(config, application_tmp_dir):
1121     '''Write the application pyconf file in the specific temporary 
1122        directory containing the specific project of a source package.
1123
1124     :param config Config: The global configuration.
1125     :param application_tmp_dir str: The path to the temporary application 
1126                                     scripts directory of the project.
1127     '''
1128     application_name = config.VARS.application
1129     # write the pyconf file to the temporary application location
1130     application_tmp_pyconf_path = os.path.join(application_tmp_dir,
1131                                                application_name + ".pyconf")
1132     with open(application_tmp_pyconf_path, 'w') as f:
1133         f.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1134         res = src.pyconf.Config()
1135         app = src.pyconf.deepCopyMapping(config.APPLICATION)
1136         # no base in packages
1137         if "base" in app:
1138             app.base = "no" 
1139         # Change the workdir
1140         app.workdir = src.pyconf.Reference(
1141                                  app,
1142                                  src.pyconf.DOLLAR,
1143                                  'VARS.salometoolsway + $VARS.sep + ".."')
1144         res.addMapping("APPLICATION", app, "")
1145         res.__save__(f, evaluated=False)
1146     
1147
1148 def sat_package(config, tmp_working_dir, options, logger):
1149     '''Prepare a dictionary that stores all the needed directories and files to
1150        add in a salomeTool package.
1151     
1152     :param tmp_working_dir str: The temporary local working directory 
1153     :param options OptResult: the options of the launched command
1154     :return: the dictionary that stores all the needed directories and files to
1155              add in a salomeTool package.
1156              {label : (path_on_local_machine, path_in_archive)}
1157     :rtype: dict
1158     '''
1159     d_project = {}
1160
1161     # we include sat itself
1162     d_project["all_sat"]=(config.VARS.salometoolsway, "")
1163
1164     # and we overwrite local.pyconf with a clean version.
1165     local_pyconf_tmp_path = os.path.join(tmp_working_dir, "local.pyconf")
1166     local_file_path = os.path.join(config.VARS.datadir, "local.pyconf")
1167     local_cfg = src.pyconf.Config(local_file_path)
1168     local_cfg.PROJECTS.project_file_paths=src.pyconf.Sequence(local_cfg.PROJECTS)
1169     local_cfg.LOCAL["base"] = "default"
1170     local_cfg.LOCAL["workdir"] = "default"
1171     local_cfg.LOCAL["log_dir"] = "default"
1172     local_cfg.LOCAL["archive_dir"] = "default"
1173     local_cfg.LOCAL["VCS"] = "None"
1174     local_cfg.LOCAL["tag"] = src.get_salometool_version(config)
1175
1176     # if the archive contains a project, we write its relative path in local.pyconf
1177     if options.project:
1178         project_arch_path = os.path.join("projects", options.project, 
1179                                          os.path.basename(options.project_file_path))
1180         local_cfg.PROJECTS.project_file_paths.append(project_arch_path, "")
1181
1182     ff = open(local_pyconf_tmp_path, 'w')
1183     local_cfg.__save__(ff, 1)
1184     ff.close()
1185     d_project["local.pyconf"]=(local_pyconf_tmp_path, "data/local.pyconf")
1186     return d_project
1187     
1188
1189 def project_package(config, name_project, project_file_path, ftp_mode, tmp_working_dir, embedded_in_sat, logger):
1190     '''Prepare a dictionary that stores all the needed directories and files to
1191        add in a project package.
1192     
    :param config Config: The global configuration.
    :param name_project str: The name of the project to package.
1193     :param project_file_path str: The path to the local project.
1194     :param ftp_mode boolean: Do not embed archives; the package will rely on ftp mode to retrieve them.
1195     :param tmp_working_dir str: The temporary local directory containing some 
1196                                 specific directories or files needed in the 
1197                                 project package
1198     :param embedded_in_sat boolean: the project package is embedded in a sat package
    :param logger Logger: the logger instance used for display and logging
1199     :return: the dictionary that stores all the needed directories and files to
1200              add in a project package.
1201              {label : (path_on_local_machine, path_in_archive)}
1202     :rtype: dict
1203     '''
1204     d_project = {}
1205     # Read the project file and get the directories to add to the package
1206     
1207     try: 
1208       project_pyconf_cfg = config.PROJECTS.projects.__getattr__(name_project)
1209     except Exception:
1210       logger.write("""
1211 WARNING: no config.PROJECTS.projects.%s found, trying now to read it from:\n%s\n""" % (name_project, project_file_path))
1212       project_pyconf_cfg = src.pyconf.Config(project_file_path)
1213       project_pyconf_cfg.PWD = os.path.dirname(project_file_path)
1214     
1215     paths = {"APPLICATIONPATH" : "applications",
1216              "PRODUCTPATH" : "products",
1217              "JOBPATH" : "jobs",
1218              "MACHINEPATH" : "machines"}
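    # in ftp mode the product archives are not embedded, so the project
    # ARCHIVEPATH is left out of the package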
1219     if not ftp_mode:
1220         paths["ARCHIVEPATH"] = "archives"
1221
1222     # Loop over the project paths and add them to the package
1223     project_file_name = os.path.basename(project_file_path)
    # compute a default destination for the project pyconf file, so that it is
    # always defined even if none of the paths below exists in the project
    if embedded_in_sat:
        project_file_dest = os.path.join("projects", name_project, project_file_name)
    else:
        project_file_dest = project_file_name
1224     for path in paths:
1225         if path not in project_pyconf_cfg:
1226             continue
1227         if embedded_in_sat:
1228             dest_path = os.path.join("projects", name_project, paths[path])
1229             project_file_dest = os.path.join("projects", name_project, project_file_name)
1230         else:
1231             dest_path = paths[path]
1232             project_file_dest = project_file_name
1233
1234         # Add the directory to the files to add in the package
1235         d_project[path] = (project_pyconf_cfg[path], dest_path)
1236
1237         # Modify the value of the path in the package
1238         project_pyconf_cfg[path] = src.pyconf.Reference(
1239                                     project_pyconf_cfg,
1240                                     src.pyconf.DOLLAR,
1241                                     'project_path + "/' + paths[path] + '"')
1242     
1243     # Modify some values
1244     if "project_path" not in project_pyconf_cfg:
1245         project_pyconf_cfg.addMapping("project_path",
1246                                       src.pyconf.Mapping(project_pyconf_cfg),
1247                                       "")
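    # make project_path point to the location of the pyconf file ($PWD) once the
    # archive is extracted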
1248     project_pyconf_cfg.project_path = src.pyconf.Reference(project_pyconf_cfg,
1249                                                            src.pyconf.DOLLAR,
1250                                                            'PWD')
1251     # we don't want to export these two fields
1252     project_pyconf_cfg.__delitem__("file_path")
1253     project_pyconf_cfg.__delitem__("PWD")
1254     if ftp_mode and "ARCHIVEPATH" in project_pyconf_cfg:
1255         project_pyconf_cfg.__delitem__("ARCHIVEPATH")
1256     
1257     # Write the project pyconf file
1258     project_pyconf_tmp_path = os.path.join(tmp_working_dir, project_file_name)
1259     ff = open(project_pyconf_tmp_path, 'w')
1260     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1261     project_pyconf_cfg.__save__(ff, 1)
1262     ff.close()
1263     d_project["Project hat file"] = (project_pyconf_tmp_path, project_file_dest)
1264     
1265     return d_project
1266
1267 def add_readme(config, options, where):
1268     readme_path = os.path.join(where, "README")
1269     with codecs.open(readme_path, "w", 'utf-8') as f:
1270
1271         # templates for building the header
1272         readme_header="""
1273 # This package was generated with sat $version
1274 # Date: $date
1275 # User: $user
1276 # Distribution : $dist
1277
1278 In the following, $$ROOT represents the directory where you have installed 
1279 SALOME (the directory where this file is located).
1280
1281 """
1282         if src.architecture.is_windows():
1283             readme_header = readme_header.replace('$$ROOT','%ROOT%')
1284         readme_compilation_with_binaries="""
1285
1286 compilation based on the binaries used as prerequisites
1287 =======================================================
1288
1289 If you fail to compile the complete application (for example because
1290 you are not root on your system and cannot install missing packages), you
1291 may try a partial compilation based on the binaries.
1292 For that it is necessary to copy the binaries from BINARIES to INSTALL,
1293 and do some substitutions on cmake and .la files (replace the build directories
1294 with local paths).
1295 The procedure to do it is:
1296  1) Remove or rename INSTALL directory if it exists
1297  2) Execute the shell script install_bin.sh:
1298  > cd $ROOT
1299  > ./install_bin.sh
1300  3) Use salomeTools (as explained in the Sources section) and compile only the 
1301     modules you need (with the -p option)
1302
1303 """
1304         readme_header_tpl=string.Template(readme_header)
1305         readme_template_path_bin = os.path.join(config.VARS.internal_dir,
1306                 "README_BIN.template")
1307         readme_template_path_bin_launcher = os.path.join(config.VARS.internal_dir,
1308                 "README_LAUNCHER.template")
1309         readme_template_path_bin_virtapp = os.path.join(config.VARS.internal_dir,
1310                 "README_BIN_VIRTUAL_APP.template")
1311         readme_template_path_src = os.path.join(config.VARS.internal_dir,
1312                 "README_SRC.template")
1313         readme_template_path_pro = os.path.join(config.VARS.internal_dir,
1314                 "README_PROJECT.template")
1315         readme_template_path_sat = os.path.join(config.VARS.internal_dir,
1316                 "README_SAT.template")
1317
1318         # prepare substitution dictionary
1319         d = dict()
1320         d['user'] = config.VARS.user
1321         d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
1322         d['version'] = src.get_salometool_version(config)
1323         d['dist'] = config.VARS.dist
1324         f.write(readme_header_tpl.substitute(d)) # write the general header (common)
1325
1326         if options.binaries or options.sources:
1327             d['application'] = config.VARS.application
1328             d['BINARIES']    = config.INTERNAL.config.binary_dir
1329             d['SEPARATOR'] = config.VARS.sep
1330             if src.architecture.is_windows():
1331                 d['operatingSystem'] = 'Windows'
1332                 d['PYTHON3'] = 'python3'
1333                 d['ROOT']    = '%ROOT%'
1334             else:
1335                 d['operatingSystem'] = 'Linux'
1336                 d['PYTHON3'] = ''
1337                 d['ROOT']    = '$ROOT'
1338             f.write("# Application: " + d['application'] + "\n")
1339             if 'KERNEL' in config.APPLICATION.products:
1340                 VersionSalome = src.get_salome_version(config)
1341                 # Case where SALOME has the launcher that uses the SalomeContext API
1342                 if VersionSalome >= 730:
1343                     d['launcher'] = config.APPLICATION.profile.launcher_name
1344                 else:
1345                     d['virtual_app'] = 'runAppli' # this info is not used for now
1346
1347         # write the specific sections
1348         if options.binaries:
1349             f.write(src.template.substitute(readme_template_path_bin, d))
1350             if "virtual_app" in d:
1351                 f.write(src.template.substitute(readme_template_path_bin_virtapp, d))
1352             if "launcher" in d:
1353                 f.write(src.template.substitute(readme_template_path_bin_launcher, d))
1354
1355         if options.sources:
1356             f.write(src.template.substitute(readme_template_path_src, d))
1357
1358         if options.binaries and options.sources and not src.architecture.is_windows():
1359             f.write(readme_compilation_with_binaries)
1360
1361         if options.project:
1362             f.write(src.template.substitute(readme_template_path_pro, d))
1363
1364         if options.sat:
1365             f.write(src.template.substitute(readme_template_path_sat, d))
1366     
1367     return readme_path
1368
1369 def update_config(config, logger, prop, value):
1370     '''Remove from config.APPLICATION.products the products that have the property given as input.
1371     
1372     :param config Config: The global config.
    :param logger Logger: the logger instance used for display and logging
1373     :param prop str: The property to filter
1374     :param value str: The value of the property to filter
1375     '''
1376     # if there is no APPLICATION (e.g. sat package -t): nothing to do
1377     if "APPLICATION" in config:
1378         l_product_to_remove = []
1379         for product_name in config.APPLICATION.products.keys():
1380             prod_cfg = src.product.get_product_config(config, product_name)
1381             if src.get_property_in_product_cfg(prod_cfg, prop) == value:
1382                 l_product_to_remove.append(product_name)
1383         for product_name in l_product_to_remove:
1384             config.APPLICATION.products.__delitem__(product_name)
1385             logger.write("Remove product %s with property %s\n" % (product_name, prop), 5)
1386
1387 def description():
1388     '''method that is called when salomeTools is called with --help option.
1389     
1390     :return: The text to display for the package command description.
1391     :rtype: str
1392     '''
1393     return _("""
1394 The package command creates a tar file archive of a product.
1395 There are four kinds of archive, which can be mixed:
1396
1397  1 - The binary archive. 
1398      It contains the product installation directories plus a launcher.
1399  2 - The sources archive. 
1400      It contains the product archives and a project (the application plus salomeTools).
1401  3 - The project archive. 
1402      It contains a project (give the project file path as argument).
1403  4 - The salomeTools archive. 
1404      It contains the salomeTools utility code.
1405
1406 example:
1407  >> sat package SALOME-master --binaries --sources""")
1408   
1409 def run(args, runner, logger):
1410     '''method that is called when salomeTools is called with package parameter.
1411     '''
1412     
1413     # Parse the options
1414     (options, args) = parser.parse_args(args)
1415
1416     # Check that at least one type of package is requested (several types can be combined)
1417     all_option_types = (options.binaries,
1418                         options.sources,
1419                         options.project not in ["", None],
1420                         options.sat)
1421
1422     # Check if no option for package type
1423     if all_option_types.count(True) == 0:
1424         msg = _("Error: Specify a type for the package\nUse one of the "
1425                 "following options: --binaries, --sources, --project or"
1426                 " --salometools")
1427         logger.write(src.printcolors.printcError(msg), 1)
1428         logger.write("\n", 1)
1429         return 1
1430     
1431     # The directory where to put the package if it is neither binary nor source
1432     package_default_path = runner.cfg.LOCAL.workdir
1433     
1434     # if the package contains binaries or sources:
1435     if options.binaries or options.sources:
1436         # Check that the command has been called with an application
1437         src.check_config_has_application(runner.cfg)
1438
1439         # Display information
1440         logger.write(_("Packaging application %s\n") % src.printcolors.printcLabel(
1441                                                     runner.cfg.VARS.application), 1)
1442         
1443         # Get the default directory where to put the packages
1444         package_default_path = os.path.join(runner.cfg.APPLICATION.workdir, "PACKAGE")
1445         src.ensure_path_exists(package_default_path)
1446         
1447     # if the package contains a project:
1448     if options.project:
1449         # check that the project is visible by SAT
1450         projectNameFile = options.project + ".pyconf"
1451         foundProject = None
1452         for i in runner.cfg.PROJECTS.project_file_paths:
1453             baseName = os.path.basename(i)
1454             if baseName == projectNameFile:
1455                 foundProject = i
1456                 break
1457
1458         if foundProject is None:
1459             local_path = os.path.join(runner.cfg.VARS.salometoolsway, "data", "local.pyconf")
1460             msg = _("""ERROR: the project %(1)s is not visible to salomeTools.
1461 Known projects are:
1462 %(2)s
1463
1464 Please add it in file:
1465 %(3)s""" % \
1466                     {"1": options.project, "2": "\n  ".join(runner.cfg.PROJECTS.project_file_paths), "3": local_path})
1467             logger.write(src.printcolors.printcError(msg), 1)
1468             logger.write("\n", 1)
1469             return 1
1470         else:
1471             options.project_file_path = foundProject
1472             src.printcolors.print_value(logger, "Project path", options.project_file_path, 2)
1473     
1474     # Remove the products that are filtered by the --without_properties option
1475     if options.without_properties:
1476         prop, value = options.without_properties
1477         update_config(runner.cfg, logger, prop, value)
1478
1479     # Remove from config the products that have the not_in_package property
1480     update_config(runner.cfg, logger, "not_in_package", "yes")
1481
1482     # for binary packages without sources, remove compile time products
1483     if options.binaries and (not options.sources):
1484         update_config(runner.cfg, logger, "compile_time", "yes")
1485     
1486     # get the name of the archive or build it
1487     if options.name:
1488         if os.path.basename(options.name) == options.name:
1489             # only a name (not a path)
1490             archive_name = options.name           
1491             dir_name = package_default_path
1492         else:
1493             archive_name = os.path.basename(options.name)
1494             dir_name = os.path.dirname(options.name)
1495         
1496         # remove the extension if the user provided one (PACKAGE_EXT is appended back below)
1497         if archive_name[-len(".tgz"):] == ".tgz":
1498             archive_name = archive_name[:-len(".tgz")]
1499         if archive_name[-len(".tar.gz"):] == ".tar.gz":
1500             archive_name = archive_name[:-len(".tar.gz")]
1501         
1502     else:
1503         archive_name=""
1504         dir_name = package_default_path
1505         if options.binaries or options.sources:
1506             archive_name = runner.cfg.APPLICATION.name
1507
1508         if options.binaries:
1509             archive_name += "-"+runner.cfg.VARS.dist
1510             
1511         if options.sources:
1512             archive_name += "-SRC"
1513             if options.with_vcs:
1514                 archive_name += "-VCS"
1515
1516         if options.sat:
1517             archive_name += ("salomeTools_" + src.get_salometool_version(runner.cfg))
1518
1519         if options.project:
1520             if options.sat:
1521                 archive_name += "_" 
1522             archive_name += ("satproject_" + options.project)
1523  
1524         if len(archive_name)==0: # no option worked 
1525             msg = _("Error: Cannot name the archive\n"
1526                     " Check if at least one of the following options was "
1527                     "selected: --binaries, --sources, --project or"
1528                     " --salometools")
1529             logger.write(src.printcolors.printcError(msg), 1)
1530             logger.write("\n", 1)
1531             return 1
1532  
1533     path_targz = os.path.join(dir_name, archive_name + PACKAGE_EXT)
1534     
1535     src.printcolors.print_value(logger, "Package path", path_targz, 2)
1536
1537     # Create a working directory for all files that are produced during the
1538     # package creation and that will be removed at the end of the command
1539     tmp_working_dir = os.path.join(runner.cfg.VARS.tmp_root, runner.cfg.VARS.datehour)
1540     src.ensure_path_exists(tmp_working_dir)
1541     logger.write("\n", 5)
1542     logger.write(_("The temporary working directory: %s\n" % tmp_working_dir),5)
1543     
1544     logger.write("\n", 3)
1545
1546     msg = _("Preparation of files to add to the archive")
1547     logger.write(src.printcolors.printcLabel(msg), 2)
1548     logger.write("\n", 2)
1549     
1550     d_files_to_add={}  # content of the archive
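    # each entry maps a label to a (path_on_local_machine, path_in_archive) pair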
1551
1552     # a dict to hold paths that will need to be substituted for user recompilations
1553     d_paths_to_substitute={}  
1554
1555     if options.binaries:
1556         d_bin_files_to_add = binary_package(runner.cfg,
1557                                             logger,
1558                                             options,
1559                                             tmp_working_dir)
1560         # for all binaries dir, store the substitution that will be required 
1561         # for extra compilations
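        # these mappings are later written into install_bin.sh so that the copied
        # binaries can be patched to point to local paths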
1562         for key in d_bin_files_to_add:
1563             if key.endswith("(bin)"):
1564                 source_dir = d_bin_files_to_add[key][0]
1565                 path_in_archive = d_bin_files_to_add[key][1].replace(
1566                    runner.cfg.INTERNAL.config.binary_dir + runner.cfg.VARS.dist,
1567                    runner.cfg.INTERNAL.config.install_dir)
1568                 if os.path.basename(source_dir)==os.path.basename(path_in_archive):
1569                     # if basename is the same we will just substitute the dirname 
1570                     d_paths_to_substitute[os.path.dirname(source_dir)]=\
1571                         os.path.dirname(path_in_archive)
1572                 else:
1573                     d_paths_to_substitute[source_dir]=path_in_archive
1574
1575         d_files_to_add.update(d_bin_files_to_add)
1576     if options.sources:
1577         d_files_to_add.update(source_package(runner,
1578                                         runner.cfg,
1579                                         logger, 
1580                                         options,
1581                                         tmp_working_dir))
1582         if options.binaries:
1583             # for archives with bin and sources we provide a shell script able to 
1584             # install binaries for compilation
1585             file_install_bin=produce_install_bin_file(runner.cfg,logger,
1586                                                       tmp_working_dir,
1587                                                       d_paths_to_substitute,
1588                                                       "install_bin.sh")
1589             d_files_to_add.update({"install_bin" : (file_install_bin, "install_bin.sh")})
1590             logger.write("substitutions that need to be done later : \n", 5)
1591             logger.write(str(d_paths_to_substitute), 5)
1592             logger.write("\n", 5)
1593     else:
1594         # the --salometools option is ignored when --sources is selected, as the
1595         # sources archive already includes salomeTools
1596         if options.sat:
1597             d_files_to_add.update(sat_package(runner.cfg, tmp_working_dir, 
1598                                   options, logger))
1599         
1600     if options.project:
1601         DBG.write("config for package %s" % options.project, runner.cfg)
1602         d_files_to_add.update(project_package(runner.cfg, options.project, options.project_file_path, options.ftp, tmp_working_dir, options.sat, logger))
1603
1604     if not d_files_to_add:
1605         msg = _("Error: Empty dictionary to build the archive!\n")
1606         logger.write(src.printcolors.printcError(msg), 1)
1607         logger.write("\n", 1)
1608         return 1
1609
1610     # Add the README file in the package
1611     local_readme_tmp_path = add_readme(runner.cfg, options, tmp_working_dir)
1612     d_files_to_add["README"] = (local_readme_tmp_path, "README")
1613
1614     # Add the additional files of option add_files
1615     if options.add_files:
1616         for file_path in options.add_files:
1617             if not os.path.exists(file_path):
1618                 msg = _("WARNING: the file %s is not accessible.\n" % file_path)
                logger.write(msg, 1)  # report the warning instead of silently skipping the file
1619                 continue
1620             file_name = os.path.basename(file_path)
1621             d_files_to_add[file_name] = (file_path, file_name)
1622
1623     logger.write("\n", 2)
1624     logger.write(src.printcolors.printcLabel(_("Actually do the package")), 2)
1625     logger.write("\n", 2)
1626     logger.write("\nfiles and directories to add:\n%s\n\n" % PP.pformat(d_files_to_add), 5)
1627
1628     res = 0
1629     try:
1630         # Creating the object tarfile
1631         tar = tarfile.open(path_targz, mode='w:gz')
1632         
1633         # get the filtering function if needed
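        # exclude_VCS_and_extensions is expected to skip VCS bookkeeping directories
        # and the file extensions flagged as ignored at the top of this file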
1634         filter_function = exclude_VCS_and_extensions
1635
1636         # Add the files to the tarfile object
1637         res = add_files(tar, archive_name, d_files_to_add, logger, f_exclude=filter_function)
1638         tar.close()
1639     except KeyboardInterrupt:
1640         logger.write(src.printcolors.printcError("\nERROR: forced interruption\n"), 1)
1641         logger.write(_("Removing the temporary working directory '%s'... ") % tmp_working_dir, 1)
1642         # remove the working directory
1643         shutil.rmtree(tmp_working_dir)
1644         logger.write(_("OK"), 1)
1645         logger.write(_("\n"), 1)
1646         return 1
1647     
1648     # case with no application (e.g. packaging only sat with 'sat package -t')
1649     try:
1650         app = runner.cfg.APPLICATION
1651     except Exception:
1652         app = None
1653
1654     # unconditionally remove the tmp_local_working_dir
1655     if app is not None:
1656         tmp_local_working_dir = os.path.join(app.workdir, "tmp_package")
1657         if os.path.isdir(tmp_local_working_dir):
1658             shutil.rmtree(tmp_local_working_dir)
1659
1660     # remove the tmp directory, unless the user is registered as a developer
1661     if os.path.isdir(tmp_working_dir) and (not DBG.isDeveloper()):
1662         shutil.rmtree(tmp_working_dir)
1663     
1664     # Print again the path of the package
1665     logger.write("\n", 2)
1666     src.printcolors.print_value(logger, "Package path", path_targz, 2)
1667     
1668     return res