sat #18868: changes to allow rebuilding archives when products...
[tools/sat.git] / commands / package.py
#!/usr/bin/env python
#-*- coding:utf-8 -*-
#  Copyright (C) 2010-2012  CEA/DEN
#
#  This library is free software; you can redistribute it and/or
#  modify it under the terms of the GNU Lesser General Public
#  License as published by the Free Software Foundation; either
#  version 2.1 of the License.
#
#  This library is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
#  Lesser General Public License for more details.
#
#  You should have received a copy of the GNU Lesser General Public
#  License along with this library; if not, write to the Free Software
#  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA

import os
import stat
import shutil
import datetime
import tarfile
import codecs
import string
import glob
import pprint as PP
import sys
import src

from application import get_SALOME_modules
import src.debug as DBG

old_python = sys.version_info[0] == 2 and sys.version_info[1] <= 6

BINARY = "binary"
SOURCE = "Source"
PROJECT = "Project"
SAT = "Sat"

ARCHIVE_DIR = "ARCHIVES"
PROJECT_DIR = "PROJECT"

IGNORED_DIRS = [".git", ".svn"]
IGNORED_EXTENSIONS = []

PACKAGE_EXT=".tar.gz" # the extension we use for the packages

PROJECT_TEMPLATE = """#!/usr/bin/env python
#-*- coding:utf-8 -*-

# The path to the archive root directory
root_path : $PWD + "/../"
# path to the PROJECT
project_path : $PWD + "/"

# Where to search the archives of the products
ARCHIVEPATH : $root_path + "ARCHIVES"
# Where to search the pyconf of the applications
APPLICATIONPATH : $project_path + "applications/"
# Where to search the pyconf of the products
PRODUCTPATH : $project_path + "products/"
# Where to search the pyconf of the jobs of the project
JOBPATH : $project_path + "jobs/"
# Where to search the pyconf of the machines of the project
MACHINEPATH : $project_path + "machines/"
"""

LOCAL_TEMPLATE = ("""#!/usr/bin/env python
#-*- coding:utf-8 -*-

  LOCAL :
  {
    base : 'default'
    workdir : 'default'
    log_dir : 'default'
    archive_dir : 'default'
    VCS : 'unknown'
    tag : 'unknown'
  }

PROJECTS :
{
project_file_paths : [$VARS.salometoolsway + $VARS.sep + \"..\" + $VARS.sep"""
""" + \"""" + PROJECT_DIR + """\" + $VARS.sep + "project.pyconf"]
}
""")

# Define all possible options for the package command: sat package <options>
parser = src.options.Options()
parser.add_option('b', 'binaries', 'boolean', 'binaries',
    _('Optional: Produce a binary package.'), False)
parser.add_option('f', 'force_creation', 'boolean', 'force_creation',
    _('Optional: Only binary package: produce the archive even if '
      'there are some missing products.'), False)
parser.add_option('s', 'sources', 'boolean', 'sources',
    _('Optional: Produce a compilable archive of the sources of the '
      'application.'), False)
parser.add_option('', 'with_vcs', 'boolean', 'with_vcs',
    _('Optional: Do not make archives for products in VCS mode (git, cvs, svn). '
      'Sat prepare will use VCS mode instead to retrieve them.'),
    False)
parser.add_option('', 'ftp', 'boolean', 'ftp',
    _('Optional: Do not embed archives for products in archive mode. '
      'Sat prepare will use ftp instead to retrieve them.'),
    False)
parser.add_option('p', 'project', 'string', 'project',
    _('Optional: Produce an archive that contains a project.'), "")
parser.add_option('t', 'salometools', 'boolean', 'sat',
    _('Optional: Produce an archive that contains salomeTools.'), False)
parser.add_option('n', 'name', 'string', 'name',
    _('Optional: The name or full path of the archive.'), None)
parser.add_option('', 'add_files', 'list2', 'add_files',
    _('Optional: The list of additional files to add to the archive.'), [])
parser.add_option('', 'without_properties', 'properties', 'without_properties',
    _('Optional: Filter the products by their properties.\n\tSyntax: '
      '--without_properties <property>:<value>'))

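# Typical invocations (illustrative only; the application name is a placeholder):
#   sat package <application> --binaries --sources   # mixed binaries + sources archive
#   sat package <application> --sources --with_vcs   # source archive keeping vcs products in vcs mode
#   sat package --salometools                        # archive containing salomeTools itself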

def add_files(tar, name_archive, d_content, logger, f_exclude=None):
    '''Create an archive containing all directories and files that are given in
       the d_content argument.
    
    :param tar tarfile: The tarfile instance used to make the archive.
    :param name_archive str: The name of the archive to make.
    :param d_content dict: The dictionary that contains all directories and files
                           to add in the archive.
                           d_content[label] = 
                                        (path_on_local_machine, path_in_archive)
    :param logger Logger: the logging instance
    :param f_exclude Function: the function that filters
    :return: 0 if success, 1 if not.
    :rtype: int
    '''
    # get the max length of the labels in order to align the display
    max_len = len(max(d_content.keys(), key=len))
    
    success = 0
    # loop over each directory or file stored in the d_content dictionary
    names = sorted(d_content.keys())
    DBG.write("add tar names", names)

    # used to avoid duplications (for pip install in python, or single_install_dir cases)
    already_added=set()
    for name in names:
        # display information
        len_points = max_len - len(name) + 3
        local_path, archive_path = d_content[name]
        in_archive = os.path.join(name_archive, archive_path)
        logger.write(name + " " + len_points * "." + " " + in_archive + " ", 3)
        # Get the local path and the path in archive 
        # of the directory or file to add
        # Add it in the archive
        try:
            key=local_path+"->"+in_archive
            if key not in already_added:
                if old_python:
                    tar.add(local_path,
                            arcname=in_archive,
                            exclude=exclude_VCS_and_extensions_26)
                else:
                    tar.add(local_path,
                            arcname=in_archive,
                            filter=exclude_VCS_and_extensions)
                already_added.add(key)
            logger.write(src.printcolors.printcSuccess(_("OK")), 3)
        except Exception as e:
            logger.write(src.printcolors.printcError(_("KO ")), 3)
            logger.write(str(e), 3)
            success = 1
        logger.write("\n", 3)
    return success

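# Illustrative sketch of how add_files is meant to be used (labels and paths are hypothetical):
#   d_content = {"KERNEL (bin)": ("/data/INSTALL/KERNEL", "BINARIES-FD32/KERNEL"),
#                "environment file": ("/tmp/tmp_package/env_launch.sh", "env_launch.sh")}
#   with tarfile.open("SALOME-archive" + PACKAGE_EXT, mode='w:gz') as tar:
#       add_files(tar, "SALOME-archive", d_content, logger)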

def exclude_VCS_and_extensions_26(filename):
    ''' The function that is used to exclude from the package the links to the 
        VCS repositories (like .git) (only for python 2.6)

    :param filename Str: The filename to exclude (or not).
    :return: True if the file has to be excluded
    :rtype: Boolean
    '''
    for dir_name in IGNORED_DIRS:
        if dir_name in filename:
            return True
    for extension in IGNORED_EXTENSIONS:
        if filename.endswith(extension):
            return True
    return False

def exclude_VCS_and_extensions(tarinfo):
    ''' The function that is used to exclude from the package the links to the 
        VCS repositories (like .git)

    :param tarinfo TarInfo: The tar entry to filter (or not).
    :return: None if the entry has to be excluded
    :rtype: tarinfo or None
    '''
    filename = tarinfo.name
    for dir_name in IGNORED_DIRS:
        if dir_name in filename:
            return None
    for extension in IGNORED_EXTENSIONS:
        if filename.endswith(extension):
            return None
    return tarinfo

def produce_relative_launcher(config,
                              logger,
                              file_dir,
                              file_name,
                              binaries_dir_name):
    '''Create a specific SALOME launcher for the binary package. This launcher 
       uses relative paths.
    
    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param file_dir str: the directory where to put the launcher
    :param file_name str: The launcher name
    :param binaries_dir_name str: the name of the directory where the binaries
                                  are, in the archive.
    :return: the path of the produced launcher
    :rtype: str
    '''
    
    # set base mode to "no" for the archive - save current mode to restore it at the end
    if "base" in config.APPLICATION:
        base_setting=config.APPLICATION.base
    else:
        base_setting="maybe"
    config.APPLICATION.base="no"

    # get KERNEL installation path 
    kernel_info = src.product.get_product_config(config, "KERNEL")
    kernel_base_name=os.path.basename(kernel_info.install_dir)
    if kernel_base_name.startswith("config"):
        # case of kernel installed in base. We remove "config-i"
        kernel_base_name=os.path.basename(os.path.dirname(kernel_info.install_dir))
    
    kernel_root_dir = os.path.join(binaries_dir_name, kernel_base_name)

    # set kernel bin dir (considering fhs property)
    kernel_cfg = src.product.get_product_config(config, "KERNEL")
    if src.get_property_in_product_cfg(kernel_cfg, "fhs"):
        bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin")
    else:
        bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin","salome")

    # check if the application contains an application module
    # check also if the application has a distene product, 
    # in this case get its licence file name
    l_product_info = src.product.get_products_infos(config.APPLICATION.products.keys(), config)
    salome_application_name="Not defined"
    distene_licence_file_name=False
    for prod_name, prod_info in l_product_info:
        # look for a "salome application" and a distene product
        if src.get_property_in_product_cfg(prod_info, "is_distene") == "yes":
            distene_licence_file_name = src.product.product_has_licence(prod_info, 
                                            config.PATHS.LICENCEPATH)
        if src.get_property_in_product_cfg(prod_info, "is_salome_application") == "yes":
            salome_application_name=prod_info.name

    # if the application contains an application module, we set ABSOLUTE_APPLI_PATH to it
    # if not we set it to KERNEL_INSTALL_DIR, which is sufficient, except for salome test
    if salome_application_name == "Not defined":
        app_root_dir=kernel_root_dir
    else:
        app_root_dir=os.path.join(binaries_dir_name, salome_application_name)

    additional_env={}
    additional_env['sat_bin_kernel_install_dir'] = "out_dir_Path + " +\
                                                   config.VARS.sep + bin_kernel_install_dir
    if "python3" in config.APPLICATION and config.APPLICATION.python3 == "yes":
        additional_env['sat_python_version'] = 3
    else:
        additional_env['sat_python_version'] = 2

    additional_env['ABSOLUTE_APPLI_PATH'] = "out_dir_Path" + config.VARS.sep + app_root_dir

    # create an environment file writer
    writer = src.environment.FileEnvWriter(config,
                                           logger,
                                           file_dir,
                                           src_root=None,
                                           env_info=None)
    
    filepath = os.path.join(file_dir, file_name)
    # Write
    writer.write_env_file(filepath,
                          False,  # for launch
                          "cfgForPy",
                          additional_env=additional_env,
                          no_path_init="False",
                          for_package = binaries_dir_name)
    
    # Little hack to put out_dir_Path outside the strings
    src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' )
    src.replace_in_file(filepath, "r'out_dir_Path + ", "out_dir_Path + r'" )
    
    # A hack to put a call to a file for the distene licence.
    # It does nothing for an application that has no distene product
    if distene_licence_file_name:
        logger.write("Application has a distene licence file! We use it in package launcher", 5)
        hack_for_distene_licence(filepath, distene_licence_file_name)
       
    # change the rights in order to make the file executable for everybody
    os.chmod(filepath,
             stat.S_IRUSR |
             stat.S_IRGRP |
             stat.S_IROTH |
             stat.S_IWUSR |
             stat.S_IXUSR |
             stat.S_IXGRP |
             stat.S_IXOTH)

    # restore the modified setting to its initial value
    config.APPLICATION.base=base_setting

    return filepath

def hack_for_distene_licence(filepath, licence_file):
    '''Replace the distene licence env variable by a call to a file.
    
    :param filepath Str: The path to the launcher to modify.
    :param licence_file Str: The path to the distene licence file to call.
    '''
    shutil.move(filepath, filepath + "_old")
    fileout= filepath
    filein = filepath + "_old"
    fin = open(filein, "r")
    fout = open(fileout, "w")
    text = fin.readlines()
    # Find the Distene section
    num_line = -1
    for i,line in enumerate(text):
        if "# Set DISTENE License" in line:
            num_line = i
            break
    if num_line == -1:
        # No distene product, there is nothing to do
        fin.close()
        for line in text:
            fout.write(line)
        fout.close()
        return
    del text[num_line +1]
    del text[num_line +1]
    text_to_insert ="""    try:
        distene_licence_file=r"%s"
        if sys.version_info[0] >= 3 and sys.version_info[1] >= 5:
            import importlib.util
            spec_dist = importlib.util.spec_from_file_location("distene_licence", distene_licence_file)
            distene=importlib.util.module_from_spec(spec_dist)
            spec_dist.loader.exec_module(distene)
        else:
            import imp
            distene = imp.load_source('distene_licence', distene_licence_file)
        distene.set_distene_variables(context)
    except:
        pass\n"""  % licence_file
    text.insert(num_line + 1, text_to_insert)
    for line in text:
        fout.write(line)
    fin.close()
    fout.close()
    return
    
def produce_relative_env_files(config,
                              logger,
                              file_dir,
                              binaries_dir_name):
    '''Create some specific environment files for the binary package. These 
       files use relative paths.
    
    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param file_dir str: the directory where to put the files
    :param binaries_dir_name str: the name of the directory where the binaries
                                  are, in the archive.
    :return: the path of the produced environment file
    :rtype: str
    '''
    # create an environment file writer
    writer = src.environment.FileEnvWriter(config,
                                           logger,
                                           file_dir,
                                           src_root=None)
    
    if src.architecture.is_windows():
      shell = "bat"
      filename  = "env_launch.bat"
    else:
      shell = "bash"
      filename  = "env_launch.sh"

    # Write
    filepath = writer.write_env_file(filename,
                          False, # for launch
                          shell,
                          for_package = binaries_dir_name)

    # Little hack to put out_dir_Path as an environment variable
    if src.architecture.is_windows() :
      src.replace_in_file(filepath, '"out_dir_Path', '"%out_dir_Path%' )
      src.replace_in_file(filepath, '=out_dir_Path', '=%out_dir_Path%' )
      src.replace_in_file(filepath, ';out_dir_Path', ';%out_dir_Path%' )
    else:
      src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )
      src.replace_in_file(filepath, ':out_dir_Path', ':${out_dir_Path}' )

    # change the rights in order to make the file executable for everybody
    os.chmod(filepath,
             stat.S_IRUSR |
             stat.S_IRGRP |
             stat.S_IROTH |
             stat.S_IWUSR |
             stat.S_IXUSR |
             stat.S_IXGRP |
             stat.S_IXOTH)
    
    return filepath

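# Illustrative effect of the replace_in_file calls above on one generated bash line
# (the exported variable and paths are hypothetical):
#   before: export PATH="out_dir_Path/BINARIES-FD32/KERNEL/bin/salome:${PATH}"
#   after:  export PATH="${out_dir_Path}/BINARIES-FD32/KERNEL/bin/salome:${PATH}"
# so that out_dir_Path is resolved by the shell when the archive is relocated.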
def produce_install_bin_file(config,
                             logger,
                             file_dir,
                             d_sub,
                             file_name):
    '''Create a bash shell script which does substitutions in the BINARIES dir 
       in order to use it for extra compilations.
    
    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param file_dir str: the directory where to put the files
    :param d_sub dict: the dictionary that contains the substitutions to be done
    :param file_name str: the name of the install script file
    :return: the produced file
    :rtype: str
    '''
    # Write
    filepath = os.path.join(file_dir, file_name)
    # open the file and write into it
    # use codec utf-8 as sat variables are in unicode
    with codecs.open(filepath, "w", 'utf-8') as installbin_file:
        installbin_template_path = os.path.join(config.VARS.internal_dir,
                                        "INSTALL_BIN.template")
        
        # build the name of the directory that will contain the binaries
        binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
        # build the substitution loop
        loop_cmd = "for f in $(grep -RIl"
        for key in d_sub:
            loop_cmd += " -e "+ key
        loop_cmd += ' ' + config.INTERNAL.config.install_dir +\
                    '); do\n     sed -i "\n'
        for key in d_sub:
            loop_cmd += "        s?" + key + "?$(pwd)/" + d_sub[key] + "?g\n"
        loop_cmd += '            " $f\ndone'

        d={}
        d["BINARIES_DIR"] = binaries_dir_name
        d["SUBSTITUTION_LOOP"]=loop_cmd
        d["INSTALL_DIR"]=config.INTERNAL.config.install_dir
        
        # substitute the template and write it in the file
        content=src.template.substitute(installbin_template_path, d)
        installbin_file.write(content)
        # change the rights in order to make the file executable for everybody
        os.chmod(filepath,
                 stat.S_IRUSR |
                 stat.S_IRGRP |
                 stat.S_IROTH |
                 stat.S_IWUSR |
                 stat.S_IXUSR |
                 stat.S_IXGRP |
                 stat.S_IXOTH)
    
    return filepath

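# Illustrative shape of the generated SUBSTITUTION_LOOP, assuming the install dir
# is "INSTALL" and a hypothetical d_sub = {"@BIN_DIR@": "BINARIES-FD32"}:
#   for f in $(grep -RIl -e @BIN_DIR@ INSTALL); do
#        sed -i "
#           s?@BIN_DIR@?$(pwd)/BINARIES-FD32?g
#           " $f
#   done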
def product_appli_creation_script(config,
                                  logger,
                                  file_dir,
                                  binaries_dir_name):
    '''Create a script that can produce an application (EDF style) in the binary
       package.
    
    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param file_dir str: the directory where to put the file
    :param binaries_dir_name str: the name of the directory where the binaries
                                  are, in the archive.
    :return: the path of the produced script file
    :rtype: Str
    '''
    template_name = "create_appli.py.for_bin_packages.template"
    template_path = os.path.join(config.VARS.internal_dir, template_name)
    text_to_fill = open(template_path, "r").read()
    text_to_fill = text_to_fill.replace("TO BE FILLED 1",
                                        '"' + binaries_dir_name + '"')
    
    text_to_add = ""
    for product_name in get_SALOME_modules(config):
        product_info = src.product.get_product_config(config, product_name)
       
        if src.product.product_is_smesh_plugin(product_info):
            continue

        if 'install_dir' in product_info and bool(product_info.install_dir):
            if src.product.product_is_cpp(product_info):
                # cpp module
                for cpp_name in src.product.get_product_components(product_info):
                    line_to_add = ("<module name=\"" + 
                                   cpp_name + 
                                   "\" gui=\"yes\" path=\"''' + "
                                   "os.path.join(dir_bin_name, \"" + 
                                   cpp_name + "\") + '''\"/>")
            else:
                # regular module
                line_to_add = ("<module name=\"" + 
                               product_name + 
                               "\" gui=\"yes\" path=\"''' + "
                               "os.path.join(dir_bin_name, \"" + 
                               product_name + "\") + '''\"/>")
            text_to_add += line_to_add + "\n"
    
    filled_text = text_to_fill.replace("TO BE FILLED 2", text_to_add)
    
    tmp_file_path = os.path.join(file_dir, "create_appli.py")
    ff = open(tmp_file_path, "w")
    ff.write(filled_text)
    ff.close()
    
    # change the rights in order to make the file executable for everybody
    os.chmod(tmp_file_path,
             stat.S_IRUSR |
             stat.S_IRGRP |
             stat.S_IROTH |
             stat.S_IWUSR |
             stat.S_IXUSR |
             stat.S_IXGRP |
             stat.S_IXOTH)
    
    return tmp_file_path

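# Illustrative line appended to text_to_add for a regular module (the product name is hypothetical):
#   <module name="GEOM" gui="yes" path="''' + os.path.join(dir_bin_name, "GEOM") + '''"/>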
def binary_package(config, logger, options, tmp_working_dir):
    '''Prepare a dictionary that stores all the needed directories and files to
       add in a binary package.
    
    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param options OptResult: the options of the launched command
    :param tmp_working_dir str: The temporary local directory containing some 
                                specific directories or files needed in the 
                                binary package
    :return: the dictionary that stores all the needed directories and files to
             add in a binary package.
             {label : (path_on_local_machine, path_in_archive)}
    :rtype: dict
    '''

    # Get the list of product installations to add to the archive
    l_products_name = sorted(config.APPLICATION.products.keys())
    l_product_info = src.product.get_products_infos(l_products_name,
                                                    config)

    # suppress compile-time products for binaries-only archives
    if not options.sources:
        update_config(config, logger, "compile_time", "yes")

    l_install_dir = []
    l_source_dir = []
    l_not_installed = []
    l_sources_not_present = []
    generate_mesa_launcher = False  # a flag to know if we generate a mesa launcher
    if ("APPLICATION" in config  and
        "properties"  in config.APPLICATION  and
        "mesa_launcher_in_package"    in config.APPLICATION.properties  and
        config.APPLICATION.properties.mesa_launcher_in_package == "yes") :
            generate_mesa_launcher=True

    for prod_name, prod_info in l_product_info:
        # skip products with the property not_in_package set to yes
        if src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes":
            continue

        # Add the sources of the products that have the property 
        # sources_in_package : "yes"
        if src.get_property_in_product_cfg(prod_info,
                                           "sources_in_package") == "yes":
            if os.path.exists(prod_info.source_dir):
                l_source_dir.append((prod_name, prod_info.source_dir))
            else:
                l_sources_not_present.append(prod_name)

        # ignore the native and fixed products for install directories
        if (src.product.product_is_native(prod_info) 
                or src.product.product_is_fixed(prod_info)
                or not src.product.product_compiles(prod_info)):
            continue
        # 
        # products with the single_install_dir property will be installed in the PRODUCTS directory of the archive
        is_single_dir=(src.appli_test_property(config,"single_install_dir", "yes") and \
                       src.product.product_test_property(prod_info,"single_install_dir", "yes"))
        if src.product.check_installation(config, prod_info):
            l_install_dir.append((prod_name, prod_info.name, prod_info.install_dir, is_single_dir))
        else:
            l_not_installed.append(prod_name)
        
        # Add also the cpp generated modules (if any)
        if src.product.product_is_cpp(prod_info):
            # cpp module
            for name_cpp in src.product.get_product_components(prod_info):
                install_dir = os.path.join(config.APPLICATION.workdir,
                                           config.INTERNAL.config.install_dir,
                                           name_cpp)
                if os.path.exists(install_dir):
                    l_install_dir.append((name_cpp, name_cpp, install_dir, False))
                else:
                    l_not_installed.append(name_cpp)
        
    # check the name of the directory that (could) contain the binaries 
    # from a previous detar
    binaries_from_detar = os.path.join(
                              config.APPLICATION.workdir,
                              config.INTERNAL.config.binary_dir + config.VARS.dist)
    if os.path.exists(binaries_from_detar):
         logger.write("""
WARNING: existing binaries directory from previous detar installation:
         %s
         To make a new package from this, you have to: 
         1) install binaries in the INSTALL directory with the script "install_bin.sh" 
            see the README file for more details
         2) or recompile everything in INSTALL with the "sat compile" command 
            this step is long, and requires some linux packages to be installed 
            on your system\n
""" % binaries_from_detar)
    
    # Print a warning or an error if there are some missing products
    if len(l_not_installed) > 0:
        text_missing_prods = ""
        for p_name in l_not_installed:
            text_missing_prods += " - " + p_name + "\n"
        if not options.force_creation:
            msg = _("ERROR: there are missing product installations:")
            logger.write("%s\n%s" % (src.printcolors.printcError(msg),
                                     text_missing_prods),
                         1)
            raise src.SatException(msg)
        else:
            msg = _("WARNING: there are missing product installations:")
            logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
                                     text_missing_prods),
                         1)

    # Do the same for sources
    if len(l_sources_not_present) > 0:
        text_missing_prods = ""
        for p_name in l_sources_not_present:
            text_missing_prods += " - " + p_name + "\n"
        if not options.force_creation:
            msg = _("ERROR: there are missing product sources:")
            logger.write("%s\n%s" % (src.printcolors.printcError(msg),
                                     text_missing_prods),
                         1)
            raise src.SatException(msg)
        else:
            msg = _("WARNING: there are missing product sources:")
            logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
                                     text_missing_prods),
                         1)
 
    # construct the name of the directory that will contain the binaries
    if src.architecture.is_windows():
        binaries_dir_name = config.INTERNAL.config.binary_dir
    else:
        binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
    # construct the correlation table between the product names, their 
    # actual install directories and their install directories in the archive
    d_products = {}
    for prod_name, prod_info_name, install_dir, is_single_dir in l_install_dir:
        prod_base_name=os.path.basename(install_dir)
        if prod_base_name.startswith("config"):
            # case of a product installed in base. Because the archive is in base:no mode, 
            # we replace "config-i" by the product name or by PRODUCTS if single-dir
            if is_single_dir:
                prod_base_name=config.INTERNAL.config.single_install_dir
            else:
                prod_base_name=prod_info_name
        path_in_archive = os.path.join(binaries_dir_name, prod_base_name)
        d_products[prod_name + " (bin)"] = (install_dir, path_in_archive)
        
    for prod_name, source_dir in l_source_dir:
        path_in_archive = os.path.join("SOURCES", prod_name)
        d_products[prod_name + " (sources)"] = (source_dir, path_in_archive)

    # for packages of SALOME applications including KERNEL, 
    # we produce a salome launcher or a virtual application (depending on the salome version)
    if 'KERNEL' in config.APPLICATION.products:
        VersionSalome = src.get_salome_version(config)
        # Case where SALOME has the launcher that uses the SalomeContext API
        if VersionSalome >= 730:
            # create the relative launcher and add it to the files to add
            launcher_name = src.get_launcher_name(config)
            launcher_package = produce_relative_launcher(config,
                                                 logger,
                                                 tmp_working_dir,
                                                 launcher_name,
                                                 binaries_dir_name)
            d_products["launcher"] = (launcher_package, launcher_name)

            # if the application contains mesa products, we generate in addition to the 
            # classical salome launcher a launcher using mesa and called mesa_salome 
            # (the mesa launcher will be used for remote usage through ssh).
            if generate_mesa_launcher:
                # if there is one: store the use_mesa property
                restore_use_mesa_option=None
                if ('properties' in config.APPLICATION and 
                    'use_mesa' in config.APPLICATION.properties):
                    restore_use_mesa_option = config.APPLICATION.properties.use_mesa

                # activate the mesa property, and generate a mesa launcher
                src.activate_mesa_property(config)  # activate use_mesa property
                launcher_mesa_name="mesa_"+launcher_name
                launcher_package_mesa = produce_relative_launcher(config,
                                                     logger,
                                                     tmp_working_dir,
                                                     launcher_mesa_name,
                                                     binaries_dir_name)
                d_products["launcher (mesa)"] = (launcher_package_mesa, launcher_mesa_name)

                # if there was a use_mesa value, we restore it
                # else we set it to the default value "no"
                if restore_use_mesa_option != None:
                    config.APPLICATION.properties.use_mesa=restore_use_mesa_option
                else:
                    config.APPLICATION.properties.use_mesa="no"

            if options.sources:
                # if we mix binaries and sources, we add a copy of the launcher, 
                # prefixed with "bin", in order to avoid clashes
                d_products["launcher (copy)"] = (launcher_package, "bin"+launcher_name)
        else:
            # Provide a script for the creation of an application (EDF style)
            appli_script = product_appli_creation_script(config,
                                                        logger,
                                                        tmp_working_dir,
                                                        binaries_dir_name)
            
            d_products["appli script"] = (appli_script, "create_appli.py")

    # Put also the environment file
    env_file = produce_relative_env_files(config,
                                           logger,
                                           tmp_working_dir,
                                           binaries_dir_name)

    if src.architecture.is_windows():
      filename  = "env_launch.bat"
    else:
      filename  = "env_launch.sh"
    d_products["environment file"] = (env_file, filename)
    return d_products

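# Illustrative shape of the dictionary returned by binary_package
# (application, paths and dist suffix are hypothetical):
#   {"KERNEL (bin)":      ("<workdir>/INSTALL/KERNEL", "BINARIES-FD32/KERNEL"),
#    "launcher":          ("<tmp_working_dir>/salome", "salome"),
#    "environment file":  ("<tmp_working_dir>/env_launch.sh", "env_launch.sh")}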
def source_package(sat, config, logger, options, tmp_working_dir):
    '''Prepare a dictionary that stores all the needed directories and files to
       add in a source package.
    
    :param sat Sat: The Sat instance, used to retrieve the sources if needed
    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param options OptResult: the options of the launched command
    :param tmp_working_dir str: The temporary local directory containing some 
                                specific directories or files needed in the 
                                source package
    :return: the dictionary that stores all the needed directories and files to
             add in a source package.
             {label : (path_on_local_machine, path_in_archive)}
    :rtype: dict
    '''
    
    d_archives={}
    l_pinfo_vcs = []
    # Get all the products that are prepared using an archive
    # unless ftp mode is specified (in this case the user of the
    # archive will get the sources through the ftp mode of sat prepare)
    if not options.ftp:
        logger.write("Find archive products ... ")
        d_archives, l_pinfo_vcs = get_archives(config, logger)
        logger.write("Done\n")

    d_archives_vcs = {}
    if not options.with_vcs and len(l_pinfo_vcs) > 0:
        # Make archives with the products that are not prepared using an archive
        # (git, cvs, svn, etc)
        logger.write("Construct archives for vcs products ... ")
        d_archives_vcs = get_archives_vcs(l_pinfo_vcs,
                                          sat,
                                          config,
                                          logger,
                                          tmp_working_dir)
        logger.write("Done\n")

    # Create a project
    logger.write("Create the project ... ")
    d_project = create_project_for_src_package(config,
                                               tmp_working_dir,
                                               options.with_vcs,
                                               options.ftp)
    logger.write("Done\n")
    
    # Add salomeTools
    tmp_sat = add_salomeTools(config, tmp_working_dir)
    d_sat = {"salomeTools" : (tmp_sat, "salomeTools")}
    
    # Add a sat symbolic link if not on windows
    if not src.architecture.is_windows():
        tmp_satlink_path = os.path.join(tmp_working_dir, 'sat')
        try:
            t = os.getcwd()
        except:
            # In the jobs, os.getcwd() can fail
            t = config.LOCAL.workdir
        os.chdir(tmp_working_dir)
        if os.path.lexists(tmp_satlink_path):
            os.remove(tmp_satlink_path)
        os.symlink(os.path.join('salomeTools', 'sat'), 'sat')
        os.chdir(t)
        
        d_sat["sat link"] = (tmp_satlink_path, "sat")
    
    d_source = src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
    return d_source

def get_archives(config, logger):
    '''Find all the products that are retrieved using an archive and all the 
       products that are retrieved using a vcs (git, cvs, svn) repository.
    
    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :return: the dictionary {name_product : 
             (local path of its archive, path in the package of its archive )}
             and the list of specific configurations corresponding to the vcs 
             products
    :rtype: (Dict, List)
    '''
    # Get the list of product information
    l_products_name = config.APPLICATION.products.keys()
    l_product_info = src.product.get_products_infos(l_products_name,
                                                    config)
    d_archives = {}
    l_pinfo_vcs = []
    for p_name, p_info in l_product_info:
        # skip products with the property not_in_package set to yes
        if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
            continue
        # ignore the native and fixed products
        if (src.product.product_is_native(p_info) 
                or src.product.product_is_fixed(p_info)):
            continue
        if p_info.get_source == "archive":
            archive_path = p_info.archive_info.archive_name
            archive_name = os.path.basename(archive_path)
            d_archives[p_name] = (archive_path,
                                  os.path.join(ARCHIVE_DIR, archive_name))
            if (src.appli_test_property(config,"pip", "yes") and 
                src.product.product_test_property(p_info,"pip", "yes")):
                # if pip mode is activated, and the product is managed by pip
                pip_wheels_dir=os.path.join(config.LOCAL.archive_dir,"wheels")
                pip_wheel_pattern=os.path.join(pip_wheels_dir, 
                    "%s-%s*" % (p_info.name, p_info.version))
                pip_wheel_path=glob.glob(pip_wheel_pattern)
                msg_pip_not_found="Error in get_archives, the pip wheel for "\
                                  "product %s-%s was not found in %s directory"
                msg_pip_two_or_more="Error in get_archives, several pip wheels for "\
                                  "product %s-%s were found in %s directory"
                if len(pip_wheel_path)==0:
                    raise src.SatException(msg_pip_not_found %\
                        (p_info.name, p_info.version, pip_wheels_dir))
                if len(pip_wheel_path)>1:
                    raise src.SatException(msg_pip_two_or_more %\
                        (p_info.name, p_info.version, pip_wheels_dir))

                pip_wheel_name=os.path.basename(pip_wheel_path[0])
                d_archives[p_name+" (pip wheel)"]=(pip_wheel_path[0], 
                    os.path.join(ARCHIVE_DIR, "wheels", pip_wheel_name))
        else:
            # this product is not managed by archive, 
            # an archive of the vcs directory will be created by get_archives_vcs
            l_pinfo_vcs.append((p_name, p_info))
            
    return d_archives, l_pinfo_vcs

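# Illustrative shape of the values returned by get_archives (product names and paths are hypothetical):
#   d_archives  = {"boost": ("/data/ARCHIVES/boost-1.71.0.tar.gz",
#                            "ARCHIVES/boost-1.71.0.tar.gz")}
#   l_pinfo_vcs = [("KERNEL", <KERNEL product config>)]   # products to archive from their vcs checkout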
def add_salomeTools(config, tmp_working_dir):
    '''Prepare a version of salomeTools that has a specific local.pyconf file 
       configured for a source package.

    :param config Config: The global configuration.
    :param tmp_working_dir str: The temporary local directory containing some 
                                specific directories or files needed in the 
                                source package
    :return: The path to the local salomeTools directory to add in the package
    :rtype: str
    '''
    # Copy sat in the temporary working directory
    sat_tmp_path = src.Path(os.path.join(tmp_working_dir, "salomeTools"))
    sat_running_path = src.Path(config.VARS.salometoolsway)
    sat_running_path.copy(sat_tmp_path)
    
    # Update the local.pyconf file that contains the path to the project
    local_pyconf_name = "local.pyconf"
    local_pyconf_dir = os.path.join(tmp_working_dir, "salomeTools", "data")
    local_pyconf_file = os.path.join(local_pyconf_dir, local_pyconf_name)
    # Remove the .pyconf file in the root directory of salomeTools if there is
    # any. (For example when launching jobs, a pyconf file describing the jobs 
    # can be here and is not useful) 
    files_or_dir_SAT = os.listdir(os.path.join(tmp_working_dir, "salomeTools"))
    for file_or_dir in files_or_dir_SAT:
        if file_or_dir.endswith(".pyconf") or file_or_dir.endswith(".txt"):
            file_path = os.path.join(tmp_working_dir,
                                     "salomeTools",
                                     file_or_dir)
            os.remove(file_path)
    
    ff = open(local_pyconf_file, "w")
    ff.write(LOCAL_TEMPLATE)
    ff.close()
    
    return sat_tmp_path.path

def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir):
    '''For source packages that require that all products be retrieved using an 
       archive, one has to create an archive for each vcs product.
       So this method calls the clean and source commands of sat and then creates
       the archives.

    :param l_pinfo_vcs List: The list of specific configurations corresponding to
                             each vcs product
    :param sat Sat: The Sat instance that can be called to clean and source the
                    products
    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param tmp_working_dir str: The temporary local directory containing some 
                                specific directories or files needed in the 
                                source package
    :return: the dictionary that stores all the archives to add in the source 
             package. {label : (path_on_local_machine, path_in_archive)}
    :rtype: dict
    '''
    # clean the source directory of all the vcs products, then use the source 
    # command and thus construct an archive that will not contain the patches
    l_prod_names = [pn for pn, __ in l_pinfo_vcs]
    if False: # clean is dangerous in user/SOURCES, fixed in tmp_local_working_dir
      logger.write(_("\nclean sources\n"))
      args_clean = config.VARS.application
      args_clean += " --sources --products "
      args_clean += ",".join(l_prod_names)
      logger.write("WARNING: get_archives_vcs clean\n         '%s'\n" % args_clean, 1)
      sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)
    if True:
      # source
      logger.write(_("get sources\n"))
      args_source = config.VARS.application
      args_source += " --products "
      args_source += ",".join(l_prod_names)
      svgDir = sat.cfg.APPLICATION.workdir
      tmp_local_working_dir = os.path.join(sat.cfg.APPLICATION.workdir, "tmp_package")  # to avoid too many big files in /tmp
      sat.cfg.APPLICATION.workdir = tmp_local_working_dir
      # DBG.write("SSS sat config.APPLICATION.workdir", sat.cfg.APPLICATION, True)
      # DBG.write("sat config id", id(sat.cfg), True)
      # note: config is not the same id() as for sat.source()
      # sat.source(args_source, batch=True, verbose=5, logger_add_link = logger)
      import source
      source.run(args_source, sat, logger) # use this mode as runner.cfg reference
      
      # make the new archives
      d_archives_vcs = {}
      for pn, pinfo in l_pinfo_vcs:
          path_archive = make_archive(pn, pinfo, tmp_local_working_dir)
          logger.write("make archive vcs '%s'\n" % path_archive)
          d_archives_vcs[pn] = (path_archive,
                                os.path.join(ARCHIVE_DIR, pn + ".tgz"))
      sat.cfg.APPLICATION.workdir = svgDir
      # DBG.write("END sat config", sat.cfg.APPLICATION, True)
    return d_archives_vcs

def make_archive(prod_name, prod_info, where):
    '''Create an archive of a product by searching its source directory.

    :param prod_name str: The name of the product.
    :param prod_info Config: The specific configuration corresponding to the 
                             product
    :param where str: The path of the directory where to put the resulting 
                      archive
    :return: The path of the resulting archive
    :rtype: str
    '''
    path_targz_prod = os.path.join(where, prod_name + PACKAGE_EXT)
    tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
    local_path = prod_info.source_dir
    if old_python:
        tar_prod.add(local_path,
                     arcname=prod_name,
                     exclude=exclude_VCS_and_extensions_26)
    else:
        tar_prod.add(local_path,
                     arcname=prod_name,
                     filter=exclude_VCS_and_extensions)
    tar_prod.close()
    return path_targz_prod

def create_project_for_src_package(config, tmp_working_dir, with_vcs, with_ftp):
    '''Create a specific project for a source package.

    :param config Config: The global configuration.
    :param tmp_working_dir str: The temporary local directory containing some 
                                specific directories or files needed in the 
                                source package
    :param with_vcs boolean: True if the package is with vcs products (not 
                             transformed into archive products)
    :param with_ftp boolean: True if the package uses ftp servers to get archives
    :return: The dictionary 
             {"project" : (produced project, project path in the archive)}
    :rtype: Dict
    '''

    # Create in the working temporary directory the full project tree
    project_tmp_dir = os.path.join(tmp_working_dir, PROJECT_DIR)
    products_pyconf_tmp_dir = os.path.join(project_tmp_dir,
                                         "products")
    compil_scripts_tmp_dir = os.path.join(project_tmp_dir,
                                         "products",
                                         "compil_scripts")
    env_scripts_tmp_dir = os.path.join(project_tmp_dir,
                                         "products",
                                         "env_scripts")
    patches_tmp_dir = os.path.join(project_tmp_dir,
                                         "products",
                                         "patches")
    application_tmp_dir = os.path.join(project_tmp_dir,
                                         "applications")
    for directory in [project_tmp_dir,
                      compil_scripts_tmp_dir,
                      env_scripts_tmp_dir,
                      patches_tmp_dir,
                      application_tmp_dir]:
        src.ensure_path_exists(directory)

    # Create the pyconf that contains the information of the project
    project_pyconf_name = "project.pyconf"
    project_pyconf_file = os.path.join(project_tmp_dir, project_pyconf_name)
    ff = open(project_pyconf_file, "w")
    ff.write(PROJECT_TEMPLATE)
    if with_ftp and len(config.PATHS.ARCHIVEFTP) > 0:
        ftp_path='ARCHIVEFTP : "'+config.PATHS.ARCHIVEFTP[0]
        for ftpserver in config.PATHS.ARCHIVEFTP[1:]:
            ftp_path=ftp_path+":"+ftpserver
        ftp_path+='"'
        ff.write("# ftp servers where to search for prerequisite archives\n")
        ff.write(ftp_path)
    # add licence paths if any
    if len(config.PATHS.LICENCEPATH) > 0:
        licence_path='LICENCEPATH : "'+config.PATHS.LICENCEPATH[0]
        for path in config.PATHS.LICENCEPATH[1:]:
            licence_path=licence_path+":"+path
        licence_path+='"'
        ff.write("\n# Where to search for licences\n")
        ff.write(licence_path)
        

    ff.close()
    
    # Loop over the products to get their pyconf and all the scripts 
    # (compilation, environment, patches)
    # and create the pyconf file to add to the project
    lproducts_name = config.APPLICATION.products.keys()
    l_products = src.product.get_products_infos(lproducts_name, config)
    for p_name, p_info in l_products:
        # skip products with the property not_in_package set to yes
        if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
            continue
        find_product_scripts_and_pyconf(p_name,
                                        p_info,
                                        config,
                                        with_vcs,
                                        compil_scripts_tmp_dir,
                                        env_scripts_tmp_dir,
                                        patches_tmp_dir,
                                        products_pyconf_tmp_dir)
    
    # for the application pyconf, we write the config directly
    # (we do not search for the original pyconf file, to avoid problems
    # with overwritten sections and the rm_products key)
    write_application_pyconf(config, application_tmp_dir)
    
    d_project = {"project" : (project_tmp_dir, PROJECT_DIR )}
    return d_project

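# Resulting layout of the temporary project built by the function above:
#   PROJECT/
#     project.pyconf
#     applications/<application>.pyconf
#     products/<product>.pyconf
#     products/compil_scripts/   products/env_scripts/   products/patches/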
def find_product_scripts_and_pyconf(p_name,
                                    p_info,
                                    config,
                                    with_vcs,
                                    compil_scripts_tmp_dir,
                                    env_scripts_tmp_dir,
                                    patches_tmp_dir,
                                    products_pyconf_tmp_dir):
    '''Create a specific pyconf file for a given product. Get its environment 
       script, its compilation script and patches and put them in the temporary
       working directory. This method is used in the source package in order to
       construct the specific project.

    :param p_name str: The name of the product.
    :param p_info Config: The specific configuration corresponding to the 
                             product
    :param config Config: The global configuration.
    :param with_vcs boolean: True if the package is with vcs products (not 
                             transformed into archive products)
    :param compil_scripts_tmp_dir str: The path to the temporary compilation 
                                       scripts directory of the project.
    :param env_scripts_tmp_dir str: The path to the temporary environment script 
                                    directory of the project.
    :param patches_tmp_dir str: The path to the temporary patch scripts 
                                directory of the project.
    :param products_pyconf_tmp_dir str: The path to the temporary product 
                                        scripts directory of the project.
    '''
    
    # read the pyconf of the product
    product_pyconf_cfg = src.pyconf.Config(p_info.from_file)

    # find the compilation script if any
    if src.product.product_has_script(p_info):
        compil_script_path = src.Path(p_info.compil_script)
        compil_script_path.copy(compil_scripts_tmp_dir)

    # find the environment script if any
    if src.product.product_has_env_script(p_info):
        env_script_path = src.Path(p_info.environ.env_script)
        env_script_path.copy(env_scripts_tmp_dir)

    # find the patches if any
    if src.product.product_has_patches(p_info):
        patches = src.pyconf.Sequence()
        for patch_path in p_info.patches:
            p_path = src.Path(patch_path)
            p_path.copy(patches_tmp_dir)
            patches.append(os.path.basename(patch_path), "")

    if (not with_vcs) and src.product.product_is_vcs(p_info):
        # in non vcs mode, if the product is not in archive mode, make it archive mode.

        # depending upon the incremental mode, select the impacted sections
        if "properties" in p_info and "incremental" in p_info.properties and\
            p_info.properties.incremental == "yes":
            sections = ["default", "default_win", p_info.section, p_info.section+"_win"]
        else:
            sections = [p_info.section]
        for section in sections:
            if section in product_pyconf_cfg and "get_source" in product_pyconf_cfg[section]:
                DBG.write("sat package set archive mode to archive for product %s and section %s" %\
                          (p_name,section))
                product_pyconf_cfg[section].get_source = "archive"
                if not "archive_info" in product_pyconf_cfg[section]:
                    product_pyconf_cfg[section].addMapping("archive_info",
                                        src.pyconf.Mapping(product_pyconf_cfg),
                                        "")
                    product_pyconf_cfg[section].archive_info.archive_name =\
                        p_info.name + ".tgz"
    
    if (with_vcs) and src.product.product_is_vcs(p_info):
        # in vcs mode we must replace explicitly the git server url
        # (or it will not be found later because project files are not exported in archives)
        for section in product_pyconf_cfg:
            # replace in all sections of the product pyconf the git repo definition by its substituted value (found in p_info)
            if "git_info" in product_pyconf_cfg[section]:
                for repo in product_pyconf_cfg[section].git_info:
                    if repo in p_info.git_info:
                        product_pyconf_cfg[section].git_info[repo] =  p_info.git_info[repo]

    # write the pyconf file to the temporary project location
    product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir,
                                           p_name + ".pyconf")
    ff = open(product_tmp_pyconf_path, 'w')
    ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
    product_pyconf_cfg.__save__(ff, 1)
    ff.close()

1183
1184 def write_application_pyconf(config, application_tmp_dir):
1185     '''Write the application pyconf file in the specific temporary 
1186        directory containing the specific project of a source package.
1187
1188     :param config Config: The global configuration.
1189     :param application_tmp_dir str: The path to the temporary application 
1190                                     scripts directory of the project.
1191     '''
1192     application_name = config.VARS.application
1193     # write the pyconf file to the temporary application location
1194     application_tmp_pyconf_path = os.path.join(application_tmp_dir,
1195                                                application_name + ".pyconf")
1196     with open(application_tmp_pyconf_path, 'w') as f:
1197         f.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1198         res = src.pyconf.Config()
1199         app = src.pyconf.deepCopyMapping(config.APPLICATION)
1200
1201         # set base mode to "no" for the archive
1202         app.base = "no"
1203
1204         # Change the workdir
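             # in the unpacked archive, the application workdir is the parent directory of salomeTools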
1205         app.workdir = src.pyconf.Reference(
1206                                  app,
1207                                  src.pyconf.DOLLAR,
1208                                  'VARS.salometoolsway + $VARS.sep + ".."')
1209         res.addMapping("APPLICATION", app, "")
1210         res.__save__(f, evaluated=False)
1211     
1212
1213 def sat_package(config, tmp_working_dir, options, logger):
1214     '''Prepare a dictionary that stores all the needed directories and files to
1215        add in a salomeTool package.
1216     
1217     :param tmp_working_dir str: The temporary local working directory 
1218     :param options OptResult: the options of the launched command
1219     :return: the dictionary that stores all the needed directories and files to
1220              add in a salomeTool package.
1221              {label : (path_on_local_machine, path_in_archive)}
1222     :rtype: dict
1223     '''
1224     d_project = {}
1225
1226     # we include sat itself
1227     d_project["all_sat"]=(config.VARS.salometoolsway, "")
1228
1229     # and we overwrite local.pyconf with a clean version.
1230     local_pyconf_tmp_path = os.path.join(tmp_working_dir, "local.pyconf")
1231     local_file_path = os.path.join(config.VARS.datadir, "local.pyconf")
1232     local_cfg = src.pyconf.Config(local_file_path)
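         # reset the project list: the packaged sat must not reference the projects of the build machine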
1233     local_cfg.PROJECTS.project_file_paths=src.pyconf.Sequence(local_cfg.PROJECTS)
1234     local_cfg.LOCAL["base"] = "default"
1235     local_cfg.LOCAL["workdir"] = "default"
1236     local_cfg.LOCAL["log_dir"] = "default"
1237     local_cfg.LOCAL["archive_dir"] = "default"
1238     local_cfg.LOCAL["VCS"] = "None"
1239     local_cfg.LOCAL["tag"] = src.get_salometool_version(config)
1240
1241     # if the archive contains a project, we write its relative path in local.pyconf
1242     if options.project:
1243         project_arch_path = os.path.join("projects", options.project, 
1244                                          os.path.basename(options.project_file_path))
1245         local_cfg.PROJECTS.project_file_paths.append(project_arch_path, "")
1246
1247     ff = open(local_pyconf_tmp_path, 'w')
1248     local_cfg.__save__(ff, 1)
1249     ff.close()
1250     d_project["local.pyconf"]=(local_pyconf_tmp_path, "data/local.pyconf")
1251     return d_project
1252     
1253
1254 def project_package(config, name_project, project_file_path, ftp_mode, tmp_working_dir, embedded_in_sat, logger):
1255     '''Prepare a dictionary that stores all the needed directories and files to
1256        add in a project package.
1257     
1258     :param project_file_path str: The path to the local project.
1259     :param ftp_mode boolean: If True, do not embed the product archives; they will be retrieved by ftp.
1260     :param tmp_working_dir str: The temporary local directory containing some 
1261                                 specific directories or files needed in the 
1262                                 project package
1263     :param embedded_in_sat boolean: True if the project package is embedded in a sat package
1264     :return: the dictionary that stores all the needed directories and files to
1265              add in a project package.
1266              {label : (path_on_local_machine, path_in_archive)}
1267     :rtype: dict
1268     '''
1269     d_project = {}
1270     # Read the project file and get the directories to add to the package
1271     
1272     try: 
1273       project_pyconf_cfg = config.PROJECTS.projects.__getattr__(name_project)
1274     except:
1275       logger.write("""
1276 WARNING: config.PROJECTS.projects.%s does not exist, trying to read it from:\n%s\n""" % (name_project, project_file_path))
1277       project_pyconf_cfg = src.pyconf.Config(project_file_path)
1278       project_pyconf_cfg.PWD = os.path.dirname(project_file_path)
1279     
1280     paths = {"APPLICATIONPATH" : "applications",
1281              "PRODUCTPATH" : "products",
1282              "JOBPATH" : "jobs",
1283              "MACHINEPATH" : "machines"}
1284     if not ftp_mode:
1285         paths["ARCHIVEPATH"] = "archives"
1286
1287     # Loop over the project paths and add them to the package
1288     project_file_name = os.path.basename(project_file_path)
1289     for path in paths:
1290         if path not in project_pyconf_cfg:
1291             continue
1292         if embedded_in_sat:
1293             dest_path = os.path.join("projects", name_project, paths[path])
1294             project_file_dest = os.path.join("projects", name_project, project_file_name)
1295         else:
1296             dest_path = paths[path]
1297             project_file_dest = project_file_name
1298
1299         # Add the directory to the files to add in the package
1300         d_project[path] = (project_pyconf_cfg[path], dest_path)
1301
1302         # Modify the value of the path in the package
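             # (e.g. APPLICATIONPATH becomes $project_path + "/applications" in the packaged project file)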
1303         project_pyconf_cfg[path] = src.pyconf.Reference(
1304                                     project_pyconf_cfg,
1305                                     src.pyconf.DOLLAR,
1306                                     'project_path + "/' + paths[path] + '"')
1307     
1308     # Make project_path point to $PWD (resolved when the packaged project file is loaded)
1309     if "project_path" not in project_pyconf_cfg:
1310         project_pyconf_cfg.addMapping("project_path",
1311                                       src.pyconf.Mapping(project_pyconf_cfg),
1312                                       "")
1313     project_pyconf_cfg.project_path = src.pyconf.Reference(project_pyconf_cfg,
1314                                                            src.pyconf.DOLLAR,
1315                                                            'PWD')
1316     # we don't want to export these two fields
1317     project_pyconf_cfg.__delitem__("file_path")
1318     project_pyconf_cfg.__delitem__("PWD")
1319     if ftp_mode:
1320         project_pyconf_cfg.__delitem__("ARCHIVEPATH")
1321     
1322     # Write the project pyconf file
1323     project_pyconf_tmp_path = os.path.join(tmp_working_dir, project_file_name)
1324     ff = open(project_pyconf_tmp_path, 'w')
1325     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1326     project_pyconf_cfg.__save__(ff, 1)
1327     ff.close()
1328     d_project["Project hat file"] = (project_pyconf_tmp_path, project_file_dest)
1329     
1330     return d_project
1331
1332 def add_readme(config, options, where):
1333     readme_path = os.path.join(where, "README")
1334     with codecs.open(readme_path, "w", 'utf-8') as f:
1335
1336         # templates for building the header
1337         readme_header="""
1338 # This package was generated with sat $version
1339 # Date: $date
1340 # User: $user
1341 # Distribution : $dist
1342
1343 In the following, $$ROOT represents the directory where you have installed 
1344 SALOME (the directory where this file is located).
1345
1346 """
1347         if src.architecture.is_windows():
1348             readme_header = readme_header.replace('$$ROOT','%ROOT%')
1349         readme_compilation_with_binaries="""
1350
1351 compilation based on the binaries used as prerequisites
1352 =======================================================
1353
1354 If you fail to compile the complete application (for example because
1355 you are not root on your system and cannot install missing packages), you
1356 may try a partial compilation based on the binaries.
1357 For that it is necessary to copy the binaries from BINARIES to INSTALL,
1358 and do some substitutions on cmake and .la files (replace the build directories
1359 with local paths).
1360 The procedure to do it is:
1361  1) Remove or rename INSTALL directory if it exists
1362  2) Execute the shell script install_bin.sh:
1363  > cd $ROOT
1364  > ./install_bin.sh
1365  3) Use salomeTools (as explained in the Sources section) and compile only the 
1366     modules you need (with the -p option)
1367
1368 """
1369         readme_header_tpl=string.Template(readme_header)
1370         readme_template_path_bin = os.path.join(config.VARS.internal_dir,
1371                 "README_BIN.template")
1372         readme_template_path_bin_launcher = os.path.join(config.VARS.internal_dir,
1373                 "README_LAUNCHER.template")
1374         readme_template_path_bin_virtapp = os.path.join(config.VARS.internal_dir,
1375                 "README_BIN_VIRTUAL_APP.template")
1376         readme_template_path_src = os.path.join(config.VARS.internal_dir,
1377                 "README_SRC.template")
1378         readme_template_path_pro = os.path.join(config.VARS.internal_dir,
1379                 "README_PROJECT.template")
1380         readme_template_path_sat = os.path.join(config.VARS.internal_dir,
1381                 "README_SAT.template")
1382
1383         # prepare substitution dictionary
1384         d = dict()
1385         d['user'] = config.VARS.user
1386         d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
1387         d['version'] = src.get_salometool_version(config)
1388         d['dist'] = config.VARS.dist
1389         f.write(readme_header_tpl.substitute(d)) # write the general header (common)
1390
1391         if options.binaries or options.sources:
1392             d['application'] = config.VARS.application
1393             d['BINARIES']    = config.INTERNAL.config.binary_dir
1394             d['SEPARATOR'] = config.VARS.sep
1395             if src.architecture.is_windows():
1396                 d['operatingSystem'] = 'Windows'
1397                 d['PYTHON3'] = 'python3'
1398                 d['ROOT']    = '%ROOT%'
1399             else:
1400                 d['operatingSystem'] = 'Linux'
1401                 d['PYTHON3'] = ''
1402                 d['ROOT']    = '$ROOT'
1403             f.write("# Application: " + d['application'] + "\n")
1404             if 'KERNEL' in config.APPLICATION.products:
1405                 VersionSalome = src.get_salome_version(config)
1406                 # Case where SALOME has the launcher that uses the SalomeContext API
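                     # (version 730 corresponds to SALOME 7.3.0)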
1407                 if VersionSalome >= 730:
1408                     d['launcher'] = config.APPLICATION.profile.launcher_name
1409                 else:
1410                     d['virtual_app'] = 'runAppli' # this info is not used for now
1411
1412         # write the specific sections
1413         if options.binaries:
1414             f.write(src.template.substitute(readme_template_path_bin, d))
1415             if "virtual_app" in d:
1416                 f.write(src.template.substitute(readme_template_path_bin_virtapp, d))
1417             if "launcher" in d:
1418                 f.write(src.template.substitute(readme_template_path_bin_launcher, d))
1419
1420         if options.sources:
1421             f.write(src.template.substitute(readme_template_path_src, d))
1422
1423         if options.binaries and options.sources and not src.architecture.is_windows():
1424             f.write(readme_compilation_with_binaries)
1425
1426         if options.project:
1427             f.write(src.template.substitute(readme_template_path_pro, d))
1428
1429         if options.sat:
1430             f.write(src.template.substitute(readme_template_path_sat, d))
1431     
1432     return readme_path
1433
1434 def update_config(config, logger,  prop, value):
1435     '''Remove from config.APPLICATION.products the products that have the property given as input.
1436     
1437     :param config Config: The global config.
1438     :param prop str: The property to filter
1439     :param value str: The value of the property to filter
1440     '''
1441     # if there is no APPLICATION (e.g. sat package -t): nothing to do
1442     if "APPLICATION" in config:
1443         l_product_to_remove = []
1444         for product_name in config.APPLICATION.products.keys():
1445             prod_cfg = src.product.get_product_config(config, product_name)
1446             if src.get_property_in_product_cfg(prod_cfg, prop) == value:
1447                 l_product_to_remove.append(product_name)
1448         for product_name in l_product_to_remove:
1449             config.APPLICATION.products.__delitem__(product_name)
1450             logger.write("Remove product %s with property %s\n" % (product_name, prop), 5)
1451
1452 def description():
1453     '''method that is called when salomeTools is called with --help option.
1454     
1455     :return: The text to display for the package command description.
1456     :rtype: str
1457     '''
1458     return _("""
1459 The package command creates a tar file archive of a product.
1460 There are four kinds of archive, which can be mixed:
1461
1462  1 - The binary archive. 
1463      It contains the product installation directories plus a launcher.
1464  2 - The sources archive. 
1465      It contains the product archives and a project (the application plus salomeTools).
1466  3 - The project archive. 
1467      It contains a project (give the project file path as argument).
1468  4 - The salomeTools archive. 
1469      It contains the salomeTools utility code.
1470
1471 example:
1472  >> sat package SALOME-master --binaries --sources""")
1473   
1474 def run(args, runner, logger):
1475     '''method that is called when salomeTools is called with package parameter.
1476     '''
1477     
1478     # Parse the options
1479     (options, args) = parser.parse_args(args)
1480
1481     # Check that at least one type of package is requested
1482     all_option_types = (options.binaries,
1483                         options.sources,
1484                         options.project not in ["", None],
1485                         options.sat)
1486
1487     # Check if no option for package type
1488     if all_option_types.count(True) == 0:
1489         msg = _("Error: Precise a type for the package\nUse one of the "
1490                 "following options: --binaries, --sources, --project or"
1491                 " --salometools")
1492         logger.write(src.printcolors.printcError(msg), 1)
1493         logger.write("\n", 1)
1494         return 1
1495     
1496     # The directory where to put the package if it is neither binary nor source
1497     package_default_path = runner.cfg.LOCAL.workdir
1498     
1499     # if the package contains binaries or sources:
1500     if options.binaries or options.sources:
1501         # Check that the command has been called with an application
1502         src.check_config_has_application(runner.cfg)
1503
1504         # Display information
1505         logger.write(_("Packaging application %s\n") % src.printcolors.printcLabel(
1506                                                     runner.cfg.VARS.application), 1)
1507         
1508         # Get the default directory where to put the packages
1509         package_default_path = os.path.join(runner.cfg.APPLICATION.workdir, "PACKAGE")
1510         src.ensure_path_exists(package_default_path)
1511         
1512     # if the package contains a project:
1513     if options.project:
1514         # check that the project is visible by SAT
1515         projectNameFile = options.project + ".pyconf"
1516         foundProject = None
1517         for i in runner.cfg.PROJECTS.project_file_paths:
1518             baseName = os.path.basename(i)
1519             if baseName == projectNameFile:
1520                 foundProject = i
1521                 break
1522
1523         if foundProject is None:
1524             local_path = os.path.join(runner.cfg.VARS.salometoolsway, "data", "local.pyconf")
1525             msg = _("""ERROR: the project %(1)s is not visible by salomeTools.
1526 known projects are:
1527 %(2)s
1528
1529 Please add it in file:
1530 %(3)s""" % \
1531                     {"1": options.project, "2": "\n  ".join(runner.cfg.PROJECTS.project_file_paths), "3": local_path})
1532             logger.write(src.printcolors.printcError(msg), 1)
1533             logger.write("\n", 1)
1534             return 1
1535         else:
1536             options.project_file_path = foundProject
1537             src.printcolors.print_value(logger, "Project path", options.project_file_path, 2)
1538     
1539     # Remove the products that are filtered by the --without_properties option
1540     if options.without_properties:
1541         prop, value = options.without_properties
1542         update_config(runner.cfg, logger, prop, value)
1543
1544     # Remove from config the products that have the not_in_package property
1545     update_config(runner.cfg, logger, "not_in_package", "yes")
1546
1547     # get the name of the archive or build it
1548     if options.name:
1549         if os.path.basename(options.name) == options.name:
1550             # only a name (not a path)
1551             archive_name = options.name           
1552             dir_name = package_default_path
1553         else:
1554             archive_name = os.path.basename(options.name)
1555             dir_name = os.path.dirname(options.name)
1556         
1557         # strip the archive extension if it was given
1558         if archive_name[-len(".tgz"):] == ".tgz":
1559             archive_name = archive_name[:-len(".tgz")]
1560         if archive_name[-len(".tar.gz"):] == ".tar.gz":
1561             archive_name = archive_name[:-len(".tar.gz")]
1562         
1563     else:
1564         archive_name=""
1565         dir_name = package_default_path
1566         if options.binaries or options.sources:
1567             archive_name = runner.cfg.APPLICATION.name
1568
1569         if options.binaries:
1570             archive_name += "-"+runner.cfg.VARS.dist
1571             
1572         if options.sources:
1573             archive_name += "-SRC"
1574             if options.with_vcs:
1575                 archive_name += "-VCS"
1576
1577         if options.sat:
1578             archive_name += ("salomeTools_" + src.get_salometool_version(runner.cfg))
1579
1580         if options.project:
1581             if options.sat:
1582                 archive_name += "_" 
1583             archive_name += ("satproject_" + options.project)
1584  
1585         if len(archive_name)==0: # no option worked 
1586             msg = _("Error: Cannot name the archive\n"
1587                     " check if at least one of the following options was "
1588                     "selected : --binaries, --sources, --project or"
1589                     " --salometools")
1590             logger.write(src.printcolors.printcError(msg), 1)
1591             logger.write("\n", 1)
1592             return 1
1593  
1594     path_targz = os.path.join(dir_name, archive_name + PACKAGE_EXT)
1595     
1596     src.printcolors.print_value(logger, "Package path", path_targz, 2)
1597
1598     # Create a working directory for all files that are produced during the
1599     # package creation and that will be removed at the end of the command
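         # (it is named with the current date-hour so that each sat run gets its own directory)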
1600     tmp_working_dir = os.path.join(runner.cfg.VARS.tmp_root, runner.cfg.VARS.datehour)
1601     src.ensure_path_exists(tmp_working_dir)
1602     logger.write("\n", 5)
1603     logger.write(_("The temporary working directory: %s\n" % tmp_working_dir),5)
1604     
1605     logger.write("\n", 3)
1606
1607     msg = _("Preparation of files to add to the archive")
1608     logger.write(src.printcolors.printcLabel(msg), 2)
1609     logger.write("\n", 2)
1610     
1611     d_files_to_add={}  # content of the archive
1612
1613     # a dict to hold the paths that will need to be substituted for user recompilations
1614     d_paths_to_substitute={}  
1615
1616     if options.binaries:
1617         d_bin_files_to_add = binary_package(runner.cfg,
1618                                             logger,
1619                                             options,
1620                                             tmp_working_dir)
1621         # for all binary directories, store the substitution that will be required 
1622         # for extra compilations
1623         for key in d_bin_files_to_add:
1624             if key.endswith("(bin)"):
1625                 source_dir = d_bin_files_to_add[key][0]
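                     # path the binaries will have once install_bin.sh has copied them from BINARIES to INSTALL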
1626                 path_in_archive = d_bin_files_to_add[key][1].replace(
1627                    runner.cfg.INTERNAL.config.binary_dir + runner.cfg.VARS.dist,
1628                    runner.cfg.INTERNAL.config.install_dir)
1629                 if os.path.basename(source_dir)==os.path.basename(path_in_archive):
1630                     # if basename is the same we will just substitute the dirname 
1631                     d_paths_to_substitute[os.path.dirname(source_dir)]=\
1632                         os.path.dirname(path_in_archive)
1633                 else:
1634                     d_paths_to_substitute[source_dir]=path_in_archive
1635
1636         d_files_to_add.update(d_bin_files_to_add)
1637     if options.sources:
1638         d_files_to_add.update(source_package(runner,
1639                                         runner.cfg,
1640                                         logger, 
1641                                         options,
1642                                         tmp_working_dir))
1643         if options.binaries:
1644             # for archives with binaries and sources we provide a shell script able to 
1645             # install binaries for compilation
1646             file_install_bin=produce_install_bin_file(runner.cfg,logger,
1647                                                       tmp_working_dir,
1648                                                       d_paths_to_substitute,
1649                                                       "install_bin.sh")
1650             d_files_to_add.update({"install_bin" : (file_install_bin, "install_bin.sh")})
1651             logger.write("substitutions that need to be done later : \n", 5)
1652             logger.write(str(d_paths_to_substitute), 5)
1653             logger.write("\n", 5)
1654     else:
1655         # the --salometools option is not considered when --sources is selected, as the
1656         # sources archive already includes salomeTools!
1657         if options.sat:
1658             d_files_to_add.update(sat_package(runner.cfg, tmp_working_dir, 
1659                                   options, logger))
1660         
1661     if options.project:
1662         DBG.write("config for package %s" % options.project, runner.cfg)
1663         d_files_to_add.update(project_package(runner.cfg, options.project, options.project_file_path, options.ftp, tmp_working_dir, options.sat, logger))
1664
1665     if not(d_files_to_add):
1666         msg = _("Error: Empty dictionnary to build the archive!\n")
1667         logger.write(src.printcolors.printcError(msg), 1)
1668         logger.write("\n", 1)
1669         return 1
1670
1671     # Add the README file in the package
1672     local_readme_tmp_path = add_readme(runner.cfg, options, tmp_working_dir)
1673     d_files_to_add["README"] = (local_readme_tmp_path, "README")
1674
1675     # Add the additional files given with the add_files option
1676     if options.add_files:
1677         for file_path in options.add_files:
1678             if not os.path.exists(file_path):
1679                 msg = _("WARNING: the file %s is not accessible.\n" % file_path)
                     logger.write(msg, 1)
1680                 continue
1681             file_name = os.path.basename(file_path)
1682             d_files_to_add[file_name] = (file_path, file_name)
1683
1684     logger.write("\n", 2)
1685     logger.write(src.printcolors.printcLabel(_("Actually do the package")), 2)
1686     logger.write("\n", 2)
1687     logger.write("\nfiles and directories to add:\n%s\n\n" % PP.pformat(d_files_to_add), 5)
1688
1689     res = 0
1690     try:
1691         # Creating the object tarfile
1692         tar = tarfile.open(path_targz, mode='w:gz')
1693         
1694         # select the filtering function according to the python version
1695         if old_python:
1696             filter_function = exclude_VCS_and_extensions_26
1697         else:
1698             filter_function = exclude_VCS_and_extensions
1699
1700         # Add the files to the tarfile object
1701         res = add_files(tar, archive_name, d_files_to_add, logger, f_exclude=filter_function)
1702         tar.close()
1703     except KeyboardInterrupt:
1704         logger.write(src.printcolors.printcError("\nERROR: forced interruption\n"), 1)
1705         logger.write(_("Removing the temporary working directory '%s'... ") % tmp_working_dir, 1)
1706         # remove the working directory
1707         shutil.rmtree(tmp_working_dir)
1708         logger.write(_("OK"), 1)
1709         logger.write(_("\n"), 1)
1710         return 1
1711     
1712     # case with no application, e.g. when only packaging sat with 'sat package -t'
1713     try:
1714         app = runner.cfg.APPLICATION
1715     except:
1716         app = None
1717
1718     # unconditionally remove the tmp_local_working_dir
1719     if app is not None:
1720         tmp_local_working_dir = os.path.join(app.workdir, "tmp_package")
1721         if os.path.isdir(tmp_local_working_dir):
1722             shutil.rmtree(tmp_local_working_dir)
1723
1724     # remove the tmp directory, unless user has registered as developer
1725     if os.path.isdir(tmp_working_dir) and (not DBG.isDeveloper()):
1726         shutil.rmtree(tmp_working_dir)
1727     
1728     # Print again the path of the package
1729     logger.write("\n", 2)
1730     src.printcolors.print_value(logger, "Package path", path_targz, 2)
1731     
1732     return res