1 #!/usr/bin/env python
2 #-*- coding:utf-8 -*-
3 #  Copyright (C) 2010-2012  CEA/DEN
4 #
5 #  This library is free software; you can redistribute it and/or
6 #  modify it under the terms of the GNU Lesser General Public
7 #  License as published by the Free Software Foundation; either
8 #  version 2.1 of the License.
9 #
10 #  This library is distributed in the hope that it will be useful,
11 #  but WITHOUT ANY WARRANTY; without even the implied warranty of
12 #  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13 #  Lesser General Public License for more details.
14 #
15 #  You should have received a copy of the GNU Lesser General Public
16 #  License along with this library; if not, write to the Free Software
17 #  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
18
19 import os
20 import stat
21 import shutil
22 import datetime
23 import tarfile
24 import codecs
25 import string
26 import glob
27 import pprint as PP
28 import sys
29 import src
30
31 from application import get_SALOME_modules
32 import src.debug as DBG
33
34 old_python = sys.version_info[0] == 2 and sys.version_info[1] <= 6
35
36 BINARY = "binary"
37 SOURCE = "Source"
38 PROJECT = "Project"
39 SAT = "Sat"
40
41 ARCHIVE_DIR = "ARCHIVES"
42 PROJECT_DIR = "PROJECT"
43
44 IGNORED_DIRS = [".git", ".svn"]
45 IGNORED_EXTENSIONS = []
46
47 PACKAGE_EXT=".tar.gz" # the extension we use for the packages
48
49 PROJECT_TEMPLATE = """#!/usr/bin/env python
50 #-*- coding:utf-8 -*-
51
52 # The path to the archive root directory
53 root_path : $PWD + "/../"
54 # path to the PROJECT
55 project_path : $PWD + "/"
56
57 # Where to search the archives of the products
58 ARCHIVEPATH : $root_path + "ARCHIVES"
59 # Where to search the pyconf of the applications
60 APPLICATIONPATH : $project_path + "applications/"
61 # Where to search the pyconf of the products
62 PRODUCTPATH : $project_path + "products/"
63 # Where to search the pyconf of the jobs of the project
64 JOBPATH : $project_path + "jobs/"
65 # Where to search the pyconf of the machines of the project
66 MACHINEPATH : $project_path + "machines/"
67 """
68
69 LOCAL_TEMPLATE = ("""#!/usr/bin/env python
70 #-*- coding:utf-8 -*-
71
72   LOCAL :
73   {
74     base : 'default'
75     workdir : 'default'
76     log_dir : 'default'
77     archive_dir : 'default'
78     VCS : 'unknown'
79     tag : 'unknown'
80   }
81
82 PROJECTS :
83 {
84 project_file_paths : [$VARS.salometoolsway + $VARS.sep + \"..\" + $VARS.sep"""
85 """ + \"""" + PROJECT_DIR + """\" + $VARS.sep + "project.pyconf"]
86 }
87 """)
88
89 # Define all possible options for the package command:  sat package <options>
90 parser = src.options.Options()
91 parser.add_option('b', 'binaries', 'boolean', 'binaries',
92     _('Optional: Produce a binary package.'), False)
93 parser.add_option('f', 'force_creation', 'boolean', 'force_creation',
94     _('Optional: Only binary package: produce the archive even if '
95       'there are some missing products.'), False)
96 parser.add_option('s', 'sources', 'boolean', 'sources',
97     _('Optional: Produce a compilable archive of the sources of the '
98       'application.'), False)
99 parser.add_option('', 'with_vcs', 'boolean', 'with_vcs',
100     _('Optional: Do not make archives for products in VCS mode (git, cvs, svn). '
101       'Sat prepare will use VCS mode instead to retrieve them.'),
102     False)
103 parser.add_option('', 'ftp', 'boolean', 'ftp',
104     _('Optional: Do not embed archives for products in archive mode. '
105     'Sat prepare will use ftp instead to retrieve them.'),
106     False)
107 parser.add_option('p', 'project', 'string', 'project',
108     _('Optional: Produce an archive that contains a project.'), "")
109 parser.add_option('t', 'salometools', 'boolean', 'sat',
110     _('Optional: Produce an archive that contains salomeTools.'), False)
111 parser.add_option('n', 'name', 'string', 'name',
112     _('Optional: The name or full path of the archive.'), None)
113 parser.add_option('', 'add_files', 'list2', 'add_files',
114     _('Optional: The list of additional files to add to the archive.'), [])
115 parser.add_option('', 'without_properties', 'properties', 'without_properties',
116     _('Optional: Filter the products by their properties.\n\tSyntax: '
117       '--without_properties <property>:<value>'))
118
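# Illustrative invocations (a sketch, not part of the original file; MY_APPL and
# MY_PROJECT are hypothetical names, the flags are the options declared above):
#   ./sat package MY_APPL --binaries                   # binary archive of the application
#   ./sat package MY_APPL --sources --with_vcs         # source archive, vcs products kept in vcs mode
#   ./sat package --salometools --project MY_PROJECT   # archive embedding salomeTools and a project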
119
120 def add_files(tar, name_archive, d_content, logger, f_exclude=None):
121     '''Create an archive containing all directories and files that are given in
122        the d_content argument.
123     
124     :param tar tarfile: The tarfile instance used to make the archive.
125     :param name_archive str: The name of the archive to make.
126     :param d_content dict: The dictionary that contains all directories and files
127                            to add to the archive.
128                            d_content[label] = 
129                                         (path_on_local_machine, path_in_archive)
130     :param logger Logger: the logging instance
131     :param f_exclude Function: the function used to filter out unwanted entries
132     :return: 0 if success, 1 if not.
133     :rtype: int
134     '''
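    # Shape of d_content, as an illustration (the label and paths are hypothetical):
    #   {"KERNEL (bin)": ("/data/INSTALL/KERNEL", "BINARIES-<dist>/KERNEL")}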
135     # get the max length of the names in order to align the display
136     max_len = len(max(d_content.keys(), key=len))
137     
138     success = 0
139     # loop over each directory or file stored in the d_content dictionary
140     names = sorted(d_content.keys())
141     DBG.write("add tar names", names)
142
143     # used to avoid duplications (for pip install in python, or single_install_dir cases)
144     already_added=set() 
145     for name in names:
146         # display information
147         len_points = max_len - len(name) + 3
148         local_path, archive_path = d_content[name]
149         in_archive = os.path.join(name_archive, archive_path)
150         logger.write(name + " " + len_points * "." + " "+ in_archive + " ", 3)
151         # Get the local path and the path in archive 
152         # of the directory or file to add
153         # Add it in the archive
154         try:
155             key=local_path+"->"+in_archive
156             if key not in already_added:
157                 if old_python:
158                     tar.add(local_path,
159                                  arcname=in_archive,
160                                  exclude=exclude_VCS_and_extensions_26)
161                 else:
162                     tar.add(local_path,
163                                  arcname=in_archive,
164                                  filter=exclude_VCS_and_extensions)
165                 already_added.add(key)
166             logger.write(src.printcolors.printcSuccess(_("OK")), 3)
167         except Exception as e:
168             logger.write(src.printcolors.printcError(_("KO ")), 3)
169             logger.write(str(e), 3)
170             success = 1
171         logger.write("\n", 3)
172     return success
173
174
175 def exclude_VCS_and_extensions_26(filename):
176     ''' The function used to exclude the VCS repositories (like .git) from the
177         package (only for python 2.6).
178
179     :param filename Str: The filename to exclude (or not).
180     :return: True if the file has to be excluded
181     :rtype: Boolean
182     '''
183     for dir_name in IGNORED_DIRS:
184         if dir_name in filename:
185             return True
186     for extension in IGNORED_EXTENSIONS:
187         if filename.endswith(extension):
188             return True
189     return False
190
191 def exclude_VCS_and_extensions(tarinfo):
192     ''' The function used to exclude the VCS repositories (like .git) from the
193         package (tarfile filter callback).
194
195     :param tarinfo TarInfo: The tar member to filter (or not).
196     :return: None if the member has to be excluded, the tarinfo otherwise
197     :rtype: tarinfo or None
198     '''
199     filename = tarinfo.name
200     for dir_name in IGNORED_DIRS:
201         if dir_name in filename:
202             return None
203     for extension in IGNORED_EXTENSIONS:
204         if filename.endswith(extension):
205             return None
206     return tarinfo
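# Usage sketch of the filter above (this is the same tarfile call made in add_files
# and make_archive below):
#   tar.add(local_path, arcname=path_in_archive, filter=exclude_VCS_and_extensions)
# Members located under .git or .svn are dropped because the filter returns None for them.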
207
208 def produce_relative_launcher(config,
209                               logger,
210                               file_dir,
211                               file_name,
212                               binaries_dir_name):
213     '''Create a specific SALOME launcher for the binary package. This launcher 
214        uses relative paths.
215     
216     :param config Config: The global configuration.
217     :param logger Logger: the logging instance
218     :param file_dir str: the directory where to put the launcher
219     :param file_name str: The launcher name
220     :param binaries_dir_name str: the name of the directory where the binaries
221                                   are stored in the archive.
222     :return: the path of the produced launcher
223     :rtype: str
224     '''
225     
226     # get KERNEL installation path 
227     kernel_info = src.product.get_product_config(config, "KERNEL")
228     kernel_base_name=os.path.basename(kernel_info.install_dir)
229     if kernel_base_name.startswith("config"):
230         # case of kernel installed in base. We remove "config-i"
231         kernel_base_name=os.path.basename(os.path.dirname(kernel_info.install_dir))
232     
233     kernel_root_dir = os.path.join(binaries_dir_name, kernel_base_name)
234
235     # set kernel bin dir (considering fhs property)
236     kernel_cfg = src.product.get_product_config(config, "KERNEL")
237     if src.get_property_in_product_cfg(kernel_cfg, "fhs"):
238         bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin") 
239     else:
240         bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin","salome") 
241
242     # check if the application contains an application module
243     # check also if the application has a distene product, 
244     # in this case get its licence file name
245     l_product_info = src.product.get_products_infos(config.APPLICATION.products.keys(), config)
246     salome_application_name="Not defined" 
247     distene_licence_file_name=False
248     for prod_name, prod_info in l_product_info:
249         # look for a "salome application" and a distene product
250         if src.get_property_in_product_cfg(prod_info, "is_distene") == "yes":
251             distene_licence_file_name = src.product.product_has_licence(prod_info, 
252                                             config.PATHS.LICENCEPATH) 
253         if src.get_property_in_product_cfg(prod_info, "is_salome_application") == "yes":
254             salome_application_name=prod_info.name
255
256     # if the application contains an application module, we set ABSOLUTE_APPLI_PATH to it
257     # if not we set it to KERNEL_INSTALL_DIR, which is sufficient, except for salome test
258     if salome_application_name == "Not defined":
259         app_root_dir=kernel_root_dir
260     else:
261         app_root_dir=os.path.join(binaries_dir_name, salome_application_name)
262
263     additional_env={}
264     additional_env['sat_bin_kernel_install_dir'] = "out_dir_Path + " +\
265                                                    config.VARS.sep + bin_kernel_install_dir
266     if "python3" in config.APPLICATION and config.APPLICATION.python3 == "yes":
267         additional_env['sat_python_version'] = 3
268     else:
269         additional_env['sat_python_version'] = 2
270
271     additional_env['ABSOLUTE_APPLI_PATH'] = "out_dir_Path" + config.VARS.sep + app_root_dir
272
273     # create an environment file writer
274     writer = src.environment.FileEnvWriter(config,
275                                            logger,
276                                            file_dir,
277                                            src_root=None,
278                                            env_info=None)
279     
280     filepath = os.path.join(file_dir, file_name)
281     # Write
282     writer.write_env_file(filepath,
283                           False,  # for launch
284                           "cfgForPy",
285                           additional_env=additional_env,
286                           no_path_init="False",
287                           for_package = binaries_dir_name)
288     
289     # Little hack to put out_dir_Path outside the strings
290     src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' )
291     src.replace_in_file(filepath, "r'out_dir_Path + ", "out_dir_Path + r'" )
292     
293     # A hack to put a call to a file for distene licence.
294     # It does nothing to an application that has no distene product
295     if distene_licence_file_name:
296         logger.write("Application has a distene licence file! We use it in package launcher", 5)
297         hack_for_distene_licence(filepath, distene_licence_file_name)
298        
299     # change the rights in order to make the file executable for everybody
300     os.chmod(filepath,
301              stat.S_IRUSR |
302              stat.S_IRGRP |
303              stat.S_IROTH |
304              stat.S_IWUSR |
305              stat.S_IXUSR |
306              stat.S_IXGRP |
307              stat.S_IXOTH)
308
309     return filepath
310
311 def hack_for_distene_licence(filepath, licence_file):
312     '''Replace the distene licence env variable by a call to a file.
313     
314     :param filepath Str: The path to the launcher to modify.
315     '''  
316     shutil.move(filepath, filepath + "_old")
317     fileout= filepath
318     filein = filepath + "_old"
319     fin = open(filein, "r")
320     fout = open(fileout, "w")
321     text = fin.readlines()
322     # Find the Distene section
323     num_line = -1
324     for i,line in enumerate(text):
325         if "# Set DISTENE License" in line:
326             num_line = i
327             break
328     if num_line == -1:
329         # No distene product, there is nothing to do
330         fin.close()
331         for line in text:
332             fout.write(line)
333         fout.close()
334         return
335     del text[num_line +1]
336     del text[num_line +1]
337     text_to_insert ="""    try:
338         distene_licence_file=r"%s"
339         if sys.version_info[0] >= 3 and sys.version_info[1] >= 5:
340             import importlib.util
341             spec_dist = importlib.util.spec_from_file_location("distene_licence", distene_licence_file)
342             distene=importlib.util.module_from_spec(spec_dist)
343             spec_dist.loader.exec_module(distene)
344         else:
345             import imp
346             distene = imp.load_source('distene_licence', distene_licence_file)
347         distene.set_distene_variables(context)
348     except:
349         pass\n"""  % licence_file
350     text.insert(num_line + 1, text_to_insert)
351     for line in text:
352         fout.write(line)
353     fin.close()    
354     fout.close()
355     return
356     
357 def produce_relative_env_files(config,
358                               logger,
359                               file_dir,
360                               binaries_dir_name):
361     '''Create some specific environment files for the binary package. These 
362        files use relative paths.
363     
364     :param config Config: The global configuration.
365     :param logger Logger: the logging instance
366     :param file_dir str: the directory where to put the files
367     :param binaries_dir_name str: the name of the directory where the binaries
368                                   are stored in the archive.
369     :return: the list of path of the produced environment files
370     :rtype: List
371     '''  
372     # create an environment file writer
373     writer = src.environment.FileEnvWriter(config,
374                                            logger,
375                                            file_dir,
376                                            src_root=None)
377     
378     if src.architecture.is_windows():
379       shell = "bat"
380       filename  = "env_launch.bat"
381     else:
382       shell = "bash"
383       filename  = "env_launch.sh"
384
385     # Write
386     filepath = writer.write_env_file(filename,
387                           False, # for launch
388                           shell,
389                           for_package = binaries_dir_name)
390
391     # Little hack to put out_dir_Path as environment variable
392     if src.architecture.is_windows() :
393       src.replace_in_file(filepath, '"out_dir_Path', '"%out_dir_Path%' )
394       src.replace_in_file(filepath, '=out_dir_Path', '=%out_dir_Path%' )
395       src.replace_in_file(filepath, ';out_dir_Path', ';%out_dir_Path%' )
396     else:
397       src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )
398       src.replace_in_file(filepath, ':out_dir_Path', ':${out_dir_Path}' )
399
400     # change the rights in order to make the file executable for everybody
401     os.chmod(filepath,
402              stat.S_IRUSR |
403              stat.S_IRGRP |
404              stat.S_IROTH |
405              stat.S_IWUSR |
406              stat.S_IXUSR |
407              stat.S_IXGRP |
408              stat.S_IXOTH)
409     
410     return filepath
411
412 def produce_install_bin_file(config,
413                              logger,
414                              file_dir,
415                              d_sub,
416                              file_name):
417     '''Create a bash shell script which does substitutions in the BINARIES dir
418        in order to use it for extra compilations.
419     
420     :param config Config: The global configuration.
421     :param logger Logger: the logging instance
422     :param file_dir str: the directory where to put the files
423     :param d_sub dict: the dictionary that contains the substitutions to be done
424     :param file_name str: the name of the install script file
425     :return: the produced file
426     :rtype: str
427     '''  
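    # Sketch of the generated substitution loop, assuming for illustration that
    # d_sub == {"/old/prefix/INSTALL": "INSTALL"} and install_dir == "INSTALL":
    #   for f in $(grep -RIl -e /old/prefix/INSTALL INSTALL); do
    #       sed -i "s?/old/prefix/INSTALL?$(pwd)/INSTALL?g" $f
    #   done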
428     # Write
429     filepath = os.path.join(file_dir, file_name)
430     # open the file and write into it
431     # use codec utf-8 as sat variables are in unicode
432     with codecs.open(filepath, "w", 'utf-8') as installbin_file:
433         installbin_template_path = os.path.join(config.VARS.internal_dir,
434                                         "INSTALL_BIN.template")
435         
436         # build the name of the directory that will contain the binaries
437         binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
438         # build the substitution loop
439         loop_cmd = "for f in $(grep -RIl"
440         for key in d_sub:
441             loop_cmd += " -e "+ key
442         loop_cmd += ' ' + config.INTERNAL.config.install_dir +\
443                     '); do\n     sed -i "\n'
444         for key in d_sub:
445             loop_cmd += "        s?" + key + "?$(pwd)/" + d_sub[key] + "?g\n"
446         loop_cmd += '            " $f\ndone'
447
448         d={}
449         d["BINARIES_DIR"] = binaries_dir_name
450         d["SUBSTITUTION_LOOP"]=loop_cmd
451         d["INSTALL_DIR"]=config.INTERNAL.config.install_dir
452         
453         # substitute the template and write it in file
454         content=src.template.substitute(installbin_template_path, d)
455         installbin_file.write(content)
456         # change the rights in order to make the file executable for everybody
457         os.chmod(filepath,
458                  stat.S_IRUSR |
459                  stat.S_IRGRP |
460                  stat.S_IROTH |
461                  stat.S_IWUSR |
462                  stat.S_IXUSR |
463                  stat.S_IXGRP |
464                  stat.S_IXOTH)
465     
466     return filepath
467
468 def product_appli_creation_script(config,
469                                   logger,
470                                   file_dir,
471                                   binaries_dir_name):
472     '''Create a script that can produce an application (EDF style) in the binary
473        package.
474     
475     :param config Config: The global configuration.
476     :param logger Logger: the logging instance
477     :param file_dir str: the directory where to put the file
478     :param binaries_dir_name str: the name of the directory where the binaries
479                                   are stored in the archive.
480     :return: the path of the produced script file
481     :rtype: Str
482     '''
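    # Each product kept below contributes one line of this form to the template
    # (illustrative; "GEOM" is a hypothetical product name):
    #   <module name="GEOM" gui="yes" path="''' + os.path.join(dir_bin_name, "GEOM") + '''"/>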
483     template_name = "create_appli.py.for_bin_packages.template"
484     template_path = os.path.join(config.VARS.internal_dir, template_name)
485     text_to_fill = open(template_path, "r").read()
486     text_to_fill = text_to_fill.replace("TO BE FILLED 1",
487                                         '"' + binaries_dir_name + '"')
488     
489     text_to_add = ""
490     for product_name in get_SALOME_modules(config):
491         product_info = src.product.get_product_config(config, product_name)
492        
493         if src.product.product_is_smesh_plugin(product_info):
494             continue
495
496         if 'install_dir' in product_info and bool(product_info.install_dir):
497             if src.product.product_is_cpp(product_info):
498                 # cpp module
499                 for cpp_name in src.product.get_product_components(product_info):
500                     line_to_add = ("<module name=\"" + 
501                                    cpp_name + 
502                                    "\" gui=\"yes\" path=\"''' + "
503                                    "os.path.join(dir_bin_name, \"" + 
504                                    cpp_name + "\") + '''\"/>")
505             else:
506                 # regular module
507                 line_to_add = ("<module name=\"" + 
508                                product_name + 
509                                "\" gui=\"yes\" path=\"''' + "
510                                "os.path.join(dir_bin_name, \"" + 
511                                product_name + "\") + '''\"/>")
512             text_to_add += line_to_add + "\n"
513     
514     filled_text = text_to_fill.replace("TO BE FILLED 2", text_to_add)
515     
516     tmp_file_path = os.path.join(file_dir, "create_appli.py")
517     ff = open(tmp_file_path, "w")
518     ff.write(filled_text)
519     ff.close()
520     
521     # change the rights in order to make the file executable for everybody
522     os.chmod(tmp_file_path,
523              stat.S_IRUSR |
524              stat.S_IRGRP |
525              stat.S_IROTH |
526              stat.S_IWUSR |
527              stat.S_IXUSR |
528              stat.S_IXGRP |
529              stat.S_IXOTH)
530     
531     return tmp_file_path
532
533 def binary_package(config, logger, options, tmp_working_dir):
534     '''Prepare a dictionary that stores all the needed directories and files to
535        add in a binary package.
536     
537     :param config Config: The global configuration.
538     :param logger Logger: the logging instance
539     :param options OptResult: the options of the launched command
540     :param tmp_working_dir str: The temporary local directory containing some 
541                                 specific directories or files needed in the 
542                                 binary package
543     :return: the dictionary that stores all the needed directories and files to
544              add in a binary package.
545              {label : (path_on_local_machine, path_in_archive)}
546     :rtype: dict
547     '''
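    # Illustrative shape of the returned dictionary (names and paths are hypothetical):
    #   {"KERNEL (bin)":      ("<workdir>/INSTALL/KERNEL", "BINARIES-<dist>/KERNEL"),
    #    "launcher":          ("<tmp_working_dir>/salome", "salome"),
    #    "environment file":  ("<tmp_working_dir>/env_launch.sh", "env_launch.sh")}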
548
549     # Get the list of product installation to add to the archive
550     l_products_name = sorted(config.APPLICATION.products.keys())
551     l_product_info = src.product.get_products_infos(l_products_name,
552                                                     config)
553     l_install_dir = []
554     l_source_dir = []
555     l_not_installed = []
556     l_sources_not_present = []
557     generate_mesa_launcher = False  # a flag to know if we generate a mesa launcher
558     if ("APPLICATION" in config  and
559         "properties"  in config.APPLICATION  and
560         "mesa_launcher_in_package"    in config.APPLICATION.properties  and
561         config.APPLICATION.properties.mesa_launcher_in_package == "yes") :
562             generate_mesa_launcher=True
563
564     for prod_name, prod_info in l_product_info:
565         # skip product with property not_in_package set to yes
566         if src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes":
567             continue  
568
569         # Add the sources of the products that have the property 
570         # sources_in_package : "yes"
571         if src.get_property_in_product_cfg(prod_info,
572                                            "sources_in_package") == "yes":
573             if os.path.exists(prod_info.source_dir):
574                 l_source_dir.append((prod_name, prod_info.source_dir))
575             else:
576                 l_sources_not_present.append(prod_name)
577
578         # ignore the native and fixed products for install directories
579         if (src.product.product_is_native(prod_info) 
580                 or src.product.product_is_fixed(prod_info)
581                 or not src.product.product_compiles(prod_info)):
582             continue
583         if src.product.check_installation(config, prod_info):
584             l_install_dir.append((prod_name, prod_info.install_dir))
585         else:
586             l_not_installed.append(prod_name)
587         
588         # Add also the cpp generated modules (if any)
589         if src.product.product_is_cpp(prod_info):
590             # cpp module
591             for name_cpp in src.product.get_product_components(prod_info):
592                 install_dir = os.path.join(config.APPLICATION.workdir,
593                                            config.INTERNAL.config.install_dir,
594                                            name_cpp) 
595                 if os.path.exists(install_dir):
596                     l_install_dir.append((name_cpp, install_dir))
597                 else:
598                     l_not_installed.append(name_cpp)
599         
600     # check the name of the directory that could contain the binaries
601     # from a previous detar
602     binaries_from_detar = os.path.join(
603                               config.APPLICATION.workdir,
604                               config.INTERNAL.config.binary_dir + config.VARS.dist)
605     if os.path.exists(binaries_from_detar):
606          logger.write("""
607 WARNING: existing binaries directory from a previous detar installation:
608          %s
609          To make a new package from this, you have to:
610          1) install the binaries in the INSTALL directory with the script "install_bin.sh"
611             (see the README file for more details)
612          2) or recompile everything in INSTALL with the "sat compile" command
613             this step is long, and requires some linux packages to be installed 
614             on your system\n
615 """ % binaries_from_detar)
616     
617     # Print warning or error if there are some missing products
618     if len(l_not_installed) > 0:
619         text_missing_prods = ""
620         for p_name in l_not_installed:
621             text_missing_prods += " - " + p_name + "\n"
622         if not options.force_creation:
623             msg = _("ERROR: there are missing product installations:")
624             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
625                                      text_missing_prods),
626                          1)
627             raise src.SatException(msg)
628         else:
629             msg = _("WARNING: there are missing product installations:")
630             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
631                                      text_missing_prods),
632                          1)
633
634     # Do the same for sources
635     if len(l_sources_not_present) > 0:
636         text_missing_prods = ""
637         for p_name in l_sources_not_present:
638             text_missing_prods += " - " + p_name + "\n"
639         if not options.force_creation:
640             msg = _("ERROR: there are missing product sources:")
641             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
642                                      text_missing_prods),
643                          1)
644             raise src.SatException(msg)
645         else:
646             msg = _("WARNING: there are missing product sources:")
647             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
648                                      text_missing_prods),
649                          1)
650  
651     # construct the name of the directory that will contain the binaries
652     if src.architecture.is_windows():
653         binaries_dir_name = config.INTERNAL.config.binary_dir
654     else:
655         binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
656     # construct the correlation table between the product names, their
657     # actual install directories and their install directories in the archive
658     d_products = {}
659     for prod_name, install_dir in l_install_dir:
660         prod_base_name=os.path.basename(install_dir)
661         if prod_base_name.startswith("config"):
662         # case of a product installed in base. We remove "config-i"
663             prod_base_name=os.path.basename(os.path.dirname(install_dir))
664         path_in_archive = os.path.join(binaries_dir_name, prod_base_name)
665         d_products[prod_name + " (bin)"] = (install_dir, path_in_archive)
666         
667     for prod_name, source_dir in l_source_dir:
668         path_in_archive = os.path.join("SOURCES", prod_name)
669         d_products[prod_name + " (sources)"] = (source_dir, path_in_archive)
670
671     # for packages of SALOME applications including KERNEL, 
672     # we produce a salome launcher or a virtual application (depending on salome version)
673     if 'KERNEL' in config.APPLICATION.products:
674         VersionSalome = src.get_salome_version(config)
675         # Case where SALOME has the launcher that uses the SalomeContext API
676         if VersionSalome >= 730:
677             # create the relative launcher and add it to the files to add
678             launcher_name = src.get_launcher_name(config)
679             launcher_package = produce_relative_launcher(config,
680                                                  logger,
681                                                  tmp_working_dir,
682                                                  launcher_name,
683                                                  binaries_dir_name)
684             d_products["launcher"] = (launcher_package, launcher_name)
685
686             # if the application contains mesa products, we generate in addition to the 
687             # classical salome launcher a launcher using mesa and called mesa_salome 
688             # (the mesa launcher will be used for remote usage through ssh).
689             if generate_mesa_launcher:
690                 #if there is one : store the use_mesa property
691                 restore_use_mesa_option=None
692                 if ('properties' in config.APPLICATION and 
693                     'use_mesa' in config.APPLICATION.properties):
694                     restore_use_mesa_option = config.APPLICATION.properties.use_mesa
695
696                 # activate mesa property, and generate a mesa launcher
697                 src.activate_mesa_property(config)  #activate use_mesa property
698                 launcher_mesa_name="mesa_"+launcher_name
699                 launcher_package_mesa = produce_relative_launcher(config,
700                                                      logger,
701                                                      tmp_working_dir,
702                                                      launcher_mesa_name,
703                                                      binaries_dir_name)
704                 d_products["launcher (mesa)"] = (launcher_package_mesa, launcher_mesa_name)
705
706                 # if there was a use_mesa value, we restore it
707                 # else we set it to the default value "no"
708                 if restore_use_mesa_option != None:
709                     config.APPLICATION.properties.use_mesa=restore_use_mesa_option
710                 else:
711                     config.APPLICATION.properties.use_mesa="no"
712
713             if options.sources:
714                 # if we mix binaries and sources, we add a copy of the launcher,
715                 # prefixed with "bin", in order to avoid clashes
716                 d_products["launcher (copy)"] = (launcher_package, "bin"+launcher_name)
717         else:
718             # Provide a script for the creation of an application EDF style
719             appli_script = product_appli_creation_script(config,
720                                                         logger,
721                                                         tmp_working_dir,
722                                                         binaries_dir_name)
723             
724             d_products["appli script"] = (appli_script, "create_appli.py")
725
726     # Put also the environment file
727     env_file = produce_relative_env_files(config,
728                                            logger,
729                                            tmp_working_dir,
730                                            binaries_dir_name)
731
732     if src.architecture.is_windows():
733       filename  = "env_launch.bat"
734     else:
735       filename  = "env_launch.sh"
736     d_products["environment file"] = (env_file, filename)      
737     return d_products
738
739 def source_package(sat, config, logger, options, tmp_working_dir):
740     '''Prepare a dictionary that stores all the needed directories and files to
741        add in a source package.
742     
743     :param config Config: The global configuration.
744     :param logger Logger: the logging instance
745     :param options OptResult: the options of the launched command
746     :param tmp_working_dir str: The temporary local directory containing some 
747                                 specific directories or files needed in the 
748                                 source package
749     :return: the dictionary that stores all the needed directories and files to
750              add in a source package.
751              {label : (path_on_local_machine, path_in_archive)}
752     :rtype: dict
753     '''
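    # Illustrative shape of the returned dictionary (labels and paths are hypothetical):
    #   {"boost":       ("<archive_dir>/boost-1.71.0.tar.gz", "ARCHIVES/boost-1.71.0.tar.gz"),
    #    "KERNEL":      ("<tmp_working_dir>/KERNEL.tgz",      "ARCHIVES/KERNEL.tgz"),
    #    "project":     ("<tmp_working_dir>/PROJECT",         "PROJECT"),
    #    "salomeTools": ("<tmp_working_dir>/salomeTools",     "salomeTools")}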
754     
755     d_archives={}
    l_pinfo_vcs = []  # ensure the list exists even when ftp mode skips get_archives()
756     # Get all the products that are prepared using an archive,
757     # unless ftp mode is specified (in this case the user of the
758     # archive will get the sources through the ftp mode of sat prepare)
759     if not options.ftp:
760         logger.write("Find archive products ... ")
761         d_archives, l_pinfo_vcs = get_archives(config, logger)
762         logger.write("Done\n")
763
764     d_archives_vcs = {}
765     if not options.with_vcs and len(l_pinfo_vcs) > 0:
766         # Make archives with the products that are not prepared using an archive
767         # (git, cvs, svn, etc)
768         logger.write("Construct archives for vcs products ... ")
769         d_archives_vcs = get_archives_vcs(l_pinfo_vcs,
770                                           sat,
771                                           config,
772                                           logger,
773                                           tmp_working_dir)
774         logger.write("Done\n")
775
776     # Create a project
777     logger.write("Create the project ... ")
778     d_project = create_project_for_src_package(config,
779                                                tmp_working_dir,
780                                                options.with_vcs,
781                                                options.ftp)
782     logger.write("Done\n")
783     
784     # Add salomeTools
785     tmp_sat = add_salomeTools(config, tmp_working_dir)
786     d_sat = {"salomeTools" : (tmp_sat, "salomeTools")}
787     
788     # Add a sat symbolic link if not win
789     if not src.architecture.is_windows():
790         tmp_satlink_path = os.path.join(tmp_working_dir, 'sat')
791         try:
792             t = os.getcwd()
793         except:
794             # In the jobs, os.getcwd() can fail
795             t = config.LOCAL.workdir
796         os.chdir(tmp_working_dir)
797         if os.path.lexists(tmp_satlink_path):
798             os.remove(tmp_satlink_path)
799         os.symlink(os.path.join('salomeTools', 'sat'), 'sat')
800         os.chdir(t)
801         
802         d_sat["sat link"] = (tmp_satlink_path, "sat")
803     
804     d_source = src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
805     return d_source
806
807 def get_archives(config, logger):
808     '''Find all the products that are retrieved using an archive and all the
809        products that are retrieved from a vcs (git, cvs, svn) repository.
810     
811     :param config Config: The global configuration.
812     :param logger Logger: the logging instance
813     :return: the dictionary {name_product : 
814              (local path of its archive, path in the package of its archive )}
815              and the list of specific configuration corresponding to the vcs 
816              products
817     :rtype: (Dict, List)
818     '''
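    # Illustrative entries (product names and versions are hypothetical):
    #   d_archives["boost"] = ("/data/ARCHIVES/boost-1.71.0.tar.gz",
    #                          "ARCHIVES/boost-1.71.0.tar.gz")
    #   d_archives["numpy (pip wheel)"] = ("/data/ARCHIVES/wheels/numpy-1.16.4-<tags>.whl",
    #                                      "ARCHIVES/wheels/numpy-1.16.4-<tags>.whl")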
819     # Get the list of product information
820     l_products_name = config.APPLICATION.products.keys()
821     l_product_info = src.product.get_products_infos(l_products_name,
822                                                     config)
823     d_archives = {}
824     l_pinfo_vcs = []
825     for p_name, p_info in l_product_info:
826         # skip product with property not_in_package set to yes
827         if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
828             continue  
829         # ignore the native and fixed products
830         if (src.product.product_is_native(p_info) 
831                 or src.product.product_is_fixed(p_info)):
832             continue
833         if p_info.get_source == "archive":
834             archive_path = p_info.archive_info.archive_name
835             archive_name = os.path.basename(archive_path)
836             d_archives[p_name] = (archive_path,
837                                   os.path.join(ARCHIVE_DIR, archive_name))
838             if (src.appli_test_property(config,"pip", "yes") and 
839                 src.product.product_test_property(p_info,"pip", "yes")):
840                 # if pip mode is activated, and product is managed by pip
841                 pip_wheels_dir=os.path.join(config.LOCAL.archive_dir,"wheels")
842                 pip_wheel_pattern=os.path.join(pip_wheels_dir, 
843                     "%s-%s*" % (p_info.name, p_info.version))
844                 pip_wheel_path=glob.glob(pip_wheel_pattern)
845                 msg_pip_not_found="Error in get_archive, pip wheel for "\
846                                   "product %s-%s was not found in %s directory"
847                 msg_pip_two_or_more="Error in get_archive, several pip wheels for "\
848                                   "product %s-%s were found in %s directory"
849                 if len(pip_wheel_path)==0:
850                     raise src.SatException(msg_pip_not_found %\
851                         (p_info.name, p_info.version, pip_wheels_dir))
852                 if len(pip_wheel_path)>1:
853                     raise src.SatException(msg_pip_two_or_more %\
854                         (p_info.name, p_info.version, pip_wheels_dir))
855
856                 pip_wheel_name=os.path.basename(pip_wheel_path[0])
857                 d_archives[p_name+" (pip wheel)"]=(pip_wheel_path[0], 
858                     os.path.join(ARCHIVE_DIR, "wheels", pip_wheel_name))
859         else:
860             # this product is not managed by archive,
861             # an archive of the vcs directory will be created by get_archives_vcs
862             l_pinfo_vcs.append((p_name, p_info)) 
863             
864     return d_archives, l_pinfo_vcs
865
866 def add_salomeTools(config, tmp_working_dir):
867     '''Prepare a version of salomeTools that has a specific local.pyconf file 
868        configured for a source package.
869
870     :param config Config: The global configuration.
871     :param tmp_working_dir str: The temporary local directory containing some 
872                                 specific directories or files needed in the 
873                                 source package
874     :return: The path to the local salomeTools directory to add in the package
875     :rtype: str
876     '''
877     # Copy sat in the temporary working directory
878     sat_tmp_path = src.Path(os.path.join(tmp_working_dir, "salomeTools"))
879     sat_running_path = src.Path(config.VARS.salometoolsway)
880     sat_running_path.copy(sat_tmp_path)
881     
882     # Update the local.pyconf file that contains the path to the project
883     local_pyconf_name = "local.pyconf"
884     local_pyconf_dir = os.path.join(tmp_working_dir, "salomeTools", "data")
885     local_pyconf_file = os.path.join(local_pyconf_dir, local_pyconf_name)
886     # Remove the .pyconf file in the root directory of salomeTools if there is
887     # any. (For example when launching jobs, a pyconf file describing the jobs 
888     # can be here and is not useful) 
889     files_or_dir_SAT = os.listdir(os.path.join(tmp_working_dir, "salomeTools"))
890     for file_or_dir in files_or_dir_SAT:
891         if file_or_dir.endswith(".pyconf") or file_or_dir.endswith(".txt"):
892             file_path = os.path.join(tmp_working_dir,
893                                      "salomeTools",
894                                      file_or_dir)
895             os.remove(file_path)
896     
897     ff = open(local_pyconf_file, "w")
898     ff.write(LOCAL_TEMPLATE)
899     ff.close()
900     
901     return sat_tmp_path.path
902
903 def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir):
904     '''For source packages that require all products to be retrieved as
905        archives, one has to create archives for the vcs products.
906        So this method calls the clean and source commands of sat and then creates
907        the archives.
908
909     :param l_pinfo_vcs List: The list of specific configuration corresponding to
910                              each vcs product
911     :param sat Sat: The Sat instance that can be called to clean and source the
912                     products
913     :param config Config: The global configuration.
914     :param logger Logger: the logging instance
915     :param tmp_working_dir str: The temporary local directory containing some 
916                                 specific directories or files needed in the 
917                                 source package
918     :return: the dictionary that stores all the archives to add in the source 
919              package. {label : (path_on_local_machine, path_in_archive)}
920     :rtype: dict
921     '''
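    # Rough flow, as a sketch of what the code below does: the application workdir is
    # temporarily redirected to <workdir>/tmp_package, "sat source <application>
    # --products p1,p2,..." is run there, then each fetched source tree is packed
    # into ARCHIVES/<product>.tgz and the original workdir is restored.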
922     # clean the source directory of all the vcs products, then use the source 
923     # command and thus construct an archive that will not contain the patches
924     l_prod_names = [pn for pn, __ in l_pinfo_vcs]
925     if False: # clean is dangerous in user/SOURCES, fixed in tmp_local_working_dir
926       logger.write(_("\nclean sources\n"))
927       args_clean = config.VARS.application
928       args_clean += " --sources --products "
929       args_clean += ",".join(l_prod_names)
930       logger.write("WARNING: get_archives_vcs clean\n         '%s'\n" % args_clean, 1)
931       sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)
932     if True:
933       # source
934       logger.write(_("get sources\n"))
935       args_source = config.VARS.application
936       args_source += " --products "
937       args_source += ",".join(l_prod_names)
938       svgDir = sat.cfg.APPLICATION.workdir
939       tmp_local_working_dir = os.path.join(sat.cfg.APPLICATION.workdir, "tmp_package")  # to avoid too many big files in /tmp
940       sat.cfg.APPLICATION.workdir = tmp_local_working_dir
941       # DBG.write("SSS sat config.APPLICATION.workdir", sat.cfg.APPLICATION, True)
942       # DBG.write("sat config id", id(sat.cfg), True)
943       # note: config is not the same id() as for sat.source()
944       # sat.source(args_source, batch=True, verbose=5, logger_add_link = logger)
945       import source
946       source.run(args_source, sat, logger) #use this mode as runner.cfg reference
947       
948       # make the new archives
949       d_archives_vcs = {}
950       for pn, pinfo in l_pinfo_vcs:
951           path_archive = make_archive(pn, pinfo, tmp_local_working_dir)
952           logger.write("make archive vcs '%s'\n" % path_archive)
953           d_archives_vcs[pn] = (path_archive,
954                                 os.path.join(ARCHIVE_DIR, pn + ".tgz"))
955       sat.cfg.APPLICATION.workdir = svgDir
956       # DBG.write("END sat config", sat.cfg.APPLICATION, True)
957     return d_archives_vcs
958
959 def make_archive(prod_name, prod_info, where):
960     '''Create an archive of a product by searching its source directory.
961
962     :param prod_name str: The name of the product.
963     :param prod_info Config: The specific configuration corresponding to the 
964                              product
965     :param where str: The path of the directory where to put the resulting
966                       archive
967     :return: The path of the resulting archive
968     :rtype: str
969     '''
970     path_targz_prod = os.path.join(where, prod_name + PACKAGE_EXT)
971     tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
972     local_path = prod_info.source_dir
973     if old_python:
974         tar_prod.add(local_path,
975                      arcname=prod_name,
976                      exclude=exclude_VCS_and_extensions_26)
977     else:
978         tar_prod.add(local_path,
979                      arcname=prod_name,
980                      filter=exclude_VCS_and_extensions)
981     tar_prod.close()
982     return path_targz_prod       
983
984 def create_project_for_src_package(config, tmp_working_dir, with_vcs, with_ftp):
985     '''Create a specific project for a source package.
986
987     :param config Config: The global configuration.
988     :param tmp_working_dir str: The temporary local directory containing some 
989                                 specific directories or files needed in the 
990                                 source package
991     :param with_vcs boolean: True if the package is with vcs products (not 
992                              transformed into archive products)
993     :param with_ftp boolean: True if the package use ftp servers to get archives
994     :return: The dictionary 
995              {"project" : (produced project, project path in the archive)}
996     :rtype: Dict
997     '''
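    # Resulting layout inside the archive, as a sketch:
    #   PROJECT/project.pyconf
    #   PROJECT/products/<product>.pyconf
    #   PROJECT/products/{compil_scripts,env_scripts,patches}/...
    #   PROJECT/applications/<application>.pyconf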
998
999     # Create in the working temporary directory the full project tree
1000     project_tmp_dir = os.path.join(tmp_working_dir, PROJECT_DIR)
1001     products_pyconf_tmp_dir = os.path.join(project_tmp_dir,
1002                                          "products")
1003     compil_scripts_tmp_dir = os.path.join(project_tmp_dir,
1004                                          "products",
1005                                          "compil_scripts")
1006     env_scripts_tmp_dir = os.path.join(project_tmp_dir,
1007                                          "products",
1008                                          "env_scripts")
1009     patches_tmp_dir = os.path.join(project_tmp_dir,
1010                                          "products",
1011                                          "patches")
1012     application_tmp_dir = os.path.join(project_tmp_dir,
1013                                          "applications")
1014     for directory in [project_tmp_dir,
1015                       compil_scripts_tmp_dir,
1016                       env_scripts_tmp_dir,
1017                       patches_tmp_dir,
1018                       application_tmp_dir]:
1019         src.ensure_path_exists(directory)
1020
1021     # Create the pyconf that contains the information of the project
1022     project_pyconf_name = "project.pyconf"        
1023     project_pyconf_file = os.path.join(project_tmp_dir, project_pyconf_name)
1024     ff = open(project_pyconf_file, "w")
1025     ff.write(PROJECT_TEMPLATE)
1026     if with_ftp and len(config.PATHS.ARCHIVEFTP) > 0:
1027         ftp_path='ARCHIVEFTP : "'+config.PATHS.ARCHIVEFTP[0]
1028         for ftpserver in config.PATHS.ARCHIVEFTP[1:]:
1029             ftp_path=ftp_path+":"+ftpserver
1030         ftp_path+='"'
1031         ff.write("# ftp servers where to search for prerequisite archives\n")
1032         ff.write(ftp_path)
1033     # add licence paths if any
1034     if len(config.PATHS.LICENCEPATH) > 0:  
1035         licence_path='LICENCEPATH : "'+config.PATHS.LICENCEPATH[0]
1036         for path in config.PATHS.LICENCEPATH[1:]:
1037             licence_path=licence_path+":"+path
1038         licence_path+='"'
1039         ff.write("\n# Where to search for licences\n")
1040         ff.write(licence_path)
1041         
1042
1043     ff.close()
1044     
1045     # Loop over the products to get their pyconf and all the scripts
1046     # (compilation, environment, patches)
1047     # and create the pyconf file to add to the project
1048     lproducts_name = config.APPLICATION.products.keys()
1049     l_products = src.product.get_products_infos(lproducts_name, config)
1050     for p_name, p_info in l_products:
1051         # skip product with property not_in_package set to yes
1052         if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
1053             continue  
1054         find_product_scripts_and_pyconf(p_name,
1055                                         p_info,
1056                                         config,
1057                                         with_vcs,
1058                                         compil_scripts_tmp_dir,
1059                                         env_scripts_tmp_dir,
1060                                         patches_tmp_dir,
1061                                         products_pyconf_tmp_dir)
1062     
1063     # for the application pyconf, we write directly the config
1064     # don't search for the original pyconf file
1065     # to avoid problems with overwrite sections and rm_products key
1066     write_application_pyconf(config, application_tmp_dir)
1067     
1068     d_project = {"project" : (project_tmp_dir, PROJECT_DIR )}
1069     return d_project
1070
1071 def find_product_scripts_and_pyconf(p_name,
1072                                     p_info,
1073                                     config,
1074                                     with_vcs,
1075                                     compil_scripts_tmp_dir,
1076                                     env_scripts_tmp_dir,
1077                                     patches_tmp_dir,
1078                                     products_pyconf_tmp_dir):
1079     '''Create a specific pyconf file for a given product. Get its environment 
1080        script, its compilation script and patches and put it in the temporary
1081        working directory. This method is used in the source package in order to
1082        construct the specific project.
1083
1084     :param p_name str: The name of the product.
1085     :param p_info Config: The specific configuration corresponding to the 
1086                              product
1087     :param config Config: The global configuration.
1088     :param with_vcs boolean: True if the package is with vcs products (not 
1089                              transformed into archive products)
1090     :param compil_scripts_tmp_dir str: The path to the temporary compilation 
1091                                        scripts directory of the project.
1092     :param env_scripts_tmp_dir str: The path to the temporary environment script 
1093                                     directory of the project.
1094     :param patches_tmp_dir str: The path to the temporary patch scripts 
1095                                 directory of the project.
1096     :param products_pyconf_tmp_dir str: The path to the temporary product 
1097                                         scripts directory of the project.
1098     '''
1099     
1100     # read the pyconf of the product
1101     product_pyconf_cfg = src.pyconf.Config(p_info.from_file)
1102
1103     # find the compilation script if any
1104     if src.product.product_has_script(p_info):
1105         compil_script_path = src.Path(p_info.compil_script)
1106         compil_script_path.copy(compil_scripts_tmp_dir)
1107
1108     # find the environment script if any
1109     if src.product.product_has_env_script(p_info):
1110         env_script_path = src.Path(p_info.environ.env_script)
1111         env_script_path.copy(env_scripts_tmp_dir)
1112
1113     # find the patches if any
1114     if src.product.product_has_patches(p_info):
1115         patches = src.pyconf.Sequence()
1116         for patch_path in p_info.patches:
1117             p_path = src.Path(patch_path)
1118             p_path.copy(patches_tmp_dir)
1119             patches.append(os.path.basename(patch_path), "")
1120
1121     if (not with_vcs) and src.product.product_is_vcs(p_info):
1122         # in non-vcs mode, if the product is not in archive mode, switch it to archive mode.
1123
1124         # depending upon the incremental mode, select impacted sections
1125         if "properties" in p_info and "incremental" in p_info.properties and\
1126             p_info.properties.incremental == "yes":
1127             sections = ["default", "default_win", p_info.section, p_info.section+"_win"]
1128         else:
1129             sections = [p_info.section]
1130         for section in sections:
1131             if section in product_pyconf_cfg and "get_source" in product_pyconf_cfg[section]:
1132                 DBG.write("sat package set archive mode to archive for product %s and section %s" %\
1133                           (p_name,section))
1134                 product_pyconf_cfg[section].get_source = "archive"
1135                 if not "archive_info" in product_pyconf_cfg[section]:
1136                     product_pyconf_cfg[section].addMapping("archive_info",
1137                                         src.pyconf.Mapping(product_pyconf_cfg),
1138                                         "")
1139                     product_pyconf_cfg[section].archive_info.archive_name =\
1140                         p_info.name + ".tgz"
1141     
1142     if (with_vcs) and src.product.product_is_vcs(p_info):
1143         # in vcs mode we must explicitly replace the git server url
1144         # (or it will not be found later because project files are not exported in archives)
1145         for section in product_pyconf_cfg:
1146             # replace in all sections of the product pyconf the git repo definition by its substituted value (found in p_info)
1147             if "git_info" in product_pyconf_cfg[section]:
1148                 for repo in product_pyconf_cfg[section].git_info:
1149                     if repo in p_info.git_info:
1150                         product_pyconf_cfg[section].git_info[repo] =  p_info.git_info[repo]
1151
1152     # write the pyconf file to the temporary project location
1153     product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir,
1154                                            p_name + ".pyconf")
1155     ff = open(product_tmp_pyconf_path, 'w')
1156     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1157     product_pyconf_cfg.__save__(ff, 1)
1158     ff.close()
1159
1160
1161 def write_application_pyconf(config, application_tmp_dir):
1162     '''Write the application pyconf file in the specific temporary 
1163        directory containing the specific project of a source package.
1164
1165     :param config Config: The global configuration.
1166     :param application_tmp_dir str: The path to the temporary application 
1167                                     scripts directory of the project.
1168     '''
1169     application_name = config.VARS.application
1170     # write the pyconf file to the temporary application location
1171     application_tmp_pyconf_path = os.path.join(application_tmp_dir,
1172                                                application_name + ".pyconf")
1173     with open(application_tmp_pyconf_path, 'w') as f:
1174         f.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1175         res = src.pyconf.Config()
1176         app = src.pyconf.deepCopyMapping(config.APPLICATION)
1177         # no base in packages
1178         if "base" in app:
1179             app.base = "no" 
1180         # Change the workdir
1181         app.workdir = src.pyconf.Reference(
1182                                  app,
1183                                  src.pyconf.DOLLAR,
1184                                  'VARS.salometoolsway + $VARS.sep + ".."')
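        # i.e. once the archive is unpacked, the application working directory is
        # the parent directory of salomeTools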
1185         res.addMapping("APPLICATION", app, "")
1186         res.__save__(f, evaluated=False)
1187     
1188
1189 def sat_package(config, tmp_working_dir, options, logger):
1190     '''Prepare a dictionary that stores all the needed directories and files to
1191        add to a salomeTools package.
1192     
1193     :param tmp_working_dir str: The temporary local working directory 
1194     :param options OptResult: the options of the launched command
1195     :return: the dictionary that stores all the needed directories and files to
1196              add to a salomeTools package.
1197              {label : (path_on_local_machine, path_in_archive)}
1198     :rtype: dict
1199     '''
1200     d_project = {}
1201
1202     # we include sat itself
1203     d_project["all_sat"]=(config.VARS.salometoolsway, "")
1204
1205     # and we overwrite local.pyconf with a clean version.
1206     local_pyconf_tmp_path = os.path.join(tmp_working_dir, "local.pyconf")
1207     local_file_path = os.path.join(config.VARS.datadir, "local.pyconf")
1208     local_cfg = src.pyconf.Config(local_file_path)
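    # reset the local settings below so that the packaged sat does not keep the
    # paths of the machine that produced the archive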
1209     local_cfg.PROJECTS.project_file_paths=src.pyconf.Sequence(local_cfg.PROJECTS)
1210     local_cfg.LOCAL["base"] = "default"
1211     local_cfg.LOCAL["workdir"] = "default"
1212     local_cfg.LOCAL["log_dir"] = "default"
1213     local_cfg.LOCAL["archive_dir"] = "default"
1214     local_cfg.LOCAL["VCS"] = "None"
1215     local_cfg.LOCAL["tag"] = src.get_salometool_version(config)
1216
1217     # if the archive contains a project, we write its relative path in local.pyconf
1218     if options.project:
1219         project_arch_path = os.path.join("projects", options.project, 
1220                                          os.path.basename(options.project_file_path))
1221         local_cfg.PROJECTS.project_file_paths.append(project_arch_path, "")
1222
1223     ff = open(local_pyconf_tmp_path, 'w')
1224     local_cfg.__save__(ff, 1)
1225     ff.close()
1226     d_project["local.pyconf"]=(local_pyconf_tmp_path, "data/local.pyconf")
1227     return d_project
1228     
1229
1230 def project_package(config, name_project, project_file_path, ftp_mode, tmp_working_dir, embedded_in_sat, logger):
1231     '''Prepare a dictionary that stores all the needed directories and files to
1232        add to a project package.
1233     
1234     :param project_file_path str: The path to the local project.
1235     :param ftp_mode boolean: Do not embed the product archives; the package will rely on ftp mode to retrieve them.
1236     :param tmp_working_dir str: The temporary local directory containing some 
1237                                 specific directories or files needed in the 
1238                                 project package
1239     :param embedded_in_sat boolean : the project package is embedded in a sat package
1240     :return: the dictionary that stores all the needed directories and files to
1241              add to a project package.
1242              {label : (path_on_local_machine, path_in_archive)}
1243     :rtype: dict
1244     '''
1245     d_project = {}
1246     # Read the project file and get the directories to add to the package
1247     
1248     try: 
1249       project_pyconf_cfg = config.PROJECTS.projects.__getattr__(name_project)
1250     except:
1251       logger.write("""
1252 WARNING: config.PROJECTS.projects.%s does not exist, trying to read it from:\n%s\n""" % (name_project, project_file_path))
1253       project_pyconf_cfg = src.pyconf.Config(project_file_path)
1254       project_pyconf_cfg.PWD = os.path.dirname(project_file_path)
1255     
1256     paths = {"APPLICATIONPATH" : "applications",
1257              "PRODUCTPATH" : "products",
1258              "JOBPATH" : "jobs",
1259              "MACHINEPATH" : "machines"}
1260     if not ftp_mode:
1261         paths["ARCHIVEPATH"] = "archives"
1262
1263     # Loop over the project paths and add them to the package
1264     project_file_name = os.path.basename(project_file_path)
1265     for path in paths:
1266         if path not in project_pyconf_cfg:
1267             continue
1268         if embedded_in_sat:
1269             dest_path = os.path.join("projects", name_project, paths[path])
1270             project_file_dest = os.path.join("projects", name_project, project_file_name)
1271         else:
1272             dest_path = paths[path]
1273             project_file_dest = project_file_name
1274
1275         # Add the directory to the files to add in the package
1276         d_project[path] = (project_pyconf_cfg[path], dest_path)
1277
1278         # Modify the value of the path in the package
1279         project_pyconf_cfg[path] = src.pyconf.Reference(
1280                                     project_pyconf_cfg,
1281                                     src.pyconf.DOLLAR,
1282                                     'project_path + "/' + paths[path] + '"')
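        # inside the archive the path becomes relative to the project, e.g.
        # APPLICATIONPATH resolves to $project_path + "/applications"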
1283     
1284     # Modify some values
1285     if "project_path" not in project_pyconf_cfg:
1286         project_pyconf_cfg.addMapping("project_path",
1287                                       src.pyconf.Mapping(project_pyconf_cfg),
1288                                       "")
1289     project_pyconf_cfg.project_path = src.pyconf.Reference(project_pyconf_cfg,
1290                                                            src.pyconf.DOLLAR,
1291                                                            'PWD')
1292     # we don't want to export these two fields
1293     project_pyconf_cfg.__delitem__("file_path")
1294     project_pyconf_cfg.__delitem__("PWD")
1295     if ftp_mode:
1296         project_pyconf_cfg.__delitem__("ARCHIVEPATH")
1297     
1298     # Write the project pyconf file
1299     project_pyconf_tmp_path = os.path.join(tmp_working_dir, project_file_name)
1300     ff = open(project_pyconf_tmp_path, 'w')
1301     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1302     project_pyconf_cfg.__save__(ff, 1)
1303     ff.close()
1304     d_project["Project hat file"] = (project_pyconf_tmp_path, project_file_dest)
1305     
1306     return d_project
1307
1308 def add_readme(config, options, where):
1309     readme_path = os.path.join(where, "README")
1310     with codecs.open(readme_path, "w", 'utf-8') as f:
1311
1312         # templates for building the header
1313         readme_header="""
1314 # This package was generated with sat $version
1315 # Date: $date
1316 # User: $user
1317 # Distribution : $dist
1318
1319 In the following, $$ROOT represents the directory where you have installed 
1320 SALOME (the directory where this file is located).
1321
1322 """
1323         if src.architecture.is_windows():
1324             readme_header = readme_header.replace('$$ROOT','%ROOT%')
1325         readme_compilation_with_binaries="""
1326
1327 compilation based on the binaries used as prerequisites
1328 =======================================================
1329
1330 If you fail to compile the complete application (for example because
1331 you are not root on your system and cannot install missing packages), you
1332 may try a partial compilation based on the binaries.
1333 For that it is necessary to copy the binaries from BINARIES to INSTALL,
1334 and do some substitutions on cmake and .la files (replace the build directories
1335 with local paths).
1336 The procedure to do it is:
1337  1) Remove or rename INSTALL directory if it exists
1338  2) Execute the shell script install_bin.sh:
1339  > cd $ROOT
1340  > ./install_bin.sh
1341  3) Use salomeTools (as explained in the Sources section) and compile only the
1342     modules you need (with the -p option)
1343
1344 """
1345         readme_header_tpl=string.Template(readme_header)
1346         readme_template_path_bin = os.path.join(config.VARS.internal_dir,
1347                 "README_BIN.template")
1348         readme_template_path_bin_launcher = os.path.join(config.VARS.internal_dir,
1349                 "README_LAUNCHER.template")
1350         readme_template_path_bin_virtapp = os.path.join(config.VARS.internal_dir,
1351                 "README_BIN_VIRTUAL_APP.template")
1352         readme_template_path_src = os.path.join(config.VARS.internal_dir,
1353                 "README_SRC.template")
1354         readme_template_path_pro = os.path.join(config.VARS.internal_dir,
1355                 "README_PROJECT.template")
1356         readme_template_path_sat = os.path.join(config.VARS.internal_dir,
1357                 "README_SAT.template")
1358
1359         # prepare substitution dictionary
1360         d = dict()
1361         d['user'] = config.VARS.user
1362         d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
1363         d['version'] = src.get_salometool_version(config)
1364         d['dist'] = config.VARS.dist
1365         f.write(readme_header_tpl.substitute(d)) # write the general header (common)
1366
1367         if options.binaries or options.sources:
1368             d['application'] = config.VARS.application
1369             d['BINARIES']    = config.INTERNAL.config.binary_dir
1370             d['SEPARATOR'] = config.VARS.sep
1371             if src.architecture.is_windows():
1372                 d['operatingSystem'] = 'Windows'
1373                 d['PYTHON3'] = 'python3'
1374                 d['ROOT']    = '%ROOT%'
1375             else:
1376                 d['operatingSystem'] = 'Linux'
1377                 d['PYTHON3'] = ''
1378                 d['ROOT']    = '$ROOT'
1379             f.write("# Application: " + d['application'] + "\n")
1380             if 'KERNEL' in config.APPLICATION.products:
1381                 VersionSalome = src.get_salome_version(config)
1382                 # Case where SALOME has the launcher that uses the SalomeContext API
1383                 if VersionSalome >= 730:
1384                     d['launcher'] = config.APPLICATION.profile.launcher_name
1385                 else:
1386                     d['virtual_app'] = 'runAppli' # this info is not used for now
1387
1388         # write the specific sections
1389         if options.binaries:
1390             f.write(src.template.substitute(readme_template_path_bin, d))
1391             if "virtual_app" in d:
1392                 f.write(src.template.substitute(readme_template_path_bin_virtapp, d))
1393             if "launcher" in d:
1394                 f.write(src.template.substitute(readme_template_path_bin_launcher, d))
1395
1396         if options.sources:
1397             f.write(src.template.substitute(readme_template_path_src, d))
1398
1399         if options.binaries and options.sources and not src.architecture.is_windows():
1400             f.write(readme_compilation_with_binaries)
1401
1402         if options.project:
1403             f.write(src.template.substitute(readme_template_path_pro, d))
1404
1405         if options.sat:
1406             f.write(src.template.substitute(readme_template_path_sat, d))
1407     
1408     return readme_path
1409
1410 def update_config(config, logger,  prop, value):
1411     '''Remove from config.APPLICATION.products the products that have the property given as input.
1412     
1413     :param config Config: The global config.
1414     :param prop str: The property to filter on
1415     :param value str: The value of the property to filter on
1416     '''
1417     # if there is no APPLICATION (e.g. sat package -t): nothing to do
1418     if "APPLICATION" in config:
1419         l_product_to_remove = []
1420         for product_name in config.APPLICATION.products.keys():
1421             prod_cfg = src.product.get_product_config(config, product_name)
1422             if src.get_property_in_product_cfg(prod_cfg, prop) == value:
1423                 l_product_to_remove.append(product_name)
1424         for product_name in l_product_to_remove:
1425             config.APPLICATION.products.__delitem__(product_name)
1426             logger.write("Remove product %s with property %s\n" % (product_name, prop), 5)
1427
1428 def description():
1429     '''method that is called when salomeTools is called with --help option.
1430     
1431     :return: The text to display for the package command description.
1432     :rtype: str
1433     '''
1434     return _("""
1435 The package command creates a tar file archive of a product.
1436 There are four kinds of archive, which can be mixed:
1437
1438  1 - The binary archive. 
1439      It contains the product installation directories plus a launcher.
1440  2 - The sources archive. 
1441      It contains the product archives and a project (the application plus salomeTools).
1442  3 - The project archive. 
1443      It contains a project (give the project file path as argument).
1444  4 - The salomeTools archive. 
1445      It contains the salomeTools utility itself.
1446
1447 example:
1448  >> sat package SALOME-master --binaries --sources""")
1449   
1450 def run(args, runner, logger):
1451     '''method that is called when salomeTools is called with package parameter.
1452     '''
1453     
1454     # Parse the options
1455     (options, args) = parser.parse_args(args)
1456
1457     # Check that a type of package is called, and only one
1458     all_option_types = (options.binaries,
1459                         options.sources,
1460                         options.project not in ["", None],
1461                         options.sat)
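    # at least one of these flags must be set; several can be combined
    # (e.g. --binaries --sources)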
1462
1463     # Check if no option for package type
1464     if all_option_types.count(True) == 0:
1465         msg = _("Error: Specify a type for the package\nUse one of the "
1466                 "following options: --binaries, --sources, --project or"
1467                 " --salometools")
1468         logger.write(src.printcolors.printcError(msg), 1)
1469         logger.write("\n", 1)
1470         return 1
1471     
1472     # The directory where to put the package when it is neither binary nor source
1473     package_default_path = runner.cfg.LOCAL.workdir
1474     
1475     # if the package contains binaries or sources:
1476     if options.binaries or options.sources:
1477         # Check that the command has been called with an application
1478         src.check_config_has_application(runner.cfg)
1479
1480         # Display information
1481         logger.write(_("Packaging application %s\n") % src.printcolors.printcLabel(
1482                                                     runner.cfg.VARS.application), 1)
1483         
1484         # Get the default directory where to put the packages
1485         package_default_path = os.path.join(runner.cfg.APPLICATION.workdir, "PACKAGE")
1486         src.ensure_path_exists(package_default_path)
1487         
1488     # if the package contains a project:
1489     if options.project:
1490         # check that the project is visible to sat
1491         projectNameFile = options.project + ".pyconf"
1492         foundProject = None
1493         for i in runner.cfg.PROJECTS.project_file_paths:
1494             baseName = os.path.basename(i)
1495             if baseName == projectNameFile:
1496                 foundProject = i
1497                 break
1498
1499         if foundProject is None:
1500             local_path = os.path.join(runner.cfg.VARS.salometoolsway, "data", "local.pyconf")
1501             msg = _("""ERROR: the project %(1)s is not visible to salomeTools.
1502 Known projects are:
1503 %(2)s
1504
1505 Please add it to the file:
1506 %(3)s""" % \
1507                     {"1": options.project, "2": "\n  ".join(runner.cfg.PROJECTS.project_file_paths), "3": local_path})
1508             logger.write(src.printcolors.printcError(msg), 1)
1509             logger.write("\n", 1)
1510             return 1
1511         else:
1512             options.project_file_path = foundProject
1513             src.printcolors.print_value(logger, "Project path", options.project_file_path, 2)
1514     
1515     # Remove the products that are filtered by the --without_properties option
1516     if options.without_properties:
1517         prop, value = options.without_properties
1518         update_config(runner.cfg, logger, prop, value)
1519
1520     # Remove from config the products that have the not_in_package property
1521     update_config(runner.cfg, logger, "not_in_package", "yes")
1522
1523     # for binary packages without sources, remove compile time products
1524     if options.binaries and (not options.sources):
1525         update_config(runner.cfg, logger, "compile_time", "yes")
1526     
1527     # get the name of the archive or build it
1528     if options.name:
1529         if os.path.basename(options.name) == options.name:
1530             # only a name (not a path)
1531             archive_name = options.name           
1532             dir_name = package_default_path
1533         else:
1534             archive_name = os.path.basename(options.name)
1535             dir_name = os.path.dirname(options.name)
1536         
1537         # strip the archive extension if present
1538         if archive_name[-len(".tgz"):] == ".tgz":
1539             archive_name = archive_name[:-len(".tgz")]
1540         if archive_name[-len(".tar.gz"):] == ".tar.gz":
1541             archive_name = archive_name[:-len(".tar.gz")]
1542         
1543     else:
1544         archive_name=""
1545         dir_name = package_default_path
1546         if options.binaries or options.sources:
1547             archive_name = runner.cfg.APPLICATION.name
1548
1549         if options.binaries:
1550             archive_name += "-"+runner.cfg.VARS.dist
1551             
1552         if options.sources:
1553             archive_name += "-SRC"
1554             if options.with_vcs:
1555                 archive_name += "-VCS"
1556
1557         if options.sat:
1558             archive_name += ("salomeTools_" + src.get_salometool_version(runner.cfg))
1559
1560         if options.project:
1561             if options.sat:
1562                 archive_name += "_" 
1563             archive_name += ("satproject_" + options.project)
1564  
1565         if len(archive_name)==0: # no option worked 
1566             msg = _("Error: Cannot name the archive\n"
1567                     " check if at least one of the following options was "
1568                 "selected: --binaries, --sources, --project or"
1569                     " --salometools")
1570             logger.write(src.printcolors.printcError(msg), 1)
1571             logger.write("\n", 1)
1572             return 1
1573  
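    # illustrative examples of the resulting name: <application>-<dist>.tar.gz for a
    # binary package, <application>-SRC.tar.gz for a source package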
1574     path_targz = os.path.join(dir_name, archive_name + PACKAGE_EXT)
1575     
1576     src.printcolors.print_value(logger, "Package path", path_targz, 2)
1577
1578     # Create a working directory for all files that are produced during the
1579     # package creation and that will be removed at the end of the command
1580     tmp_working_dir = os.path.join(runner.cfg.VARS.tmp_root, runner.cfg.VARS.datehour)
1581     src.ensure_path_exists(tmp_working_dir)
1582     logger.write("\n", 5)
1583     logger.write(_("The temporary working directory: %s\n" % tmp_working_dir),5)
1584     
1585     logger.write("\n", 3)
1586
1587     msg = _("Preparation of files to add to the archive")
1588     logger.write(src.printcolors.printcLabel(msg), 2)
1589     logger.write("\n", 2)
1590     
1591     d_files_to_add={}  # content of the archive
1592
1593     # a dict to hold the paths that will need to be substituted for user recompilations
1594     d_paths_to_substitute={}  
1595
1596     if options.binaries:
1597         d_bin_files_to_add = binary_package(runner.cfg,
1598                                             logger,
1599                                             options,
1600                                             tmp_working_dir)
1601         # for all binary directories, store the substitution that will be required
1602         # for extra compilations
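        # illustrative example (the real names come from INTERNAL.config): a directory
        # packaged under .../BINARIES-<dist>/<product> is remapped to .../INSTALL/<product>,
        # which is the substitution install_bin.sh will apply at recompilation time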
1603         for key in d_bin_files_to_add:
1604             if key.endswith("(bin)"):
1605                 source_dir = d_bin_files_to_add[key][0]
1606                 path_in_archive = d_bin_files_to_add[key][1].replace(
1607                    runner.cfg.INTERNAL.config.binary_dir + runner.cfg.VARS.dist,
1608                    runner.cfg.INTERNAL.config.install_dir)
1609                 if os.path.basename(source_dir)==os.path.basename(path_in_archive):
1610                     # if basename is the same we will just substitute the dirname 
1611                     d_paths_to_substitute[os.path.dirname(source_dir)]=\
1612                         os.path.dirname(path_in_archive)
1613                 else:
1614                     d_paths_to_substitute[source_dir]=path_in_archive
1615
1616         d_files_to_add.update(d_bin_files_to_add)
1617     if options.sources:
1618         d_files_to_add.update(source_package(runner,
1619                                         runner.cfg,
1620                                         logger, 
1621                                         options,
1622                                         tmp_working_dir))
1623         if options.binaries:
1624             # for archives with bin and sources we provide a shell script able to 
1625             # install binaries for compilation
1626             file_install_bin=produce_install_bin_file(runner.cfg,logger,
1627                                                       tmp_working_dir,
1628                                                       d_paths_to_substitute,
1629                                                       "install_bin.sh")
1630             d_files_to_add.update({"install_bin" : (file_install_bin, "install_bin.sh")})
1631             logger.write("substitutions that need to be done later : \n", 5)
1632             logger.write(str(d_paths_to_substitute), 5)
1633             logger.write("\n", 5)
1634     else:
1635         # the --salometools option is ignored when --sources is selected, as the
1636         # sources archive already includes salomeTools
1637         if options.sat:
1638             d_files_to_add.update(sat_package(runner.cfg, tmp_working_dir, 
1639                                   options, logger))
1640         
1641     if options.project:
1642         DBG.write("config for package %s" % options.project, runner.cfg)
1643         d_files_to_add.update(project_package(runner.cfg, options.project, options.project_file_path, options.ftp, tmp_working_dir, options.sat, logger))
1644
1645     if not(d_files_to_add):
1646         msg = _("Error: Empty dictionary to build the archive!\n")
1647         logger.write(src.printcolors.printcError(msg), 1)
1648         logger.write("\n", 1)
1649         return 1
1650
1651     # Add the README file in the package
1652     local_readme_tmp_path = add_readme(runner.cfg, options, tmp_working_dir)
1653     d_files_to_add["README"] = (local_readme_tmp_path, "README")
1654
1655     # Add the additional files of option add_files
1656     if options.add_files:
1657         for file_path in options.add_files:
1658             if not os.path.exists(file_path):
1659                 logger.write(_("WARNING: the file %s is not accessible.\n" % file_path), 1)
1660                 continue
1661             file_name = os.path.basename(file_path)
1662             d_files_to_add[file_name] = (file_path, file_name)
1663
1664     logger.write("\n", 2)
1665     logger.write(src.printcolors.printcLabel(_("Actually do the package")), 2)
1666     logger.write("\n", 2)
1667     logger.write("\nfiles and directories to add:\n%s\n\n" % PP.pformat(d_files_to_add), 5)
1668
1669     res = 0
1670     try:
1671         # Creating the object tarfile
1672         tar = tarfile.open(path_targz, mode='w:gz')
1673         
1674         # get the filtering function if needed
1675         if old_python:
1676             filter_function = exclude_VCS_and_extensions_26
1677         else:
1678             filter_function = exclude_VCS_and_extensions
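        # both filter functions are expected to skip VCS bookkeeping directories and the
        # extensions listed in IGNORED_DIRS / IGNORED_EXTENSIONS at the top of this file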
1679
1680         # Add the files to the tarfile object
1681         res = add_files(tar, archive_name, d_files_to_add, logger, f_exclude=filter_function)
1682         tar.close()
1683     except KeyboardInterrupt:
1684         logger.write(src.printcolors.printcError("\nERROR: forced interruption\n"), 1)
1685         logger.write(_("Removing the temporary working directory '%s'... ") % tmp_working_dir, 1)
1686         # remove the working directory
1687         shutil.rmtree(tmp_working_dir)
1688         logger.write(_("OK"), 1)
1689         logger.write(_("\n"), 1)
1690         return 1
1691     
1692     # case where there is no application: only sat itself is packaged, as with 'sat package -t'
1693     try:
1694         app = runner.cfg.APPLICATION
1695     except:
1696         app = None
1697
1698     # unconditionally remove the tmp_local_working_dir
1699     if app is not None:
1700         tmp_local_working_dir = os.path.join(app.workdir, "tmp_package")
1701         if os.path.isdir(tmp_local_working_dir):
1702             shutil.rmtree(tmp_local_working_dir)
1703
1704     # remove the tmp directory, unless the user is registered as a developer
1705     if os.path.isdir(tmp_working_dir) and (not DBG.isDeveloper()):
1706         shutil.rmtree(tmp_working_dir)
1707     
1708     # Print again the path of the package
1709     logger.write("\n", 2)
1710     src.printcolors.print_value(logger, "Package path", path_targz, 2)
1711     
1712     return res