typo fix
[tools/sat.git] / commands / package.py
1 #!/usr/bin/env python
2 #-*- coding:utf-8 -*-
3 #  Copyright (C) 2010-2012  CEA/DEN
4 #
5 #  This library is free software; you can redistribute it and/or
6 #  modify it under the terms of the GNU Lesser General Public
7 #  License as published by the Free Software Foundation; either
8 #  version 2.1 of the License.
9 #
10 #  This library is distributed in the hope that it will be useful,
11 #  but WITHOUT ANY WARRANTY; without even the implied warranty of
12 #  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13 #  Lesser General Public License for more details.
14 #
15 #  You should have received a copy of the GNU Lesser General Public
16 #  License along with this library; if not, write to the Free Software
17 #  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
18
19 import os
20 import stat
21 import shutil
22 import datetime
23 import tarfile
24 import codecs
25 import string
26 import glob
27 import pprint as PP
28
29 import src
30
31 from application import get_SALOME_modules
32 import src.debug as DBG
33
34 BINARY = "binary"
35 SOURCE = "Source"
36 PROJECT = "Project"
37 SAT = "Sat"
38
39 ARCHIVE_DIR = "ARCHIVES"
40 PROJECT_DIR = "PROJECT"
41
42 IGNORED_DIRS = [".git", ".svn"]
43 IGNORED_EXTENSIONS = []
44
45 PROJECT_TEMPLATE = """#!/usr/bin/env python
46 #-*- coding:utf-8 -*-
47
48 # The path to the archive root directory
49 root_path : $PWD + "/../"
50 # path to the PROJECT
51 project_path : $PWD + "/"
52
53 # Where to search the archives of the products
54 ARCHIVEPATH : $root_path + "ARCHIVES"
55 # Where to search the pyconf of the applications
56 APPLICATIONPATH : $project_path + "applications/"
57 # Where to search the pyconf of the products
58 PRODUCTPATH : $project_path + "products/"
59 # Where to search the pyconf of the jobs of the project
60 JOBPATH : $project_path + "jobs/"
61 # Where to search the pyconf of the machines of the project
62 MACHINEPATH : $project_path + "machines/"
63 """
64
65 LOCAL_TEMPLATE = ("""#!/usr/bin/env python
66 #-*- coding:utf-8 -*-
67
68   LOCAL :
69   {
70     base : 'default'
71     workdir : 'default'
72     log_dir : 'default'
73     archive_dir : 'default'
74     VCS : None
75     tag : None
76   }
77
78 PROJECTS :
79 {
80 project_file_paths : [$VARS.salometoolsway + $VARS.sep + \"..\" + $VARS.sep"""
81 """ + \"""" + PROJECT_DIR + """\" + $VARS.sep + "project.pyconf"]
82 }
83 """)
84
85 # Define all possible options for the package command: sat package <options>
86 parser = src.options.Options()
87 parser.add_option('b', 'binaries', 'boolean', 'binaries',
88     _('Optional: Produce a binary package.'), False)
89 parser.add_option('f', 'force_creation', 'boolean', 'force_creation',
90     _('Optional: Only binary package: produce the archive even if '
91       'there are some missing products.'), False)
92 parser.add_option('s', 'sources', 'boolean', 'sources',
93     _('Optional: Produce a compilable archive of the sources of the '
94       'application.'), False)
95 parser.add_option('', 'with_vcs', 'boolean', 'with_vcs',
96     _('Optional: Do not make archives for products in VCS mode (git, cvs, svn). ' 
97       'Sat prepare will use VCS mode instead to retrieve them.'),
98     False)
99 parser.add_option('', 'ftp', 'boolean', 'ftp',
100     _('Optional: Do not embed archives for products in archive mode. ' 
101     'Sat prepare will use ftp instead to retrieve them.'),
102     False)
103 parser.add_option('p', 'project', 'string', 'project',
104     _('Optional: Produce an archive that contains a project.'), "")
105 parser.add_option('t', 'salometools', 'boolean', 'sat',
106     _('Optional: Produce an archive that contains salomeTools.'), False)
107 parser.add_option('n', 'name', 'string', 'name',
108     _('Optional: The name or full path of the archive.'), None)
109 parser.add_option('', 'add_files', 'list2', 'add_files',
110     _('Optional: The list of additional files to add to the archive.'), [])
111 parser.add_option('', 'without_properties', 'properties', 'without_properties',
112     _('Optional: Filter the products by their properties.\n\tSyntax: '
113       '--without_properties <property>:<value>'))
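# Typical invocations (illustrative; the application name is hypothetical):
#   sat package SALOME-x.y.z -b              # binary archive of the application
#   sat package SALOME-x.y.z -s --with_vcs   # source archive, keep VCS products
#   sat package -t                           # archive containing salomeTools only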
114
115
116 def add_files(tar, name_archive, d_content, logger, f_exclude=None):
117     '''Create an archive containing all directories and files that are given in
118        the d_content argument.
119     
120     :param tar tarfile: The tarfile instance used to make the archive.
121     :param name_archive str: The name of the archive to make.
122     :param d_content dict: The dictionary that contains all directories and files
123                            to add to the archive.
124                            d_content[label] = 
125                                         (path_on_local_machine, path_in_archive)
126     :param logger Logger: the logging instance
127     :param f_exclude Function: the function used to filter out unwanted entries
128     :return: 0 if success, 1 if not.
129     :rtype: int
130     '''
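    # Illustrative shape of d_content (labels and paths are hypothetical):
    #   {"KERNEL (bin)": ("/home/user/APPLI/INSTALL/KERNEL", "BINARIES-CO7/KERNEL"),
    #    "launcher":     ("/tmp/tmp_package/salome",          "salome")}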
131     # get the max length of the names in order to align the display
132     max_len = len(max(d_content.keys(), key=len))
133     
134     success = 0
135     # loop over each directory or file stored in the d_content dictionary
136     names = sorted(d_content.keys())
137     DBG.write("add tar names", names)
138
139     # used to avoid duplications (for pip install in python, or single_install_dir cases)
140     already_added=set() 
141     for name in names:
142         # display information
143         len_points = max_len - len(name) + 3
144         local_path, archive_path = d_content[name]
145         in_archive = os.path.join(name_archive, archive_path)
146         logger.write(name + " " + len_points * "." + " "+ in_archive + " ", 3)
147         # Add the directory or file to the archive (the local path and the
148         # path in the archive were retrieved above), skipping any entry
149         # that has already been added
150         try:
151             key=local_path+"->"+in_archive
152             if key not in already_added:
153                 tar.add(local_path, arcname=in_archive, exclude=f_exclude)
154                 already_added.add(key)
155             logger.write(src.printcolors.printcSuccess(_("OK")), 3)
156         except Exception as e:
157             logger.write(src.printcolors.printcError(_("KO ")), 3)
158             logger.write(str(e), 3)
159             success = 1
160         logger.write("\n", 3)
161     return success
162
163 def exclude_VCS_and_extensions(filename):
164     ''' The function used to exclude the VCS repository directories (like .git)
165         and the ignored extensions from the package
166
167     :param filename Str: The filename to exclude (or not).
168     :return: True if the file has to be excluded
169     :rtype: Boolean
170     '''
171     for dir_name in IGNORED_DIRS:
172         if dir_name in filename:
173             return True
174     for extension in IGNORED_EXTENSIONS:
175         if filename.endswith(extension):
176             return True
177     return False
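# Illustrative use of the filter above (this mirrors make_archive() further down;
# the paths are hypothetical):
#   tar = tarfile.open("/tmp/KERNEL.tgz", mode='w:gz')
#   tar.add("/path/to/SOURCES/KERNEL", arcname="KERNEL",
#           exclude=exclude_VCS_and_extensions)   # .git / .svn entries are skipped
#   tar.close()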
178
179 def produce_relative_launcher(config,
180                               logger,
181                               file_dir,
182                               file_name,
183                               binaries_dir_name):
184     '''Create a specific SALOME launcher for the binary package. This launcher 
185        uses relative paths.
186     
187     :param config Config: The global configuration.
188     :param logger Logger: the logging instance
189     :param file_dir str: the directory where to put the launcher
190     :param file_name str: The launcher name
191     :param binaries_dir_name str: the name of the directory where the binaries
192                                   are stored, in the archive.
193     :return: the path of the produced launcher
194     :rtype: str
195     '''
196     
197     # get KERNEL installation path 
198     kernel_root_dir = os.path.join(binaries_dir_name, "KERNEL")
199
200     # set kernel bin dir (considering fhs property)
201     kernel_cfg = src.product.get_product_config(config, "KERNEL")
202     if src.get_property_in_product_cfg(kernel_cfg, "fhs"):
203         bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin") 
204     else:
205         bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin","salome") 
206
207     # check if the application contains an application module
208     # check also if the application has a distene product, 
209     # in this case get its licence file name
210     l_product_info = src.product.get_products_infos(config.APPLICATION.products.keys(), config)
211     salome_application_name="Not defined" 
212     distene_licence_file_name=False
213     for prod_name, prod_info in l_product_info:
214         # look for a "salome application" and a distene product
215         if src.get_property_in_product_cfg(prod_info, "is_distene") == "yes":
216             distene_licence_file_name = src.product.product_has_licence(prod_info, 
217                                             config.PATHS.LICENCEPATH) 
218         if src.get_property_in_product_cfg(prod_info, "is_salome_application") == "yes":
219             salome_application_name=prod_info.name
220
221     # if the application contains an application module, we set ABSOLUTE_APPLI_PATH to it
222     # if not we set it to KERNEL_INSTALL_DIR, which is sufficient, except for salome test
223     if salome_application_name == "Not defined":
224         app_root_dir=kernel_root_dir
225     else:
226         app_root_dir=os.path.join(binaries_dir_name, salome_application_name)
227
228     additional_env={}
229     additional_env['sat_bin_kernel_install_dir'] = "out_dir_Path + " +\
230                                                    config.VARS.sep + bin_kernel_install_dir
231     if "python3" in config.APPLICATION and config.APPLICATION.python3 == "yes":
232         additional_env['sat_python_version'] = 3
233     else:
234         additional_env['sat_python_version'] = 2
235
236     additional_env['ABSOLUTE_APPLI_PATH'] = "out_dir_Path" + config.VARS.sep + app_root_dir
237
238     # create an environment file writer
239     writer = src.environment.FileEnvWriter(config,
240                                            logger,
241                                            file_dir,
242                                            src_root=None,
243                                            env_info=None)
244     
245     filepath = os.path.join(file_dir, file_name)
246     # Write
247     writer.write_env_file(filepath,
248                           False,  # for launch
249                           "cfgForPy",
250                           additional_env=additional_env,
251                           no_path_init="False",
252                           for_package = binaries_dir_name)
253     
254     # Little hack to put out_dir_Path outside the strings
255     src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' )
256     src.replace_in_file(filepath, "'out_dir_Path + ", "out_dir_Path + '" )
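    # For illustration, a (hypothetical) line of the generated launcher such as
    #     r"out_dir_Path/BINARIES-XX/KERNEL/bin/salome"
    # becomes, after the substitutions above,
    #     out_dir_Path + r"/BINARIES-XX/KERNEL/bin/salome"
    # so out_dir_Path is evaluated as a Python variable at launch time.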
257     
258     # A hack to put a call to a file for distene licence.
259     # It does nothing to an application that has no distene product
260     if distene_licence_file_name:
261         logger.write("Application has a distene licence file! We use it in package launcher", 5)
262         hack_for_distene_licence(filepath, distene_licence_file_name)
263        
264     # change the rights in order to make the file executable for everybody
265     os.chmod(filepath,
266              stat.S_IRUSR |
267              stat.S_IRGRP |
268              stat.S_IROTH |
269              stat.S_IWUSR |
270              stat.S_IXUSR |
271              stat.S_IXGRP |
272              stat.S_IXOTH)
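    # (the mode set above is 0o755: read/execute for everybody, write for the owner)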
273
274     return filepath
275
276 def hack_for_distene_licence(filepath, licence_file):
277     '''Replace the distene licence env variable by a call to a file.
278     
279     :param filepath Str: The path to the launcher to modify.
    :param licence_file Str: The path to the distene licence file that the launcher will load.
280     '''
281     shutil.move(filepath, filepath + "_old")
282     fileout= filepath
283     filein = filepath + "_old"
284     fin = open(filein, "r")
285     fout = open(fileout, "w")
286     text = fin.readlines()
287     # Find the Distene section
288     num_line = -1
289     for i,line in enumerate(text):
290         if "# Set DISTENE License" in line:
291             num_line = i
292             break
293     if num_line == -1:
294         # No distene product, there is nothing to do
295         fin.close()
296         for line in text:
297             fout.write(line)
298         fout.close()
299         return
300     del text[num_line +1]
301     del text[num_line +1]
302     text_to_insert ="""    try:
303         distene_licence_file="%s"
304         if sys.version_info[0] >= 3 and sys.version_info[1] >= 5:
305             import importlib.util
306             spec_dist = importlib.util.spec_from_file_location("distene_licence", distene_licence_file)
307             distene=importlib.util.module_from_spec(spec_dist)
308             spec_dist.loader.exec_module(distene)
309         else:
310             import imp
311             distene = imp.load_source('distene_licence', distene_licence_file)
312         distene.set_distene_variables(context)
313     except:
314         pass\n"""  % licence_file
315     text.insert(num_line + 1, text_to_insert)
316     for line in text:
317         fout.write(line)
318     fin.close()    
319     fout.close()
320     return
321     
322 def produce_relative_env_files(config,
323                               logger,
324                               file_dir,
325                               binaries_dir_name):
326     '''Create some specific environment files for the binary package. These 
327        files use relative paths.
328     
329     :param config Config: The global configuration.
330     :param logger Logger: the logging instance
331     :param file_dir str: the directory where to put the files
332     :param binaries_dir_name str: the name of the directory where the binaries
333                                   are stored, in the archive.
334     :return: the list of path of the produced environment files
335     :rtype: List
336     '''  
337     # create an environment file writer
338     writer = src.environment.FileEnvWriter(config,
339                                            logger,
340                                            file_dir,
341                                            src_root=None)
342     
343     if src.architecture.is_windows():
344       shell = "bat"
345       filename  = "env_launch.bat"
346     else:
347       shell = "bash"
348       filename  = "env_launch.sh"
349
350     # Write
351     filepath = writer.write_env_file(filename,
352                           False, # for launch
353                           shell,
354                           for_package = binaries_dir_name)
355
356     # Little hack to put out_dir_Path as an environment variable
357     src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )
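    # For illustration, a (hypothetical) line of the generated env file such as
    #     export PATH="out_dir_Path/BINARIES-XX/KERNEL/bin:$PATH"
    # becomes
    #     export PATH="${out_dir_Path}/BINARIES-XX/KERNEL/bin:$PATH"
    # so the shell expands out_dir_Path when the file is sourced.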
358
359     # change the rights in order to make the file executable for everybody
360     os.chmod(filepath,
361              stat.S_IRUSR |
362              stat.S_IRGRP |
363              stat.S_IROTH |
364              stat.S_IWUSR |
365              stat.S_IXUSR |
366              stat.S_IXGRP |
367              stat.S_IXOTH)
368     
369     return filepath
370
371 def produce_install_bin_file(config,
372                              logger,
373                              file_dir,
374                              d_sub,
375                              file_name):
376     '''Create a bash shell script which does substitutions in the BINARIES dir 
377        in order to use it for extra compilations.
378     
379     :param config Config: The global configuration.
380     :param logger Logger: the logging instance
381     :param file_dir str: the directory where to put the files
382     :param d_sub dict: the dictionary that contains the substitutions to be done
383     :param file_name str: the name of the install script file
384     :return: the produced file
385     :rtype: str
386     '''  
387     # Write
388     filepath = os.path.join(file_dir, file_name)
389     # open the file and write into it
390     # use codec utf-8 as sat variables are in unicode
391     with codecs.open(filepath, "w", 'utf-8') as installbin_file:
392         installbin_template_path = os.path.join(config.VARS.internal_dir,
393                                         "INSTALL_BIN.template")
394         
395         # build the name of the directory that will contain the binaries
396         binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
397         # build the substitution loop
398         loop_cmd = "for f in $(grep -RIl"
399         for key in d_sub:
400             loop_cmd += " -e "+ key
401         loop_cmd += ' ' + config.INTERNAL.config.install_dir +\
402                     '); do\n     sed -i "\n'
403         for key in d_sub:
404             loop_cmd += "        s?" + key + "?$(pwd)/" + d_sub[key] + "?g\n"
405         loop_cmd += '            " $f\ndone'
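        # For illustration, with d_sub = {"/old/prefix": "BINARIES-XX/KERNEL"} and an
        # install directory named "INSTALL" (both hypothetical), loop_cmd reads roughly:
        #   for f in $(grep -RIl -e /old/prefix INSTALL); do
        #        sed -i "
        #           s?/old/prefix?$(pwd)/BINARIES-XX/KERNEL?g
        #           " $f
        #   done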
406
407         d={}
408         d["BINARIES_DIR"] = binaries_dir_name
409         d["SUBSTITUTION_LOOP"]=loop_cmd
410         d["INSTALL_DIR"]=config.INTERNAL.config.install_dir
411         
412         # substitute the template and write it in file
413         content=src.template.substitute(installbin_template_path, d)
414         installbin_file.write(content)
415         # change the rights in order to make the file executable for everybody
416         os.chmod(filepath,
417                  stat.S_IRUSR |
418                  stat.S_IRGRP |
419                  stat.S_IROTH |
420                  stat.S_IWUSR |
421                  stat.S_IXUSR |
422                  stat.S_IXGRP |
423                  stat.S_IXOTH)
424     
425     return filepath
426
427 def product_appli_creation_script(config,
428                                   logger,
429                                   file_dir,
430                                   binaries_dir_name):
431     '''Create a script that can produce an application (EDF style) in the binary
432        package.
433     
434     :param config Config: The global configuration.
435     :param logger Logger: the logging instance
436     :param file_dir str: the directory where to put the file
437     :param binaries_dir_name str: the name of the directory where the binaries
438                                   are stored, in the archive.
439     :return: the path of the produced script file
440     :rtype: Str
441     '''
442     template_name = "create_appli.py.for_bin_packages.template"
443     template_path = os.path.join(config.VARS.internal_dir, template_name)
444     text_to_fill = open(template_path, "r").read()
445     text_to_fill = text_to_fill.replace("TO BE FILLED 1",
446                                         '"' + binaries_dir_name + '"')
447     
448     text_to_add = ""
449     for product_name in get_SALOME_modules(config):
450         product_info = src.product.get_product_config(config, product_name)
451        
452         if src.product.product_is_smesh_plugin(product_info):
453             continue
454
455         if 'install_dir' in product_info and bool(product_info.install_dir):
456             if src.product.product_is_cpp(product_info):
457                 # cpp module
458                 for cpp_name in src.product.get_product_components(product_info):
459                     line_to_add = ("<module name=\"" + 
460                                    cpp_name + 
461                                    "\" gui=\"yes\" path=\"''' + "
462                                    "os.path.join(dir_bin_name, \"" + 
463                                    cpp_name + "\") + '''\"/>")
464             else:
465                 # regular module
466                 line_to_add = ("<module name=\"" + 
467                                product_name + 
468                                "\" gui=\"yes\" path=\"''' + "
469                                "os.path.join(dir_bin_name, \"" + 
470                                product_name + "\") + '''\"/>")
471             text_to_add += line_to_add + "\n"
472     
473     filled_text = text_to_fill.replace("TO BE FILLED 2", text_to_add)
474     
475     tmp_file_path = os.path.join(file_dir, "create_appli.py")
476     ff = open(tmp_file_path, "w")
477     ff.write(filled_text)
478     ff.close()
479     
480     # change the rights in order to make the file executable for everybody
481     os.chmod(tmp_file_path,
482              stat.S_IRUSR |
483              stat.S_IRGRP |
484              stat.S_IROTH |
485              stat.S_IWUSR |
486              stat.S_IXUSR |
487              stat.S_IXGRP |
488              stat.S_IXOTH)
489     
490     return tmp_file_path
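# For each (hypothetical) product, e.g. GEOM, the generated create_appli.py thus
# contains a line of the form:
#   <module name="GEOM" gui="yes" path="''' + os.path.join(dir_bin_name, "GEOM") + '''"/>
# which resolves to the product directory inside the binary package.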
491
492 def binary_package(config, logger, options, tmp_working_dir):
493     '''Prepare a dictionary that stores all the needed directories and files to
494        add in a binary package.
495     
496     :param config Config: The global configuration.
497     :param logger Logger: the logging instance
498     :param options OptResult: the options of the launched command
499     :param tmp_working_dir str: The temporary local directory containing some 
500                                 specific directories or files needed in the 
501                                 binary package
502     :return: the dictionary that stores all the needed directories and files to
503              add in a binary package.
504              {label : (path_on_local_machine, path_in_archive)}
505     :rtype: dict
506     '''
507
508     # Get the list of product installations to add to the archive
509     l_products_name = sorted(config.APPLICATION.products.keys())
510     l_product_info = src.product.get_products_infos(l_products_name,
511                                                     config)
512     l_install_dir = []
513     l_source_dir = []
514     l_not_installed = []
515     l_sources_not_present = []
516     generate_mesa_launcher = False  # a flag to know if we generate a mesa launcher
517     if ("APPLICATION" in config  and
518         "properties"  in config.APPLICATION  and
519         "mesa_launcher_in_package"    in config.APPLICATION.properties  and
520         config.APPLICATION.properties.mesa_launcher_in_package == "yes") :
521             generate_mesa_launcher=True
522
523     for prod_name, prod_info in l_product_info:
524         # skip product with property not_in_package set to yes
525         if src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes":
526             continue  
527
528         # Add the sources of the products that have the property 
529         # sources_in_package : "yes"
530         if src.get_property_in_product_cfg(prod_info,
531                                            "sources_in_package") == "yes":
532             if os.path.exists(prod_info.source_dir):
533                 l_source_dir.append((prod_name, prod_info.source_dir))
534             else:
535                 l_sources_not_present.append(prod_name)
536
537         # ignore the native and fixed products for install directories
538         if (src.product.product_is_native(prod_info) 
539                 or src.product.product_is_fixed(prod_info)
540                 or not src.product.product_compiles(prod_info)):
541             continue
542         if src.product.check_installation(config, prod_info):
543             l_install_dir.append((prod_name, prod_info.install_dir))
544         else:
545             l_not_installed.append(prod_name)
546         
547         # Add also the cpp generated modules (if any)
548         if src.product.product_is_cpp(prod_info):
549             # cpp module
550             for name_cpp in src.product.get_product_components(prod_info):
551                 install_dir = os.path.join(config.APPLICATION.workdir,
552                                            config.INTERNAL.config.install_dir,
553                                            name_cpp) 
554                 if os.path.exists(install_dir):
555                     l_install_dir.append((name_cpp, install_dir))
556                 else:
557                     l_not_installed.append(name_cpp)
558         
559     # check the name of the directory that could contain the binaries 
560     # from a previous detar
561     binaries_from_detar = os.path.join(
562                               config.APPLICATION.workdir,
563                               config.INTERNAL.config.binary_dir + config.VARS.dist)
564     if os.path.exists(binaries_from_detar):
565          logger.write("""
566 WARNING: existing binaries directory from previous detar installation:
567          %s
568          To make a new package from this, you have to: 
569          1) install the binaries in the INSTALL directory with the script "install_bin.sh" 
570             (see the README file for more details)
571          2) or recompile everything in INSTALL with the "sat compile" command 
572             (this step is long, and requires some linux packages to be installed 
573             on your system)\n
574 """ % binaries_from_detar)
575     
576     # Print warning or error if there are some missing products
577     if len(l_not_installed) > 0:
578         text_missing_prods = ""
579         for p_name in l_not_installed:
580             text_missing_prods += "-" + p_name + "\n"
581         if not options.force_creation:
582             msg = _("ERROR: there are missing products installations:")
583             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
584                                      text_missing_prods),
585                          1)
586             return None
587         else:
588             msg = _("WARNING: there are missing products installations:")
589             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
590                                      text_missing_prods),
591                          1)
592
593     # Do the same for sources
594     if len(l_sources_not_present) > 0:
595         text_missing_prods = ""
596         for p_name in l_sources_not_present:
597             text_missing_prods += "-" + p_name + "\n"
598         if not options.force_creation:
599             msg = _("ERROR: there are missing products sources:")
600             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
601                                      text_missing_prods),
602                          1)
603             return None
604         else:
605             msg = _("WARNING: there are missing products sources:")
606             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
607                                      text_missing_prods),
608                          1)
609  
610     # construct the name of the directory that will contain the binaries
611     binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
612     
613     # construct the correlation table between the product names, their 
614     # actual install directories and their install directories in the archive
615     d_products = {}
616     for prod_name, install_dir in l_install_dir:
617         path_in_archive = os.path.join(binaries_dir_name, os.path.basename(install_dir))
618         d_products[prod_name + " (bin)"] = (install_dir, path_in_archive)
619         
620     for prod_name, source_dir in l_source_dir:
621         path_in_archive = os.path.join("SOURCES", prod_name)
622         d_products[prod_name + " (sources)"] = (source_dir, path_in_archive)
623
624     # for packages of SALOME applications including KERNEL, 
625     # we produce a salome launcher or a virtual application (depending on salome version)
626     if 'KERNEL' in config.APPLICATION.products:
627         VersionSalome = src.get_salome_version(config)
628         # Case where SALOME has the launcher that uses the SalomeContext API
629         if VersionSalome >= 730:
630             # create the relative launcher and add it to the files to add
631             launcher_name = src.get_launcher_name(config)
632             launcher_package = produce_relative_launcher(config,
633                                                  logger,
634                                                  tmp_working_dir,
635                                                  launcher_name,
636                                                  binaries_dir_name)
637             d_products["launcher"] = (launcher_package, launcher_name)
638
639             # if the application contains mesa products, we generate in addition to the 
640             # classical salome launcher a launcher using mesa and called mesa_salome 
641             # (the mesa launcher will be used for remote usage through ssh).
642             if generate_mesa_launcher:
643                 #if there is one : store the use_mesa property
644                 restore_use_mesa_option=None
645                 if ('properties' in config.APPLICATION and 
646                     'use_mesa' in config.APPLICATION.properties):
647                     restore_use_mesa_option = config.APPLICATION.properties.use_mesa
648
649                 # activate mesa property, and generate a mesa launcher
650                 src.activate_mesa_property(config)  #activate use_mesa property
651                 launcher_mesa_name="mesa_"+launcher_name
652                 launcher_package_mesa = produce_relative_launcher(config,
653                                                      logger,
654                                                      tmp_working_dir,
655                                                      launcher_mesa_name,
656                                                      binaries_dir_name)
657                 d_products["launcher (mesa)"] = (launcher_package_mesa, launcher_mesa_name)
658
659                 # if there was a use_mesa value, we restore it
660                 # else we set it to the default value "no"
661                 if restore_use_mesa_option != None:
662                     config.APPLICATION.properties.use_mesa=restore_use_mesa_option
663                 else:
664                     config.APPLICATION.properties.use_mesa="no"
665
666             if options.sources:
667                 # if we mix binaries and sources, we add a copy of the launcher, 
668                 # prefixed with "bin", in order to avoid clashes
669                 d_products["launcher (copy)"] = (launcher_package, "bin"+launcher_name)
670         else:
671             # Provide a script for the creation of an application EDF style
672             appli_script = product_appli_creation_script(config,
673                                                         logger,
674                                                         tmp_working_dir,
675                                                         binaries_dir_name)
676             
677             d_products["appli script"] = (appli_script, "create_appli.py")
678
679     # Put also the environment file
680     env_file = produce_relative_env_files(config,
681                                            logger,
682                                            tmp_working_dir,
683                                            binaries_dir_name)
684
685     if src.architecture.is_windows():
686       filename  = "env_launch.bat"
687     else:
688       filename  = "env_launch.sh"
689     d_products["environment file"] = (env_file, filename)      
690
691     return d_products
692
693 def source_package(sat, config, logger, options, tmp_working_dir):
694     '''Prepare a dictionary that stores all the needed directories and files to
695        add in a source package.
696     
697     :param config Config: The global configuration.
698     :param logger Logger: the logging instance
699     :param options OptResult: the options of the launched command
700     :param tmp_working_dir str: The temporary local directory containing some 
701                                 specific directories or files needed in the 
702                                 source package
703     :return: the dictionary that stores all the needed directories and files to
704              add in a source package.
705              {label : (path_on_local_machine, path_in_archive)}
706     :rtype: dict
707     '''
708     
709     d_archives={}
    l_pinfo_vcs = []  # initialized here so it exists even when --ftp skips get_archives() below
710     # Get all the products that are prepared using an archive
711     # unless ftp mode is specified (in this case the user of the
712     # archive will get the sources through the ftp mode of sat prepare)
713     if not options.ftp:
714         logger.write("Find archive products ... ")
715         d_archives, l_pinfo_vcs = get_archives(config, logger)
716         logger.write("Done\n")
717
718     d_archives_vcs = {}
719     if not options.with_vcs and len(l_pinfo_vcs) > 0:
720         # Make archives with the products that are not prepared using an archive
721         # (git, cvs, svn, etc)
722         logger.write("Construct archives for vcs products ... ")
723         d_archives_vcs = get_archives_vcs(l_pinfo_vcs,
724                                           sat,
725                                           config,
726                                           logger,
727                                           tmp_working_dir)
728         logger.write("Done\n")
729
730     # Create a project
731     logger.write("Create the project ... ")
732     d_project = create_project_for_src_package(config,
733                                                tmp_working_dir,
734                                                options.with_vcs,
735                                                options.ftp)
736     logger.write("Done\n")
737     
738     # Add salomeTools
739     tmp_sat = add_salomeTools(config, tmp_working_dir)
740     d_sat = {"salomeTools" : (tmp_sat, "salomeTools")}
741     
742     # Add a sat symbolic link if not win
743     if not src.architecture.is_windows():
744         tmp_satlink_path = os.path.join(tmp_working_dir, 'sat')
745         try:
746             t = os.getcwd()
747         except:
748             # In the jobs, os.getcwd() can fail
749             t = config.LOCAL.workdir
750         os.chdir(tmp_working_dir)
751         if os.path.lexists(tmp_satlink_path):
752             os.remove(tmp_satlink_path)
753         os.symlink(os.path.join('salomeTools', 'sat'), 'sat')
754         os.chdir(t)
755         
756         d_sat["sat link"] = (tmp_satlink_path, "sat")
757     
758     d_source = src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
759     return d_source
760
761 def get_archives(config, logger):
762     '''Find all the products that are retrieved using an archive and all the
763        products that are retrieved from a vcs (git, cvs, svn) repository.
764     
765     :param config Config: The global configuration.
766     :param logger Logger: the logging instance
767     :return: the dictionary {name_product : 
768              (local path of its archive, path in the package of its archive )}
769              and the list of specific configuration corresponding to the vcs 
770              products
771     :rtype: (Dict, List)
772     '''
773     # Get the list of product information
774     l_products_name = config.APPLICATION.products.keys()
775     l_product_info = src.product.get_products_infos(l_products_name,
776                                                     config)
777     d_archives = {}
778     l_pinfo_vcs = []
779     for p_name, p_info in l_product_info:
780         # skip product with property not_in_package set to yes
781         if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
782             continue  
783         # ignore the native and fixed products
784         if (src.product.product_is_native(p_info) 
785                 or src.product.product_is_fixed(p_info)):
786             continue
787         if p_info.get_source == "archive":
788             archive_path = p_info.archive_info.archive_name
789             archive_name = os.path.basename(archive_path)
790             d_archives[p_name] = (archive_path,
791                                   os.path.join(ARCHIVE_DIR, archive_name))
792             if (src.appli_test_property(config,"pip", "yes") and 
793                 src.product.product_test_property(p_info,"pip", "yes")):
794                 # if pip mode is activated, and product is managed by pip
795                 pip_wheels_dir=os.path.join(config.LOCAL.archive_dir,"wheels")
796                 pip_wheel_pattern=os.path.join(pip_wheels_dir, 
797                     "%s-%s*" % (p_info.name, p_info.version))
798                 pip_wheel_path=glob.glob(pip_wheel_pattern)
799                 msg_pip_not_found="Error in get_archives, pip wheel for "\
800                                   "product %s-%s was not found in %s directory"
801                 msg_pip_two_or_more="Error in get_archives, several pip wheels for "\
802                                   "product %s-%s were found in %s directory"
803                 if len(pip_wheel_path)==0:
804                     raise src.SatException(msg_pip_not_found %\
805                         (p_info.name, p_info.version, pip_wheels_dir))
806                 if len(pip_wheel_path)>1:
807                     raise src.SatException(msg_pip_two_or_more %\
808                         (p_info.name, p_info.version, pip_wheels_dir))
809
810                 pip_wheel_name=os.path.basename(pip_wheel_path[0])
811                 d_archives[p_name+" (pip wheel)"]=(pip_wheel_path[0], 
812                     os.path.join(ARCHIVE_DIR, "wheels", pip_wheel_name))
813         else:
814             # this product is not managed by archive mode; 
815             # an archive of the vcs directory will be created by get_archives_vcs
816             l_pinfo_vcs.append((p_name, p_info)) 
817             
818     return d_archives, l_pinfo_vcs
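# Illustrative return value (product names and paths are hypothetical):
#   d_archives  = {"boost": ("/data/ARCHIVES/boost-1.71.0.tar.gz",
#                            "ARCHIVES/boost-1.71.0.tar.gz")}
#   l_pinfo_vcs = [("KERNEL", <pyconf config of KERNEL>), ...]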
819
820 def add_salomeTools(config, tmp_working_dir):
821     '''Prepare a version of salomeTools that has a specific local.pyconf file 
822        configured for a source package.
823
824     :param config Config: The global configuration.
825     :param tmp_working_dir str: The temporary local directory containing some 
826                                 specific directories or files needed in the 
827                                 source package
828     :return: The path to the local salomeTools directory to add in the package
829     :rtype: str
830     '''
831     # Copy sat in the temporary working directory
832     sat_tmp_path = src.Path(os.path.join(tmp_working_dir, "salomeTools"))
833     sat_running_path = src.Path(config.VARS.salometoolsway)
834     sat_running_path.copy(sat_tmp_path)
835     
836     # Update the local.pyconf file that contains the path to the project
837     local_pyconf_name = "local.pyconf"
838     local_pyconf_dir = os.path.join(tmp_working_dir, "salomeTools", "data")
839     local_pyconf_file = os.path.join(local_pyconf_dir, local_pyconf_name)
840     # Remove the .pyconf file in the root directory of salomeTools if there is
841     # any. (For example when launching jobs, a pyconf file describing the jobs 
842     # can be here and is not useful) 
843     files_or_dir_SAT = os.listdir(os.path.join(tmp_working_dir, "salomeTools"))
844     for file_or_dir in files_or_dir_SAT:
845         if file_or_dir.endswith(".pyconf") or file_or_dir.endswith(".txt"):
846             file_path = os.path.join(tmp_working_dir,
847                                      "salomeTools",
848                                      file_or_dir)
849             os.remove(file_path)
850     
851     ff = open(local_pyconf_file, "w")
852     ff.write(LOCAL_TEMPLATE)
853     ff.close()
854     
855     return sat_tmp_path.path
856
857 def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir):
858     '''For source packages that require all products to be retrieved as an 
859        archive, one has to create archives for the vcs products.
860        So this method calls the clean and source commands of sat and then creates
861        the archives.
862
863     :param l_pinfo_vcs List: The list of specific configuration corresponding to
864                              each vcs product
865     :param sat Sat: The Sat instance that can be called to clean and source the
866                     products
867     :param config Config: The global configuration.
868     :param logger Logger: the logging instance
869     :param tmp_working_dir str: The temporary local directory containing some 
870                                 specific directories or files needed in the 
871                                 source package
872     :return: the dictionary that stores all the archives to add in the source 
873              package. {label : (path_on_local_machine, path_in_archive)}
874     :rtype: dict
875     '''
876     # clean the source directory of all the vcs products, then use the source 
877     # command and thus construct an archive that will not contain the patches
878     l_prod_names = [pn for pn, __ in l_pinfo_vcs]
879     if False: # clean is dangerous in user/SOURCES, fixed in tmp_local_working_dir
880       logger.write(_("\nclean sources\n"))
881       args_clean = config.VARS.application
882       args_clean += " --sources --products "
883       args_clean += ",".join(l_prod_names)
884       logger.write("WARNING: get_archives_vcs clean\n         '%s'\n" % args_clean, 1)
885       sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)
886     if True:
887       # source
888       logger.write(_("get sources\n"))
889       args_source = config.VARS.application
890       args_source += " --products "
891       args_source += ",".join(l_prod_names)
892       svgDir = sat.cfg.APPLICATION.workdir
893       tmp_local_working_dir = os.path.join(sat.cfg.APPLICATION.workdir, "tmp_package")  # to avoid too many big files in /tmp
894       sat.cfg.APPLICATION.workdir = tmp_local_working_dir
895       # DBG.write("SSS sat config.APPLICATION.workdir", sat.cfg.APPLICATION, True)
896       # DBG.write("sat config id", id(sat.cfg), True)
897       # note: config does not have the same id() as the one used by sat.source()
898       # sat.source(args_source, batch=True, verbose=5, logger_add_link = logger)
899       import source
900       source.run(args_source, sat, logger) #use this mode as runner.cfg reference
901       
902       # make the new archives
903       d_archives_vcs = {}
904       for pn, pinfo in l_pinfo_vcs:
905           path_archive = make_archive(pn, pinfo, tmp_local_working_dir)
906           logger.write("make archive vcs '%s'\n" % path_archive)
907           d_archives_vcs[pn] = (path_archive,
908                                 os.path.join(ARCHIVE_DIR, pn + ".tgz"))
909       sat.cfg.APPLICATION.workdir = svgDir
910       # DBG.write("END sat config", sat.cfg.APPLICATION, True)
911     return d_archives_vcs
912
913 def make_archive(prod_name, prod_info, where):
914     '''Create an archive of a product from its source directory.
915
916     :param prod_name str: The name of the product.
917     :param prod_info Config: The specific configuration corresponding to the 
918                              product
919     :param where str: The path of the directory where the resulting archive 
920                       is put
921     :return: The path of the resulting archive
922     :rtype: str
923     '''
924     path_targz_prod = os.path.join(where, prod_name + ".tgz")
925     tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
926     local_path = prod_info.source_dir
927     tar_prod.add(local_path,
928                  arcname=prod_name,
929                  exclude=exclude_VCS_and_extensions)
930     tar_prod.close()
931     return path_targz_prod       
932
933 def create_project_for_src_package(config, tmp_working_dir, with_vcs, with_ftp):
934     '''Create a specific project for a source package.
935
936     :param config Config: The global configuration.
937     :param tmp_working_dir str: The temporary local directory containing some 
938                                 specific directories or files needed in the 
939                                 source package
940     :param with_vcs boolean: True if the package is with vcs products (not 
941                              transformed into archive products)
942     :param with_ftp boolean: True if the package uses ftp servers to get archives
943     :return: The dictionary 
944              {"project" : (produced project, project path in the archive)}
945     :rtype: Dict
946     '''
947
948     # Create in the working temporary directory the full project tree
949     project_tmp_dir = os.path.join(tmp_working_dir, PROJECT_DIR)
950     products_pyconf_tmp_dir = os.path.join(project_tmp_dir,
951                                          "products")
952     compil_scripts_tmp_dir = os.path.join(project_tmp_dir,
953                                          "products",
954                                          "compil_scripts")
955     env_scripts_tmp_dir = os.path.join(project_tmp_dir,
956                                          "products",
957                                          "env_scripts")
958     patches_tmp_dir = os.path.join(project_tmp_dir,
959                                          "products",
960                                          "patches")
961     application_tmp_dir = os.path.join(project_tmp_dir,
962                                          "applications")
963     for directory in [project_tmp_dir,
964                       compil_scripts_tmp_dir,
965                       env_scripts_tmp_dir,
966                       patches_tmp_dir,
967                       application_tmp_dir]:
968         src.ensure_path_exists(directory)
969
970     # Create the pyconf that contains the information of the project
971     project_pyconf_name = "project.pyconf"        
972     project_pyconf_file = os.path.join(project_tmp_dir, project_pyconf_name)
973     ff = open(project_pyconf_file, "w")
974     ff.write(PROJECT_TEMPLATE)
975     if with_ftp and len(config.PATHS.ARCHIVEFTP) > 0:
976         ftp_path='ARCHIVEFTP : "'+config.PATHS.ARCHIVEFTP[0]
977         for ftpserver in config.PATHS.ARCHIVEFTP[1:]:
978             ftp_path=ftp_path+":"+ftpserver
979         ftp_path+='"'
980         ff.write("# ftp servers where to search for prerequisite archives\n")
981         ff.write(ftp_path)
982     # add licence paths if any
983     if len(config.PATHS.LICENCEPATH) > 0:  
984         licence_path='LICENCEPATH : "'+config.PATHS.LICENCEPATH[0]
985         for path in config.PATHS.LICENCEPATH[1:]:
986             licence_path=licence_path+":"+path
987         licence_path+='"'
988         ff.write("\n# Where to search for licences\n")
989         ff.write(licence_path)
990         
991
992     ff.close()
993     
994     # Loop over the products to get their pyconf and all the scripts 
995     # (compilation, environment, patches)
996     # and create the pyconf file to add to the project
997     lproducts_name = config.APPLICATION.products.keys()
998     l_products = src.product.get_products_infos(lproducts_name, config)
999     for p_name, p_info in l_products:
1000         # skip product with property not_in_package set to yes
1001         if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
1002             continue  
1003         find_product_scripts_and_pyconf(p_name,
1004                                         p_info,
1005                                         config,
1006                                         with_vcs,
1007                                         compil_scripts_tmp_dir,
1008                                         env_scripts_tmp_dir,
1009                                         patches_tmp_dir,
1010                                         products_pyconf_tmp_dir)
1011     
1012     find_application_pyconf(config, application_tmp_dir)
1013     
1014     d_project = {"project" : (project_tmp_dir, PROJECT_DIR )}
1015     return d_project
1016
1017 def find_product_scripts_and_pyconf(p_name,
1018                                     p_info,
1019                                     config,
1020                                     with_vcs,
1021                                     compil_scripts_tmp_dir,
1022                                     env_scripts_tmp_dir,
1023                                     patches_tmp_dir,
1024                                     products_pyconf_tmp_dir):
1025     '''Create a specific pyconf file for a given product. Get its environment 
1026        script, its compilation script and its patches, and put them in the temporary
1027        working directory. This method is used in the source package in order to
1028        construct the specific project.
1029
1030     :param p_name str: The name of the product.
1031     :param p_info Config: The specific configuration corresponding to the 
1032                              product
1033     :param config Config: The global configuration.
1034     :param with_vcs boolean: True if the package is with vcs products (not 
1035                              transformed into archive products)
1036     :param compil_scripts_tmp_dir str: The path to the temporary compilation 
1037                                        scripts directory of the project.
1038     :param env_scripts_tmp_dir str: The path to the temporary environment script 
1039                                     directory of the project.
1040     :param patches_tmp_dir str: The path to the temporary patch scripts 
1041                                 directory of the project.
1042     :param products_pyconf_tmp_dir str: The path to the temporary product 
1043                                         scripts directory of the project.
1044     '''
1045     
1046     # read the pyconf of the product
1047     product_pyconf_path = src.find_file_in_lpath(p_name + ".pyconf",
1048                                            config.PATHS.PRODUCTPATH)
1049     product_pyconf_cfg = src.pyconf.Config(product_pyconf_path)
1050
1051     # find the compilation script if any
1052     if src.product.product_has_script(p_info):
1053         compil_script_path = src.Path(p_info.compil_script)
1054         compil_script_path.copy(compil_scripts_tmp_dir)
1055         product_pyconf_cfg[p_info.section].compil_script = os.path.basename(
1056                                                     p_info.compil_script)
1057     # find the environment script if any
1058     if src.product.product_has_env_script(p_info):
1059         env_script_path = src.Path(p_info.environ.env_script)
1060         env_script_path.copy(env_scripts_tmp_dir)
1061         product_pyconf_cfg[p_info.section].environ.env_script = os.path.basename(
1062                                                 p_info.environ.env_script)
1063     # find the patches if any
1064     if src.product.product_has_patches(p_info):
1065         patches = src.pyconf.Sequence()
1066         for patch_path in p_info.patches:
1067             p_path = src.Path(patch_path)
1068             p_path.copy(patches_tmp_dir)
1069             patches.append(os.path.basename(patch_path), "")
1070
1071         product_pyconf_cfg[p_info.section].patches = patches
1072     
1073     if with_vcs:
1074         # put in the pyconf file the resolved values
1075         for info in ["git_info", "cvs_info", "svn_info"]:
1076             if info in p_info:
1077                 for key in p_info[info]:
1078                     product_pyconf_cfg[p_info.section][info][key] = p_info[
1079                                                                       info][key]
1080     else:
1081         # if the product is not in archive mode, convert it to archive mode.
1082         if src.product.product_is_vcs(p_info):
1083             product_pyconf_cfg[p_info.section].get_source = "archive"
1084             if not "archive_info" in product_pyconf_cfg[p_info.section]:
1085                 product_pyconf_cfg[p_info.section].addMapping("archive_info",
1086                                         src.pyconf.Mapping(product_pyconf_cfg),
1087                                         "")
1088             product_pyconf_cfg[p_info.section
1089                               ].archive_info.archive_name = p_info.name + ".tgz"
1090     
1091     # write the pyconf file to the temporary project location
1092     product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir,
1093                                            p_name + ".pyconf")
1094     ff = open(product_tmp_pyconf_path, 'w')
1095     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1096     product_pyconf_cfg.__save__(ff, 1)
1097     ff.close()
1098
1099 def find_application_pyconf(config, application_tmp_dir):
1100     '''Find the application pyconf file and put it in the specific temporary 
1101        directory containing the specific project of a source package.
1102
1103     :param config Config: The global configuration.
1104     :param application_tmp_dir str: The path to the temporary application 
1105                                        scripts directory of the project.
1106     '''
1107     # read the pyconf of the application
1108     application_name = config.VARS.application
1109     application_pyconf_path = src.find_file_in_lpath(
1110                                             application_name + ".pyconf",
1111                                             config.PATHS.APPLICATIONPATH)
1112     application_pyconf_cfg = src.pyconf.Config(application_pyconf_path)
1113     
1114     # Change the workdir
1115     application_pyconf_cfg.APPLICATION.workdir = src.pyconf.Reference(
1116                                     application_pyconf_cfg,
1117                                     src.pyconf.DOLLAR,
1118                                     'VARS.salometoolsway + $VARS.sep + ".."')
1119
1120     # Prevent compilation in base
1121     application_pyconf_cfg.APPLICATION.no_base = "yes"
1122     
1123     # remove products that are not in config (they were filtered out by --without_properties)
1124     for product_name in application_pyconf_cfg.APPLICATION.products.keys():
1125         if product_name not in config.APPLICATION.products.keys():
1126             application_pyconf_cfg.APPLICATION.products.__delitem__(product_name)
1127
1128     # write the pyconf file to the temporary application location
1129     application_tmp_pyconf_path = os.path.join(application_tmp_dir,
1130                                                application_name + ".pyconf")
1131
1132     ff = open(application_tmp_pyconf_path, 'w')
1133     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1134     application_pyconf_cfg.__save__(ff, 1)
1135     ff.close()
1136
1137 def sat_package(config, tmp_working_dir, options, logger):
1138     '''Prepare a dictionary that stores all the needed directories and files to
1139        add in a salomeTools package.
1140     
1141     :param tmp_working_dir str: The temporary local working directory 
1142     :param options OptResult: the options of the launched command
1143     :return: the dictionary that stores all the needed directories and files to
1144              add in a salomeTools package.
1145              {label : (path_on_local_machine, path_in_archive)}
1146     :rtype: dict
1147     '''
1148     d_project = {}
1149
1150     # we include sat itself
1151     d_project["all_sat"]=(config.VARS.salometoolsway, "")
1152
1153     # and we overwrite local.pyconf with a clean version.
1154     local_pyconf_tmp_path = os.path.join(tmp_working_dir, "local.pyconf")
1155     local_file_path = os.path.join(config.VARS.datadir, "local.pyconf")
1156     local_cfg = src.pyconf.Config(local_file_path)
1157     local_cfg.PROJECTS.project_file_paths=src.pyconf.Sequence(local_cfg.PROJECTS)
1158     local_cfg.LOCAL["base"] = "default"
1159     local_cfg.LOCAL["workdir"] = "default"
1160     local_cfg.LOCAL["log_dir"] = "default"
1161     local_cfg.LOCAL["archive_dir"] = "default"
1162     local_cfg.LOCAL["VCS"] = "None"
1163     local_cfg.LOCAL["tag"] = src.get_salometool_version(config)
1164
1165     # if the archive contains a project, we write its relative path in local.pyconf
1166     if options.project:
1167         project_arch_path = os.path.join("projects", options.project, 
1168                                          os.path.basename(options.project_file_path))
1169         local_cfg.PROJECTS.project_file_paths.append(project_arch_path, "")
1170
1171     ff = open(local_pyconf_tmp_path, 'w')
1172     local_cfg.__save__(ff, 1)
1173     ff.close()
1174     d_project["local.pyconf"]=(local_pyconf_tmp_path, "data/local.pyconf")
1175     return d_project
1176     
1177
1178 def project_package(config, name_project, project_file_path, ftp_mode, tmp_working_dir, embedded_in_sat, logger):
1179     '''Prepare a dictionary that stores all the needed directories and files to
1180        add in a project package.
1181     
    :param config Config: The global configuration.
    :param name_project str: The name of the project to package.
1182     :param project_file_path str: The path to the local project.
1183     :param ftp_mode boolean: Do not embed archives, the archive will rely on ftp mode to retrieve them.
1184     :param tmp_working_dir str: The temporary local directory containing some 
1185                                 specific directories or files needed in the 
1186                                 project package
1187     :param embedded_in_sat boolean: the project package is embedded in a sat package
    :param logger Logger: The logger instance to use for the display and logging.
1188     :return: the dictionary that stores all the needed directories and files to
1189              add in a project package.
1190              {label : (path_on_local_machine, path_in_archive)}
1191     :rtype: dict
1192     '''
1193     d_project = {}
1194     # Read the project file and get the directories to add to the package
1195     
1196     try:
1197       project_pyconf_cfg = config.PROJECTS.projects.__getattr__(name_project)
1198     except Exception:
1199       logger.write("""
1200 WARNING: config.PROJECTS.projects.%s does not exist, reading it now from:\n%s\n""" % (name_project, project_file_path))
1201       project_pyconf_cfg = src.pyconf.Config(project_file_path)
1202       project_pyconf_cfg.PWD = os.path.dirname(project_file_path)
1203     
1204     paths = {"APPLICATIONPATH" : "applications",
1205              "PRODUCTPATH" : "products",
1206              "JOBPATH" : "jobs",
1207              "MACHINEPATH" : "machines"}
1208     if not ftp_mode:
1209         paths["ARCHIVEPATH"] = "archives"
1210
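    # For example, with the mapping above the project's APPLICATIONPATH directory
    # is copied to "projects/<name_project>/applications" when the project is
    # embedded in a sat package, and simply to "applications" otherwise
    # (see the loop below).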
1211     # Loop over the project paths and add them to the package
1212     project_file_name = os.path.basename(project_file_path)
     # compute the destination of the project pyconf file once, so that it is
     # defined even when none of the paths below exists in the project config
     if embedded_in_sat:
         project_file_dest = os.path.join("projects", name_project, project_file_name)
     else:
         project_file_dest = project_file_name
1213     for path in paths:
1214         if path not in project_pyconf_cfg:
1215             continue
1216         if embedded_in_sat:
1217             dest_path = os.path.join("projects", name_project, paths[path])
1219         else:
1220             dest_path = paths[path]
1222
1223         # Add the directory to the files to add in the package
1224         d_project[path] = (project_pyconf_cfg[path], dest_path)
1225
1226         # Modify the value of the path in the package
1227         project_pyconf_cfg[path] = src.pyconf.Reference(
1228                                     project_pyconf_cfg,
1229                                     src.pyconf.DOLLAR,
1230                                     'project_path + "/' + paths[path] + '"')
1231     
1232     # Modify some values
1233     if "project_path" not in project_pyconf_cfg:
1234         project_pyconf_cfg.addMapping("project_path",
1235                                       src.pyconf.Mapping(project_pyconf_cfg),
1236                                       "")
1237     project_pyconf_cfg.project_path = src.pyconf.Reference(project_pyconf_cfg,
1238                                                            src.pyconf.DOLLAR,
1239                                                            'PWD')
1240     # we don't want to export these two fields
1241     project_pyconf_cfg.__delitem__("file_path")
1242     project_pyconf_cfg.__delitem__("PWD")
1243     if ftp_mode:
1244         project_pyconf_cfg.__delitem__("ARCHIVEPATH")
1245     
1246     # Write the project pyconf file
1247     project_pyconf_tmp_path = os.path.join(tmp_working_dir, project_file_name)
1248     ff = open(project_pyconf_tmp_path, 'w')
1249     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1250     project_pyconf_cfg.__save__(ff, 1)
1251     ff.close()
1252     d_project["Project hat file"] = (project_pyconf_tmp_path, project_file_dest)
1253     
1254     return d_project
1255
1256 def add_readme(config, options, where):
1257     readme_path = os.path.join(where, "README")
1258     with codecs.open(readme_path, "w", 'utf-8') as f:
1259
1260         # templates for building the header
1261         readme_header="""
1262 # This package was generated with sat $version
1263 # Date: $date
1264 # User: $user
1265 # Distribution : $dist
1266
1267 In the following, $$ROOT represents the directory where you have installed 
1268 SALOME (the directory where this file is located).
1269
1270 """
1271         readme_compilation_with_binaries="""
1272
1273 compilation based on the binaries used as prerequisites
1274 =======================================================
1275
1276 If you fail to compile the complete application (for example because
1277 you are not root on your system and cannot install missing packages), you
1278 may try a partial compilation based on the binaries.
1279 For that it is necessary to copy the binaries from BINARIES to INSTALL,
1280 and do some substitutions on cmake and .la files (replace the build directories
1281 with local paths).
1282 The procedure to do it is:
1283  1) Remove or rename INSTALL directory if it exists
1284  2) Execute the shell script install_bin.sh:
1285  > cd $ROOT
1286  > ./install_bin.sh
1287  3) Use SalomeTools (as explained in the Sources section) and compile only the
1288     modules you need to (with the -p option)
1289
1290 """
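        # Note: readme_header is rendered below through string.Template, which
        # substitutes $version, $date, $user and $dist from the dictionary d;
        # the literal $$ROOT escapes to $ROOT in the generated README.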
1291         readme_header_tpl=string.Template(readme_header)
1292         readme_template_path_bin = os.path.join(config.VARS.internal_dir,
1293                 "README_BIN.template")
1294         readme_template_path_bin_launcher = os.path.join(config.VARS.internal_dir,
1295                 "README_LAUNCHER.template")
1296         readme_template_path_bin_virtapp = os.path.join(config.VARS.internal_dir,
1297                 "README_BIN_VIRTUAL_APP.template")
1298         readme_template_path_src = os.path.join(config.VARS.internal_dir,
1299                 "README_SRC.template")
1300         readme_template_path_pro = os.path.join(config.VARS.internal_dir,
1301                 "README_PROJECT.template")
1302         readme_template_path_sat = os.path.join(config.VARS.internal_dir,
1303                 "README_SAT.template")
1304
1305         # prepare substitution dictionary
1306         d = dict()
1307         d['user'] = config.VARS.user
1308         d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
1309         d['version'] = src.get_salometool_version(config)
1310         d['dist'] = config.VARS.dist
1311         f.write(readme_header_tpl.substitute(d)) # write the general header (common)
1312
1313         if options.binaries or options.sources:
1314             d['application'] = config.VARS.application
1315             f.write("# Application: " + d['application'] + "\n")
1316             if 'KERNEL' in config.APPLICATION.products:
1317                 VersionSalome = src.get_salome_version(config)
1318                 # Case where SALOME has the launcher that uses the SalomeContext API
1319                 if VersionSalome >= 730:
1320                     d['launcher'] = config.APPLICATION.profile.launcher_name
1321                 else:
1322                     d['virtual_app'] = 'runAppli' # this info is not used for now
1323
1324         # write the specific sections
1325         if options.binaries:
1326             f.write(src.template.substitute(readme_template_path_bin, d))
1327             if "virtual_app" in d:
1328                 f.write(src.template.substitute(readme_template_path_bin_virtapp, d))
1329             if "launcher" in d:
1330                 f.write(src.template.substitute(readme_template_path_bin_launcher, d))
1331
1332         if options.sources:
1333             f.write(src.template.substitute(readme_template_path_src, d))
1334
1335         if options.binaries and options.sources:
1336             f.write(readme_compilation_with_binaries)
1337
1338         if options.project:
1339             f.write(src.template.substitute(readme_template_path_pro, d))
1340
1341         if options.sat:
1342             f.write(src.template.substitute(readme_template_path_sat, d))
1343     
1344     return readme_path
1345
1346 def update_config(config, prop, value):
1347     '''Remove from config.APPLICATION.products the products that have the property given as input.
1348     
1349     :param config Config: The global config.
1350     :param prop str: The property to filter
1351     :param value str: The value of the property to filter
1352     '''
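    # Typical call (see run below): update_config(config, "not_in_package", "yes")
    # removes every product whose "not_in_package" property equals "yes".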
1353     # if there is no APPLICATION (e.g. sat package -t): nothing to do
1354     if "APPLICATION" in config:
1355         l_product_to_remove = []
1356         for product_name in config.APPLICATION.products.keys():
1357             prod_cfg = src.product.get_product_config(config, product_name)
1358             if src.get_property_in_product_cfg(prod_cfg, prop) == value:
1359                 l_product_to_remove.append(product_name)
1360         for product_name in l_product_to_remove:
1361             config.APPLICATION.products.__delitem__(product_name)
1362
1363 def description():
1364     '''method that is called when salomeTools is called with --help option.
1365     
1366     :return: The text to display for the package command description.
1367     :rtype: str
1368     '''
1369     return _("""
1370 The package command creates a tar file archive of a product.
1371 There are four kinds of archive, which can be mixed:
1372
1373  1 - The binary archive. 
1374      It contains the product installation directories plus a launcher.
1375  2 - The sources archive. 
1376      It contains the product archives and a project (the application plus salomeTools).
1377  3 - The project archive. 
1378      It contains a project (give the project file path as argument).
1379  4 - The salomeTools archive. 
1380      It contains the salomeTools utility code.
1381
1382 example:
1383  >> sat package SALOME-master --binaries --sources""")
1384   
1385 def run(args, runner, logger):
1386     '''method that is called when salomeTools is called with package parameter.
1387     '''
1388     
1389     # Parse the options
1390     (options, args) = parser.parse_args(args)
1391
1392     # Check that a type of package is called, and only one
1393     all_option_types = (options.binaries,
1394                         options.sources,
1395                         options.project not in ["", None],
1396                         options.sat)
1397
1398     # Check if no option for package type
1399     if all_option_types.count(True) == 0:
1400         msg = _("Error: Specify a type for the package\nUse one of the "
1401                 "following options: --binaries, --sources, --project or"
1402                 " --salometools")
1403         logger.write(src.printcolors.printcError(msg), 1)
1404         logger.write("\n", 1)
1405         return 1
1406     
1407     # The directory where to put the package if it is neither binary nor source
1408     package_default_path = runner.cfg.LOCAL.workdir
1409     
1410     # if the package contains binaries or sources:
1411     if options.binaries or options.sources:
1412         # Check that the command has been called with an application
1413         src.check_config_has_application(runner.cfg)
1414
1415         # Display information
1416         logger.write(_("Packaging application %s\n") % src.printcolors.printcLabel(
1417                                                     runner.cfg.VARS.application), 1)
1418         
1419         # Get the default directory where to put the packages
1420         package_default_path = os.path.join(runner.cfg.APPLICATION.workdir, "PACKAGE")
1421         src.ensure_path_exists(package_default_path)
1422         
1423     # if the package contains a project:
1424     if options.project:
1425         # check that the project is visible by SAT
1426         projectNameFile = options.project + ".pyconf"
1427         foundProject = None
1428         for i in runner.cfg.PROJECTS.project_file_paths:
1429             baseName = os.path.basename(i)
1430             if baseName == projectNameFile:
1431                 foundProject = i
1432                 break
1433
1434         if foundProject is None:
1435             local_path = os.path.join(runner.cfg.VARS.salometoolsway, "data", "local.pyconf")
1436             msg = _("""ERROR: the project %(1)s is not visible to salomeTools.
1437 Known projects are:
1438 %(2)s
1439
1440 Please add it to the file:
1441 %(3)s""" % \
1442                     {"1": options.project, "2": "\n  ".join(runner.cfg.PROJECTS.project_file_paths), "3": local_path})
1443             logger.write(src.printcolors.printcError(msg), 1)
1444             logger.write("\n", 1)
1445             return 1
1446         else:
1447             options.project_file_path = foundProject
1448             src.printcolors.print_value(logger, "Project path", options.project_file_path, 2)
1449     
1450     # Remove the products that are filtered by the --without_properties option
1451     if options.without_properties:
1452         app = runner.cfg.APPLICATION
1453         logger.trace("without_properties all products:\n %s\n" % PP.pformat(sorted(app.products.keys())))
1454         prop, value = options.without_properties
1455         update_config(runner.cfg, prop, value)
1456         logger.warning("without_properties remaining products:\n %s\n" % PP.pformat(sorted(app.products.keys())))
1457
1458     # Remove from config the products that have the not_in_package property
1459     update_config(runner.cfg, "not_in_package", "yes")
1460     
1461     # get the name of the archive or build it
1462     if options.name:
1463         if os.path.basename(options.name) == options.name:
1464             # only a name (not a path)
1465             archive_name = options.name           
1466             dir_name = package_default_path
1467         else:
1468             archive_name = os.path.basename(options.name)
1469             dir_name = os.path.dirname(options.name)
1470         
1471         # remove the archive extension, if any
1472         if archive_name[-len(".tgz"):] == ".tgz":
1473             archive_name = archive_name[:-len(".tgz")]
1474         if archive_name[-len(".tar.gz"):] == ".tar.gz":
1475             archive_name = archive_name[:-len(".tar.gz")]
1476         
1477     else:
1478         archive_name=""
1479         dir_name = package_default_path
1480         if options.binaries or options.sources:
1481             archive_name = runner.cfg.APPLICATION.name
1482
1483         if options.binaries:
1484             archive_name += "-"+runner.cfg.VARS.dist
1485             
1486         if options.sources:
1487             archive_name += "-SRC"
1488             if options.with_vcs:
1489                 archive_name += "-VCS"
1490
1491         if options.sat:
1492             archive_name += ("salomeTools_" + src.get_salometool_version(runner.cfg))
1493
1494         if options.project:
1495             if options.sat:
1496                 archive_name += "_" 
1497             project_name = options.project
1498             archive_name += ("satproject_" + project_name)
1499  
1500         if len(archive_name)==0: # no option worked 
1501             msg = _("Error: Cannot name the archive\n"
1502                     " check if at least one of the following options was "
1503                     "selected : --binaries, --sources, --project or"
1504                     " --salometools")
1505             logger.write(src.printcolors.printcError(msg), 1)
1506             logger.write("\n", 1)
1507             return 1
1508  
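    # With the naming rules above, a default name is built from the options; for
    # instance "sat package SALOME-master --binaries --sources" would typically
    # yield an archive like SALOME-master-<dist>-SRC.tgz, <dist> being VARS.dist.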
1509     path_targz = os.path.join(dir_name, archive_name + ".tgz")
1510     
1511     src.printcolors.print_value(logger, "Package path", path_targz, 2)
1512
1513     # Create a working directory for all files that are produced during the
1514     # package creation and that will be removed at the end of the command
1515     tmp_working_dir = os.path.join(runner.cfg.VARS.tmp_root, runner.cfg.VARS.datehour)
1516     src.ensure_path_exists(tmp_working_dir)
1517     logger.write("\n", 5)
1518     logger.write(_("The temporary working directory: %s\n" % tmp_working_dir),5)
1519     
1520     logger.write("\n", 3)
1521
1522     msg = _("Preparation of files to add to the archive")
1523     logger.write(src.printcolors.printcLabel(msg), 2)
1524     logger.write("\n", 2)
1525     
1526     d_files_to_add={}  # content of the archive
1527
1528     # a dict to hold paths that will need to be substituted for user recompilations
1529     d_paths_to_substitute={}  
1530
1531     if options.binaries:
1532         d_bin_files_to_add = binary_package(runner.cfg,
1533                                             logger,
1534                                             options,
1535                                             tmp_working_dir)
1536         # for all binary dirs, store the substitutions that will be required
1537         # for extra compilations
1538         for key in d_bin_files_to_add:
1539             if key.endswith("(bin)"):
1540                 source_dir = d_bin_files_to_add[key][0]
1541                 path_in_archive = d_bin_files_to_add[key][1].replace(
1542                    runner.cfg.INTERNAL.config.binary_dir + runner.cfg.VARS.dist,
1543                    runner.cfg.INTERNAL.config.install_dir)
1544                 if os.path.basename(source_dir)==os.path.basename(path_in_archive):
1545                     # if basename is the same we will just substitute the dirname 
1546                     d_paths_to_substitute[os.path.dirname(source_dir)]=\
1547                         os.path.dirname(path_in_archive)
1548                 else:
1549                     d_paths_to_substitute[source_dir]=path_in_archive
1550
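        # d_paths_to_substitute now maps local binary directories to their
        # install location in the archive, e.g. (hypothetical paths):
        #   {"/home/user/APPLI/BINARIES-<dist>": "<archive_name>/INSTALL"}
        # It is used by produce_install_bin_file below when --sources is also selected.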
1551         d_files_to_add.update(d_bin_files_to_add)
1552
1553     if options.sources:
1554         d_files_to_add.update(source_package(runner,
1555                                         runner.cfg,
1556                                         logger, 
1557                                         options,
1558                                         tmp_working_dir))
1559         if options.binaries:
1560             # for archives with bin and sources we provide a shell script able to 
1561             # install binaries for compilation
1562             file_install_bin=produce_install_bin_file(runner.cfg,logger,
1563                                                       tmp_working_dir,
1564                                                       d_paths_to_substitute,
1565                                                       "install_bin.sh")
1566             d_files_to_add.update({"install_bin" : (file_install_bin, "install_bin.sh")})
1567             logger.write("substitutions that need to be done later : \n", 5)
1568             logger.write(str(d_paths_to_substitute), 5)
1569             logger.write("\n", 5)
1570     else:
1571         # the --salometools option is not considered when --sources is selected,
1572         # as the sources archive already includes salomeTools
1573         if options.sat:
1574             d_files_to_add.update(sat_package(runner.cfg, tmp_working_dir, 
1575                                   options, logger))
1576         
1577     if options.project:
         # use options.project here: project_name is only set above when the
         # archive name is built automatically (i.e. when --name is not given)
1578         DBG.write("config for package %s" % options.project, runner.cfg)
1579         d_files_to_add.update(project_package(runner.cfg, options.project, options.project_file_path, options.ftp, tmp_working_dir, options.sat, logger))
1580
1581     if not(d_files_to_add):
1582         msg = _("Error: Empty dictionary to build the archive!\n")
1583         logger.write(src.printcolors.printcError(msg), 1)
1584         logger.write("\n", 1)
1585         return 1
1586
1587     # Add the README file in the package
1588     local_readme_tmp_path = add_readme(runner.cfg, options, tmp_working_dir)
1589     d_files_to_add["README"] = (local_readme_tmp_path, "README")
1590
1591     # Add the additional files of option add_files
1592     if options.add_files:
1593         for file_path in options.add_files:
1594             if not os.path.exists(file_path):
1595                 msg = _("WARNING: the file %s is not accessible.\n" % file_path)
                logger.write(msg, 1)
1596                 continue
1597             file_name = os.path.basename(file_path)
1598             d_files_to_add[file_name] = (file_path, file_name)
1599
1600     logger.write("\n", 2)
1601     logger.write(src.printcolors.printcLabel(_("Actually do the package")), 2)
1602     logger.write("\n", 2)
1603     logger.write("\nfiles and directories to add:\n%s\n\n" % PP.pformat(d_files_to_add), 5)
1604
1605     res = 0
1606     try:
1607         # Creating the object tarfile
1608         tar = tarfile.open(path_targz, mode='w:gz')
1609         
1610         # get the filtering function if needed
1611         filter_function = exclude_VCS_and_extensions
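        # exclude_VCS_and_extensions is expected to skip version control
        # directories (IGNORED_DIRS) and files whose extension is listed in
        # IGNORED_EXTENSIONS while the archive is being filled.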
1612
1613         # Add the files to the tarfile object
1614         res = add_files(tar, archive_name, d_files_to_add, logger, f_exclude=filter_function)
1615         tar.close()
1616     except KeyboardInterrupt:
1617         logger.write(src.printcolors.printcError("\nERROR: forced interruption\n"), 1)
1618         logger.write(_("Removing the temporary working directory '%s'... ") % tmp_working_dir, 1)
1619         # remove the working directory
1620         shutil.rmtree(tmp_working_dir)
1621         logger.write(_("OK"), 1)
1622         logger.write(_("\n"), 1)
1623         return 1
1624     
1625     # case with no application, e.g. when only sat is packaged via 'sat package -t'
1626     try:
1627         app = runner.cfg.APPLICATION
1628     except Exception:
1629         app = None
1630
1631     # unconditionally remove the tmp_local_working_dir
1632     if app is not None:
1633         tmp_local_working_dir = os.path.join(app.workdir, "tmp_package")
1634         if os.path.isdir(tmp_local_working_dir):
1635             shutil.rmtree(tmp_local_working_dir)
1636
1637     # remove the tmp directory, unless user has registered as developer
1638     if os.path.isdir(tmp_working_dir) and (not DBG.isDeveloper()):
1639         shutil.rmtree(tmp_working_dir)
1640     
1641     # Print again the path of the package
1642     logger.write("\n", 2)
1643     src.printcolors.print_value(logger, "Package path", path_targz, 2)
1644     
1645     return res