Salome HOME
suppression du flag global no_base. desormais seul le flag base sera utilise
[tools/sat.git] / commands / package.py
1 #!/usr/bin/env python
2 #-*- coding:utf-8 -*-
3 #  Copyright (C) 2010-2012  CEA/DEN
4 #
5 #  This library is free software; you can redistribute it and/or
6 #  modify it under the terms of the GNU Lesser General Public
7 #  License as published by the Free Software Foundation; either
8 #  version 2.1 of the License.
9 #
10 #  This library is distributed in the hope that it will be useful,
11 #  but WITHOUT ANY WARRANTY; without even the implied warranty of
12 #  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13 #  Lesser General Public License for more details.
14 #
15 #  You should have received a copy of the GNU Lesser General Public
16 #  License along with this library; if not, write to the Free Software
17 #  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
18
19 import os
20 import stat
21 import shutil
22 import datetime
23 import tarfile
24 import codecs
25 import string
26 import glob
27 import pprint as PP
28
29 import src
30
31 from application import get_SALOME_modules
32 import src.debug as DBG
33
34 BINARY = "binary"
35 SOURCE = "Source"
36 PROJECT = "Project"
37 SAT = "Sat"
38
39 ARCHIVE_DIR = "ARCHIVES"
40 PROJECT_DIR = "PROJECT"
41
42 IGNORED_DIRS = [".git", ".svn"]
43 IGNORED_EXTENSIONS = []
44
45 PACKAGE_EXT=".tar.gz" # the extension we use for the packages
46
47 PROJECT_TEMPLATE = """#!/usr/bin/env python
48 #-*- coding:utf-8 -*-
49
50 # The path to the archive root directory
51 root_path : $PWD + "/../"
52 # path to the PROJECT
53 project_path : $PWD + "/"
54
55 # Where to search the archives of the products
56 ARCHIVEPATH : $root_path + "ARCHIVES"
57 # Where to search the pyconf of the applications
58 APPLICATIONPATH : $project_path + "applications/"
59 # Where to search the pyconf of the products
60 PRODUCTPATH : $project_path + "products/"
61 # Where to search the pyconf of the jobs of the project
62 JOBPATH : $project_path + "jobs/"
63 # Where to search the pyconf of the machines of the project
64 MACHINEPATH : $project_path + "machines/"
65 """
66
67 LOCAL_TEMPLATE = ("""#!/usr/bin/env python
68 #-*- coding:utf-8 -*-
69
70   LOCAL :
71   {
72     base : 'default'
73     workdir : 'default'
74     log_dir : 'default'
75     archive_dir : 'default'
76     VCS : None
77     tag : None
78   }
79
80 PROJECTS :
81 {
82 project_file_paths : [$VARS.salometoolsway + $VARS.sep + \"..\" + $VARS.sep"""
83 """ + \"""" + PROJECT_DIR + """\" + $VARS.sep + "project.pyconf"]
84 }
85 """)
86
87 # Define all possible option for the package command :  sat package <options>
88 parser = src.options.Options()
89 parser.add_option('b', 'binaries', 'boolean', 'binaries',
90     _('Optional: Produce a binary package.'), False)
91 parser.add_option('f', 'force_creation', 'boolean', 'force_creation',
92     _('Optional: Only binary package: produce the archive even if '
93       'there are some missing products.'), False)
94 parser.add_option('s', 'sources', 'boolean', 'sources',
95     _('Optional: Produce a compilable archive of the sources of the '
96       'application.'), False)
97 parser.add_option('', 'with_vcs', 'boolean', 'with_vcs',
98     _('Optional: Do not make archive for products in VCS mode (git, cvs, svn). ' 
99       'Sat prepare will use VCS mode instead to retrieve them'),
100     False)
101 parser.add_option('', 'ftp', 'boolean', 'ftp',
102     _('Optional: Do not embed archives for products in archive mode.' 
103     'Sat prepare will use ftp instead to retrieve them'),
104     False)
105 parser.add_option('p', 'project', 'string', 'project',
106     _('Optional: Produce an archive that contains a project.'), "")
107 parser.add_option('t', 'salometools', 'boolean', 'sat',
108     _('Optional: Produce an archive that contains salomeTools.'), False)
109 parser.add_option('n', 'name', 'string', 'name',
110     _('Optional: The name or full path of the archive.'), None)
111 parser.add_option('', 'add_files', 'list2', 'add_files',
112     _('Optional: The list of additional files to add to the archive.'), [])
113 parser.add_option('', 'without_properties', 'properties', 'without_properties',
114     _('Optional: Filter the products by their properties.\n\tSyntax: '
115       '--without_properties <property>:<value>'))
116
117
118 def add_files(tar, name_archive, d_content, logger, f_exclude=None):
119     '''Create an archive containing all directories and files that are given in
120        the d_content argument.
121     
122     :param tar tarfile: The tarfile instance used to make the archive.
123     :param name_archive str: The name of the archive to make.
124     :param d_content dict: The dictionary that contain all directories and files
125                            to add in the archive.
126                            d_content[label] = 
127                                         (path_on_local_machine, path_in_archive)
128     :param logger Logger: the logging instance
129     :param f_exclude Function: the function that filters
130     :return: 0 if success, 1 if not.
131     :rtype: int
132     '''
133     # get the max length of the messages in order to make the display
134     max_len = len(max(d_content.keys(), key=len))
135     
136     success = 0
137     # loop over each directory or file stored in the d_content dictionary
138     names = sorted(d_content.keys())
139     DBG.write("add tar names", names)
140
141     # used to avoid duplications (for pip install in python, or single_install_dir cases)
142     already_added=set() 
143     for name in names:
144         # display information
145         len_points = max_len - len(name) + 3
146         local_path, archive_path = d_content[name]
147         in_archive = os.path.join(name_archive, archive_path)
148         logger.write(name + " " + len_points * "." + " "+ in_archive + " ", 3)
149         # Get the local path and the path in archive 
150         # of the directory or file to add
151         # Add it in the archive
152         try:
153             key=local_path+"->"+in_archive
154             if key not in already_added:
155                 tar.add(local_path, arcname=in_archive, exclude=f_exclude)
156                 already_added.add(key)
157             logger.write(src.printcolors.printcSuccess(_("OK")), 3)
158         except Exception as e:
159             logger.write(src.printcolors.printcError(_("KO ")), 3)
160             logger.write(str(e), 3)
161             success = 1
162         logger.write("\n", 3)
163     return success
164
165 def exclude_VCS_and_extensions(filename):
166     ''' The function that is used to exclude from package the link to the 
167         VCS repositories (like .git)
168
169     :param filename Str: The filname to exclude (or not).
170     :return: True if the file has to be exclude
171     :rtype: Boolean
172     '''
173     for dir_name in IGNORED_DIRS:
174         if dir_name in filename:
175             return True
176     for extension in IGNORED_EXTENSIONS:
177         if filename.endswith(extension):
178             return True
179     return False
180
181 def produce_relative_launcher(config,
182                               logger,
183                               file_dir,
184                               file_name,
185                               binaries_dir_name):
186     '''Create a specific SALOME launcher for the binary package. This launcher 
187        uses relative paths.
188     
189     :param config Config: The global configuration.
190     :param logger Logger: the logging instance
191     :param file_dir str: the directory where to put the launcher
192     :param file_name str: The launcher name
193     :param binaries_dir_name str: the name of the repository where the binaries
194                                   are, in the archive.
195     :return: the path of the produced launcher
196     :rtype: str
197     '''
198     
199     # get KERNEL installation path 
200     kernel_root_dir = os.path.join(binaries_dir_name, "KERNEL")
201
202     # set kernel bin dir (considering fhs property)
203     kernel_cfg = src.product.get_product_config(config, "KERNEL")
204     if src.get_property_in_product_cfg(kernel_cfg, "fhs"):
205         bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin") 
206     else:
207         bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin","salome") 
208
209     # check if the application contains an application module
210     # check also if the application has a distene product, 
211     # in this case get its licence file name
212     l_product_info = src.product.get_products_infos(config.APPLICATION.products.keys(), config)
213     salome_application_name="Not defined" 
214     distene_licence_file_name=False
215     for prod_name, prod_info in l_product_info:
216         # look for a "salome application" and a distene product
217         if src.get_property_in_product_cfg(prod_info, "is_distene") == "yes":
218             distene_licence_file_name = src.product.product_has_licence(prod_info, 
219                                             config.PATHS.LICENCEPATH) 
220         if src.get_property_in_product_cfg(prod_info, "is_salome_application") == "yes":
221             salome_application_name=prod_info.name
222
223     # if the application contains an application module, we set ABSOLUTE_APPLI_PATH to it
224     # if not we set it to KERNEL_INSTALL_DIR, which is sufficient, except for salome test
225     if salome_application_name == "Not defined":
226         app_root_dir=kernel_root_dir
227     else:
228         app_root_dir=os.path.join(binaries_dir_name, salome_application_name)
229
230     additional_env={}
231     additional_env['sat_bin_kernel_install_dir'] = "out_dir_Path + " +\
232                                                    config.VARS.sep + bin_kernel_install_dir
233     if "python3" in config.APPLICATION and config.APPLICATION.python3 == "yes":
234         additional_env['sat_python_version'] = 3
235     else:
236         additional_env['sat_python_version'] = 2
237
238     additional_env['ABSOLUTE_APPLI_PATH'] = "out_dir_Path" + config.VARS.sep + app_root_dir
239
240     # create an environment file writer
241     writer = src.environment.FileEnvWriter(config,
242                                            logger,
243                                            file_dir,
244                                            src_root=None,
245                                            env_info=None)
246     
247     filepath = os.path.join(file_dir, file_name)
248     # Write
249     writer.write_env_file(filepath,
250                           False,  # for launch
251                           "cfgForPy",
252                           additional_env=additional_env,
253                           no_path_init="False",
254                           for_package = binaries_dir_name)
255     
256     # Little hack to put out_dir_Path outside the strings
257     src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' )
258     src.replace_in_file(filepath, "r'out_dir_Path + ", "out_dir_Path + r'" )
259     
260     # A hack to put a call to a file for distene licence.
261     # It does nothing to an application that has no distene product
262     if distene_licence_file_name:
263         logger.write("Application has a distene licence file! We use it in package launcher", 5)
264         hack_for_distene_licence(filepath, distene_licence_file_name)
265        
266     # change the rights in order to make the file executable for everybody
267     os.chmod(filepath,
268              stat.S_IRUSR |
269              stat.S_IRGRP |
270              stat.S_IROTH |
271              stat.S_IWUSR |
272              stat.S_IXUSR |
273              stat.S_IXGRP |
274              stat.S_IXOTH)
275
276     return filepath
277
278 def hack_for_distene_licence(filepath, licence_file):
279     '''Replace the distene licence env variable by a call to a file.
280     
281     :param filepath Str: The path to the launcher to modify.
282     '''  
283     shutil.move(filepath, filepath + "_old")
284     fileout= filepath
285     filein = filepath + "_old"
286     fin = open(filein, "r")
287     fout = open(fileout, "w")
288     text = fin.readlines()
289     # Find the Distene section
290     num_line = -1
291     for i,line in enumerate(text):
292         if "# Set DISTENE License" in line:
293             num_line = i
294             break
295     if num_line == -1:
296         # No distene product, there is nothing to do
297         fin.close()
298         for line in text:
299             fout.write(line)
300         fout.close()
301         return
302     del text[num_line +1]
303     del text[num_line +1]
304     text_to_insert ="""    try:
305         distene_licence_file=r"%s"
306         if sys.version_info[0] >= 3 and sys.version_info[1] >= 5:
307             import importlib.util
308             spec_dist = importlib.util.spec_from_file_location("distene_licence", distene_licence_file)
309             distene=importlib.util.module_from_spec(spec_dist)
310             spec_dist.loader.exec_module(distene)
311         else:
312             import imp
313             distene = imp.load_source('distene_licence', distene_licence_file)
314         distene.set_distene_variables(context)
315     except:
316         pass\n"""  % licence_file
317     text.insert(num_line + 1, text_to_insert)
318     for line in text:
319         fout.write(line)
320     fin.close()    
321     fout.close()
322     return
323     
324 def produce_relative_env_files(config,
325                               logger,
326                               file_dir,
327                               binaries_dir_name):
328     '''Create some specific environment files for the binary package. These 
329        files use relative paths.
330     
331     :param config Config: The global configuration.
332     :param logger Logger: the logging instance
333     :param file_dir str: the directory where to put the files
334     :param binaries_dir_name str: the name of the repository where the binaries
335                                   are, in the archive.
336     :return: the list of path of the produced environment files
337     :rtype: List
338     '''  
339     # create an environment file writer
340     writer = src.environment.FileEnvWriter(config,
341                                            logger,
342                                            file_dir,
343                                            src_root=None)
344     
345     if src.architecture.is_windows():
346       shell = "bat"
347       filename  = "env_launch.bat"
348     else:
349       shell = "bash"
350       filename  = "env_launch.sh"
351
352     # Write
353     filepath = writer.write_env_file(filename,
354                           False, # for launch
355                           shell,
356                           for_package = binaries_dir_name)
357
358     # Little hack to put out_dir_Path as environment variable
359     if src.architecture.is_windows() :
360       src.replace_in_file(filepath, '"out_dir_Path', '"%out_dir_Path%' )
361       src.replace_in_file(filepath, '=out_dir_Path', '=%out_dir_Path%' )
362     else:
363       src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )
364
365     # change the rights in order to make the file executable for everybody
366     os.chmod(filepath,
367              stat.S_IRUSR |
368              stat.S_IRGRP |
369              stat.S_IROTH |
370              stat.S_IWUSR |
371              stat.S_IXUSR |
372              stat.S_IXGRP |
373              stat.S_IXOTH)
374     
375     return filepath
376
377 def produce_install_bin_file(config,
378                              logger,
379                              file_dir,
380                              d_sub,
381                              file_name):
382     '''Create a bash shell script which do substitutions in BIRARIES dir 
383        in order to use it for extra compilations.
384     
385     :param config Config: The global configuration.
386     :param logger Logger: the logging instance
387     :param file_dir str: the directory where to put the files
388     :param d_sub, dict: the dictionnary that contains the substitutions to be done
389     :param file_name str: the name of the install script file
390     :return: the produced file
391     :rtype: str
392     '''  
393     # Write
394     filepath = os.path.join(file_dir, file_name)
395     # open the file and write into it
396     # use codec utf-8 as sat variables are in unicode
397     with codecs.open(filepath, "w", 'utf-8') as installbin_file:
398         installbin_template_path = os.path.join(config.VARS.internal_dir,
399                                         "INSTALL_BIN.template")
400         
401         # build the name of the directory that will contain the binaries
402         binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
403         # build the substitution loop
404         loop_cmd = "for f in $(grep -RIl"
405         for key in d_sub:
406             loop_cmd += " -e "+ key
407         loop_cmd += ' ' + config.INTERNAL.config.install_dir +\
408                     '); do\n     sed -i "\n'
409         for key in d_sub:
410             loop_cmd += "        s?" + key + "?$(pwd)/" + d_sub[key] + "?g\n"
411         loop_cmd += '            " $f\ndone'
412
413         d={}
414         d["BINARIES_DIR"] = binaries_dir_name
415         d["SUBSTITUTION_LOOP"]=loop_cmd
416         d["INSTALL_DIR"]=config.INTERNAL.config.install_dir
417         
418         # substitute the template and write it in file
419         content=src.template.substitute(installbin_template_path, d)
420         installbin_file.write(content)
421         # change the rights in order to make the file executable for everybody
422         os.chmod(filepath,
423                  stat.S_IRUSR |
424                  stat.S_IRGRP |
425                  stat.S_IROTH |
426                  stat.S_IWUSR |
427                  stat.S_IXUSR |
428                  stat.S_IXGRP |
429                  stat.S_IXOTH)
430     
431     return filepath
432
433 def product_appli_creation_script(config,
434                                   logger,
435                                   file_dir,
436                                   binaries_dir_name):
437     '''Create a script that can produce an application (EDF style) in the binary
438        package.
439     
440     :param config Config: The global configuration.
441     :param logger Logger: the logging instance
442     :param file_dir str: the directory where to put the file
443     :param binaries_dir_name str: the name of the repository where the binaries
444                                   are, in the archive.
445     :return: the path of the produced script file
446     :rtype: Str
447     '''
448     template_name = "create_appli.py.for_bin_packages.template"
449     template_path = os.path.join(config.VARS.internal_dir, template_name)
450     text_to_fill = open(template_path, "r").read()
451     text_to_fill = text_to_fill.replace("TO BE FILLED 1",
452                                         '"' + binaries_dir_name + '"')
453     
454     text_to_add = ""
455     for product_name in get_SALOME_modules(config):
456         product_info = src.product.get_product_config(config, product_name)
457        
458         if src.product.product_is_smesh_plugin(product_info):
459             continue
460
461         if 'install_dir' in product_info and bool(product_info.install_dir):
462             if src.product.product_is_cpp(product_info):
463                 # cpp module
464                 for cpp_name in src.product.get_product_components(product_info):
465                     line_to_add = ("<module name=\"" + 
466                                    cpp_name + 
467                                    "\" gui=\"yes\" path=\"''' + "
468                                    "os.path.join(dir_bin_name, \"" + 
469                                    cpp_name + "\") + '''\"/>")
470             else:
471                 # regular module
472                 line_to_add = ("<module name=\"" + 
473                                product_name + 
474                                "\" gui=\"yes\" path=\"''' + "
475                                "os.path.join(dir_bin_name, \"" + 
476                                product_name + "\") + '''\"/>")
477             text_to_add += line_to_add + "\n"
478     
479     filled_text = text_to_fill.replace("TO BE FILLED 2", text_to_add)
480     
481     tmp_file_path = os.path.join(file_dir, "create_appli.py")
482     ff = open(tmp_file_path, "w")
483     ff.write(filled_text)
484     ff.close()
485     
486     # change the rights in order to make the file executable for everybody
487     os.chmod(tmp_file_path,
488              stat.S_IRUSR |
489              stat.S_IRGRP |
490              stat.S_IROTH |
491              stat.S_IWUSR |
492              stat.S_IXUSR |
493              stat.S_IXGRP |
494              stat.S_IXOTH)
495     
496     return tmp_file_path
497
498 def binary_package(config, logger, options, tmp_working_dir):
499     '''Prepare a dictionary that stores all the needed directories and files to
500        add in a binary package.
501     
502     :param config Config: The global configuration.
503     :param logger Logger: the logging instance
504     :param options OptResult: the options of the launched command
505     :param tmp_working_dir str: The temporary local directory containing some 
506                                 specific directories or files needed in the 
507                                 binary package
508     :return: the dictionary that stores all the needed directories and files to
509              add in a binary package.
510              {label : (path_on_local_machine, path_in_archive)}
511     :rtype: dict
512     '''
513
514     # Get the list of product installation to add to the archive
515     l_products_name = sorted(config.APPLICATION.products.keys())
516     l_product_info = src.product.get_products_infos(l_products_name,
517                                                     config)
518     l_install_dir = []
519     l_source_dir = []
520     l_not_installed = []
521     l_sources_not_present = []
522     generate_mesa_launcher = False  # a flag to know if we generate a mesa launcher
523     if ("APPLICATION" in config  and
524         "properties"  in config.APPLICATION  and
525         "mesa_launcher_in_package"    in config.APPLICATION.properties  and
526         config.APPLICATION.properties.mesa_launcher_in_package == "yes") :
527             generate_mesa_launcher=True
528
529     for prod_name, prod_info in l_product_info:
530         # skip product with property not_in_package set to yes
531         if src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes":
532             continue  
533
534         # Add the sources of the products that have the property 
535         # sources_in_package : "yes"
536         if src.get_property_in_product_cfg(prod_info,
537                                            "sources_in_package") == "yes":
538             if os.path.exists(prod_info.source_dir):
539                 l_source_dir.append((prod_name, prod_info.source_dir))
540             else:
541                 l_sources_not_present.append(prod_name)
542
543         # ignore the native and fixed products for install directories
544         if (src.product.product_is_native(prod_info) 
545                 or src.product.product_is_fixed(prod_info)
546                 or not src.product.product_compiles(prod_info)):
547             continue
548         if src.product.check_installation(config, prod_info):
549             l_install_dir.append((prod_name, prod_info.install_dir))
550         else:
551             l_not_installed.append(prod_name)
552         
553         # Add also the cpp generated modules (if any)
554         if src.product.product_is_cpp(prod_info):
555             # cpp module
556             for name_cpp in src.product.get_product_components(prod_info):
557                 install_dir = os.path.join(config.APPLICATION.workdir,
558                                            config.INTERNAL.config.install_dir,
559                                            name_cpp) 
560                 if os.path.exists(install_dir):
561                     l_install_dir.append((name_cpp, install_dir))
562                 else:
563                     l_not_installed.append(name_cpp)
564         
565     # check the name of the directory that (could) contains the binaries 
566     # from previous detar
567     binaries_from_detar = os.path.join(
568                               config.APPLICATION.workdir,
569                               config.INTERNAL.config.binary_dir + config.VARS.dist)
570     if os.path.exists(binaries_from_detar):
571          logger.write("""
572 WARNING: existing binaries directory from previous detar installation:
573          %s
574          To make new package from this, you have to: 
575          1) install binaries in INSTALL directory with the script "install_bin.sh" 
576             see README file for more details
577          2) or recompile everything in INSTALL with "sat compile" command 
578             this step is long, and requires some linux packages to be installed 
579             on your system\n
580 """ % binaries_from_detar)
581     
582     # Print warning or error if there are some missing products
583     if len(l_not_installed) > 0:
584         text_missing_prods = ""
585         for p_name in l_not_installed:
586             text_missing_prods += " - " + p_name + "\n"
587         if not options.force_creation:
588             msg = _("ERROR: there are missing product installations:")
589             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
590                                      text_missing_prods),
591                          1)
592             raise src.SatException(msg)
593         else:
594             msg = _("WARNING: there are missing products installations:")
595             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
596                                      text_missing_prods),
597                          1)
598
599     # Do the same for sources
600     if len(l_sources_not_present) > 0:
601         text_missing_prods = ""
602         for p_name in l_sources_not_present:
603             text_missing_prods += "-" + p_name + "\n"
604         if not options.force_creation:
605             msg = _("ERROR: there are missing product sources:")
606             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
607                                      text_missing_prods),
608                          1)
609             raise src.SatException(msg)
610         else:
611             msg = _("WARNING: there are missing products sources:")
612             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
613                                      text_missing_prods),
614                          1)
615  
616     # construct the name of the directory that will contain the binaries
617     if src.architecture.is_windows():
618         binaries_dir_name = config.INTERNAL.config.binary_dir
619     else:
620         binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
621     # construct the correlation table between the product names, there 
622     # actual install directories and there install directory in archive
623     d_products = {}
624     for prod_name, install_dir in l_install_dir:
625         path_in_archive = os.path.join(binaries_dir_name, os.path.basename(install_dir))
626         d_products[prod_name + " (bin)"] = (install_dir, path_in_archive)
627         
628     for prod_name, source_dir in l_source_dir:
629         path_in_archive = os.path.join("SOURCES", prod_name)
630         d_products[prod_name + " (sources)"] = (source_dir, path_in_archive)
631
632     # for packages of SALOME applications including KERNEL, 
633     # we produce a salome launcher or a virtual application (depending on salome version)
634     if 'KERNEL' in config.APPLICATION.products:
635         VersionSalome = src.get_salome_version(config)
636         # Case where SALOME has the launcher that uses the SalomeContext API
637         if VersionSalome >= 730:
638             # create the relative launcher and add it to the files to add
639             launcher_name = src.get_launcher_name(config)
640             launcher_package = produce_relative_launcher(config,
641                                                  logger,
642                                                  tmp_working_dir,
643                                                  launcher_name,
644                                                  binaries_dir_name)
645             d_products["launcher"] = (launcher_package, launcher_name)
646
647             # if the application contains mesa products, we generate in addition to the 
648             # classical salome launcher a launcher using mesa and called mesa_salome 
649             # (the mesa launcher will be used for remote usage through ssh).
650             if generate_mesa_launcher:
651                 #if there is one : store the use_mesa property
652                 restore_use_mesa_option=None
653                 if ('properties' in config.APPLICATION and 
654                     'use_mesa' in config.APPLICATION.properties):
655                     restore_use_mesa_option = config.APPLICATION.properties.use_mesa
656
657                 # activate mesa property, and generate a mesa launcher
658                 src.activate_mesa_property(config)  #activate use_mesa property
659                 launcher_mesa_name="mesa_"+launcher_name
660                 launcher_package_mesa = produce_relative_launcher(config,
661                                                      logger,
662                                                      tmp_working_dir,
663                                                      launcher_mesa_name,
664                                                      binaries_dir_name)
665                 d_products["launcher (mesa)"] = (launcher_package_mesa, launcher_mesa_name)
666
667                 # if there was a use_mesa value, we restore it
668                 # else we set it to the default value "no"
669                 if restore_use_mesa_option != None:
670                     config.APPLICATION.properties.use_mesa=restore_use_mesa_option
671                 else:
672                     config.APPLICATION.properties.use_mesa="no"
673
674             if options.sources:
675                 # if we mix binaries and sources, we add a copy of the launcher, 
676                 # prefixed  with "bin",in order to avoid clashes
677                 d_products["launcher (copy)"] = (launcher_package, "bin"+launcher_name)
678         else:
679             # Provide a script for the creation of an application EDF style
680             appli_script = product_appli_creation_script(config,
681                                                         logger,
682                                                         tmp_working_dir,
683                                                         binaries_dir_name)
684             
685             d_products["appli script"] = (appli_script, "create_appli.py")
686
687     # Put also the environment file
688     env_file = produce_relative_env_files(config,
689                                            logger,
690                                            tmp_working_dir,
691                                            binaries_dir_name)
692
693     if src.architecture.is_windows():
694       filename  = "env_launch.bat"
695     else:
696       filename  = "env_launch.sh"
697     d_products["environment file"] = (env_file, filename)      
698     return d_products
699
700 def source_package(sat, config, logger, options, tmp_working_dir):
701     '''Prepare a dictionary that stores all the needed directories and files to
702        add in a source package.
703     
704     :param config Config: The global configuration.
705     :param logger Logger: the logging instance
706     :param options OptResult: the options of the launched command
707     :param tmp_working_dir str: The temporary local directory containing some 
708                                 specific directories or files needed in the 
709                                 binary package
710     :return: the dictionary that stores all the needed directories and files to
711              add in a source package.
712              {label : (path_on_local_machine, path_in_archive)}
713     :rtype: dict
714     '''
715     
716     d_archives={}
717     # Get all the products that are prepared using an archive
718     # unless ftp mode is specified (in this case the user of the
719     # archive will get the sources through the ftp mode of sat prepare
720     if not options.ftp:
721         logger.write("Find archive products ... ")
722         d_archives, l_pinfo_vcs = get_archives(config, logger)
723         logger.write("Done\n")
724
725     d_archives_vcs = {}
726     if not options.with_vcs and len(l_pinfo_vcs) > 0:
727         # Make archives with the products that are not prepared using an archive
728         # (git, cvs, svn, etc)
729         logger.write("Construct archives for vcs products ... ")
730         d_archives_vcs = get_archives_vcs(l_pinfo_vcs,
731                                           sat,
732                                           config,
733                                           logger,
734                                           tmp_working_dir)
735         logger.write("Done\n")
736
737     # Create a project
738     logger.write("Create the project ... ")
739     d_project = create_project_for_src_package(config,
740                                                tmp_working_dir,
741                                                options.with_vcs,
742                                                options.ftp)
743     logger.write("Done\n")
744     
745     # Add salomeTools
746     tmp_sat = add_salomeTools(config, tmp_working_dir)
747     d_sat = {"salomeTools" : (tmp_sat, "salomeTools")}
748     
749     # Add a sat symbolic link if not win
750     if not src.architecture.is_windows():
751         tmp_satlink_path = os.path.join(tmp_working_dir, 'sat')
752         try:
753             t = os.getcwd()
754         except:
755             # In the jobs, os.getcwd() can fail
756             t = config.LOCAL.workdir
757         os.chdir(tmp_working_dir)
758         if os.path.lexists(tmp_satlink_path):
759             os.remove(tmp_satlink_path)
760         os.symlink(os.path.join('salomeTools', 'sat'), 'sat')
761         os.chdir(t)
762         
763         d_sat["sat link"] = (tmp_satlink_path, "sat")
764     
765     d_source = src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
766     return d_source
767
768 def get_archives(config, logger):
769     '''Find all the products that are get using an archive and all the products
770        that are get using a vcs (git, cvs, svn) repository.
771     
772     :param config Config: The global configuration.
773     :param logger Logger: the logging instance
774     :return: the dictionary {name_product : 
775              (local path of its archive, path in the package of its archive )}
776              and the list of specific configuration corresponding to the vcs 
777              products
778     :rtype: (Dict, List)
779     '''
780     # Get the list of product informations
781     l_products_name = config.APPLICATION.products.keys()
782     l_product_info = src.product.get_products_infos(l_products_name,
783                                                     config)
784     d_archives = {}
785     l_pinfo_vcs = []
786     for p_name, p_info in l_product_info:
787         # skip product with property not_in_package set to yes
788         if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
789             continue  
790         # ignore the native and fixed products
791         if (src.product.product_is_native(p_info) 
792                 or src.product.product_is_fixed(p_info)):
793             continue
794         if p_info.get_source == "archive":
795             archive_path = p_info.archive_info.archive_name
796             archive_name = os.path.basename(archive_path)
797             d_archives[p_name] = (archive_path,
798                                   os.path.join(ARCHIVE_DIR, archive_name))
799             if (src.appli_test_property(config,"pip", "yes") and 
800                 src.product.product_test_property(p_info,"pip", "yes")):
801                 # if pip mode is activated, and product is managed by pip
802                 pip_wheels_dir=os.path.join(config.LOCAL.archive_dir,"wheels")
803                 pip_wheel_pattern=os.path.join(pip_wheels_dir, 
804                     "%s-%s*" % (p_info.name, p_info.version))
805                 pip_wheel_path=glob.glob(pip_wheel_pattern)
806                 msg_pip_not_found="Error in get_archive, pip wheel for "\
807                                   "product %s-%s was not found in %s directory"
808                 msg_pip_two_or_more="Error in get_archive, several pip wheels for "\
809                                   "product %s-%s were found in %s directory"
810                 if len(pip_wheel_path)==0:
811                     raise src.SatException(msg_pip_not_found %\
812                         (p_info.name, p_info.version, pip_wheels_dir))
813                 if len(pip_wheel_path)>1:
814                     raise src.SatException(msg_pip_two_or_more %\
815                         (p_info.name, p_info.version, pip_wheels_dir))
816
817                 pip_wheel_name=os.path.basename(pip_wheel_path[0])
818                 d_archives[p_name+" (pip wheel)"]=(pip_wheel_path[0], 
819                     os.path.join(ARCHIVE_DIR, "wheels", pip_wheel_name))
820         else:
821             # this product is not managed by archive, 
822             # an archive of the vcs directory will be created by get_archive_vcs
823             l_pinfo_vcs.append((p_name, p_info)) 
824             
825     return d_archives, l_pinfo_vcs
826
827 def add_salomeTools(config, tmp_working_dir):
828     '''Prepare a version of salomeTools that has a specific local.pyconf file 
829        configured for a source package.
830
831     :param config Config: The global configuration.
832     :param tmp_working_dir str: The temporary local directory containing some 
833                                 specific directories or files needed in the 
834                                 source package
835     :return: The path to the local salomeTools directory to add in the package
836     :rtype: str
837     '''
838     # Copy sat in the temporary working directory
839     sat_tmp_path = src.Path(os.path.join(tmp_working_dir, "salomeTools"))
840     sat_running_path = src.Path(config.VARS.salometoolsway)
841     sat_running_path.copy(sat_tmp_path)
842     
843     # Update the local.pyconf file that contains the path to the project
844     local_pyconf_name = "local.pyconf"
845     local_pyconf_dir = os.path.join(tmp_working_dir, "salomeTools", "data")
846     local_pyconf_file = os.path.join(local_pyconf_dir, local_pyconf_name)
847     # Remove the .pyconf file in the root directory of salomeTools if there is
848     # any. (For example when launching jobs, a pyconf file describing the jobs 
849     # can be here and is not useful) 
850     files_or_dir_SAT = os.listdir(os.path.join(tmp_working_dir, "salomeTools"))
851     for file_or_dir in files_or_dir_SAT:
852         if file_or_dir.endswith(".pyconf") or file_or_dir.endswith(".txt"):
853             file_path = os.path.join(tmp_working_dir,
854                                      "salomeTools",
855                                      file_or_dir)
856             os.remove(file_path)
857     
858     ff = open(local_pyconf_file, "w")
859     ff.write(LOCAL_TEMPLATE)
860     ff.close()
861     
862     return sat_tmp_path.path
863
864 def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir):
865     '''For sources package that require that all products are get using an 
866        archive, one has to create some archive for the vcs products.
867        So this method calls the clean and source command of sat and then create
868        the archives.
869
870     :param l_pinfo_vcs List: The list of specific configuration corresponding to
871                              each vcs product
872     :param sat Sat: The Sat instance that can be called to clean and source the
873                     products
874     :param config Config: The global configuration.
875     :param logger Logger: the logging instance
876     :param tmp_working_dir str: The temporary local directory containing some 
877                                 specific directories or files needed in the 
878                                 source package
879     :return: the dictionary that stores all the archives to add in the source 
880              package. {label : (path_on_local_machine, path_in_archive)}
881     :rtype: dict
882     '''
883     # clean the source directory of all the vcs products, then use the source 
884     # command and thus construct an archive that will not contain the patches
885     l_prod_names = [pn for pn, __ in l_pinfo_vcs]
886     if False: # clean is dangerous in user/SOURCES, fixed in tmp_local_working_dir
887       logger.write(_("\nclean sources\n"))
888       args_clean = config.VARS.application
889       args_clean += " --sources --products "
890       args_clean += ",".join(l_prod_names)
891       logger.write("WARNING: get_archives_vcs clean\n         '%s'\n" % args_clean, 1)
892       sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)
893     if True:
894       # source
895       logger.write(_("get sources\n"))
896       args_source = config.VARS.application
897       args_source += " --products "
898       args_source += ",".join(l_prod_names)
899       svgDir = sat.cfg.APPLICATION.workdir
900       tmp_local_working_dir = os.path.join(sat.cfg.APPLICATION.workdir, "tmp_package")  # to avoid too much big files in /tmp
901       sat.cfg.APPLICATION.workdir = tmp_local_working_dir
902       # DBG.write("SSS sat config.APPLICATION.workdir", sat.cfg.APPLICATION, True)
903       # DBG.write("sat config id", id(sat.cfg), True)
904       # shit as config is not same id() as for sat.source()
905       # sat.source(args_source, batch=True, verbose=5, logger_add_link = logger)
906       import source
907       source.run(args_source, sat, logger) #use this mode as runner.cfg reference
908       
909       # make the new archives
910       d_archives_vcs = {}
911       for pn, pinfo in l_pinfo_vcs:
912           path_archive = make_archive(pn, pinfo, tmp_local_working_dir)
913           logger.write("make archive vcs '%s'\n" % path_archive)
914           d_archives_vcs[pn] = (path_archive,
915                                 os.path.join(ARCHIVE_DIR, pn + ".tgz"))
916       sat.cfg.APPLICATION.workdir = svgDir
917       # DBG.write("END sat config", sat.cfg.APPLICATION, True)
918     return d_archives_vcs
919
920 def make_archive(prod_name, prod_info, where):
921     '''Create an archive of a product by searching its source directory.
922
923     :param prod_name str: The name of the product.
924     :param prod_info Config: The specific configuration corresponding to the 
925                              product
926     :param where str: The path of the repository where to put the resulting 
927                       archive
928     :return: The path of the resulting archive
929     :rtype: str
930     '''
931     path_targz_prod = os.path.join(where, prod_name + PACKAGE_EXT)
932     tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
933     local_path = prod_info.source_dir
934     tar_prod.add(local_path,
935                  arcname=prod_name,
936                  exclude=exclude_VCS_and_extensions)
937     tar_prod.close()
938     return path_targz_prod       
939
940 def create_project_for_src_package(config, tmp_working_dir, with_vcs, with_ftp):
941     '''Create a specific project for a source package.
942
943     :param config Config: The global configuration.
944     :param tmp_working_dir str: The temporary local directory containing some 
945                                 specific directories or files needed in the 
946                                 source package
947     :param with_vcs boolean: True if the package is with vcs products (not 
948                              transformed into archive products)
949     :param with_ftp boolean: True if the package use ftp servers to get archives
950     :return: The dictionary 
951              {"project" : (produced project, project path in the archive)}
952     :rtype: Dict
953     '''
954
955     # Create in the working temporary directory the full project tree
956     project_tmp_dir = os.path.join(tmp_working_dir, PROJECT_DIR)
957     products_pyconf_tmp_dir = os.path.join(project_tmp_dir,
958                                          "products")
959     compil_scripts_tmp_dir = os.path.join(project_tmp_dir,
960                                          "products",
961                                          "compil_scripts")
962     env_scripts_tmp_dir = os.path.join(project_tmp_dir,
963                                          "products",
964                                          "env_scripts")
965     patches_tmp_dir = os.path.join(project_tmp_dir,
966                                          "products",
967                                          "patches")
968     application_tmp_dir = os.path.join(project_tmp_dir,
969                                          "applications")
970     for directory in [project_tmp_dir,
971                       compil_scripts_tmp_dir,
972                       env_scripts_tmp_dir,
973                       patches_tmp_dir,
974                       application_tmp_dir]:
975         src.ensure_path_exists(directory)
976
977     # Create the pyconf that contains the information of the project
978     project_pyconf_name = "project.pyconf"        
979     project_pyconf_file = os.path.join(project_tmp_dir, project_pyconf_name)
980     ff = open(project_pyconf_file, "w")
981     ff.write(PROJECT_TEMPLATE)
982     if with_ftp and len(config.PATHS.ARCHIVEFTP) > 0:
983         ftp_path='ARCHIVEFTP : "'+config.PATHS.ARCHIVEFTP[0]
984         for ftpserver in config.PATHS.ARCHIVEFTP[1:]:
985             ftp_path=ftp_path+":"+ftpserver
986         ftp_path+='"'
987         ff.write("# ftp servers where to search for prerequisite archives\n")
988         ff.write(ftp_path)
989     # add licence paths if any
990     if len(config.PATHS.LICENCEPATH) > 0:  
991         licence_path='LICENCEPATH : "'+config.PATHS.LICENCEPATH[0]
992         for path in config.PATHS.LICENCEPATH[1:]:
993             licence_path=licence_path+":"+path
994         licence_path+='"'
995         ff.write("\n# Where to search for licences\n")
996         ff.write(licence_path)
997         
998
999     ff.close()
1000     
1001     # Loop over the products to get there pyconf and all the scripts 
1002     # (compilation, environment, patches)
1003     # and create the pyconf file to add to the project
1004     lproducts_name = config.APPLICATION.products.keys()
1005     l_products = src.product.get_products_infos(lproducts_name, config)
1006     for p_name, p_info in l_products:
1007         # skip product with property not_in_package set to yes
1008         if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
1009             continue  
1010         find_product_scripts_and_pyconf(p_name,
1011                                         p_info,
1012                                         config,
1013                                         with_vcs,
1014                                         compil_scripts_tmp_dir,
1015                                         env_scripts_tmp_dir,
1016                                         patches_tmp_dir,
1017                                         products_pyconf_tmp_dir)
1018     
1019     find_application_pyconf(config, application_tmp_dir)
1020     
1021     d_project = {"project" : (project_tmp_dir, PROJECT_DIR )}
1022     return d_project
1023
1024 def find_product_scripts_and_pyconf(p_name,
1025                                     p_info,
1026                                     config,
1027                                     with_vcs,
1028                                     compil_scripts_tmp_dir,
1029                                     env_scripts_tmp_dir,
1030                                     patches_tmp_dir,
1031                                     products_pyconf_tmp_dir):
1032     '''Create a specific pyconf file for a given product. Get its environment 
1033        script, its compilation script and patches and put it in the temporary
1034        working directory. This method is used in the source package in order to
1035        construct the specific project.
1036
1037     :param p_name str: The name of the product.
1038     :param p_info Config: The specific configuration corresponding to the 
1039                              product
1040     :param config Config: The global configuration.
1041     :param with_vcs boolean: True if the package is with vcs products (not 
1042                              transformed into archive products)
1043     :param compil_scripts_tmp_dir str: The path to the temporary compilation 
1044                                        scripts directory of the project.
1045     :param env_scripts_tmp_dir str: The path to the temporary environment script 
1046                                     directory of the project.
1047     :param patches_tmp_dir str: The path to the temporary patch scripts 
1048                                 directory of the project.
1049     :param products_pyconf_tmp_dir str: The path to the temporary product 
1050                                         scripts directory of the project.
1051     '''
1052     
1053     # read the pyconf of the product
1054     product_pyconf_cfg = src.pyconf.Config(p_info.from_file)
1055
1056     # find the compilation script if any
1057     if src.product.product_has_script(p_info):
1058         compil_script_path = src.Path(p_info.compil_script)
1059         compil_script_path.copy(compil_scripts_tmp_dir)
1060
1061     # find the environment script if any
1062     if src.product.product_has_env_script(p_info):
1063         env_script_path = src.Path(p_info.environ.env_script)
1064         env_script_path.copy(env_scripts_tmp_dir)
1065
1066     # find the patches if any
1067     if src.product.product_has_patches(p_info):
1068         patches = src.pyconf.Sequence()
1069         for patch_path in p_info.patches:
1070             p_path = src.Path(patch_path)
1071             p_path.copy(patches_tmp_dir)
1072             patches.append(os.path.basename(patch_path), "")
1073
1074     if (not with_vcs) and src.product.product_is_vcs(p_info):
1075         # in non vcs mode, if the product is not archive, then make it become archive.
1076
1077         # depending upon the incremental mode, select impacted sections
1078         if "properties" in p_info and "incremental" in p_info.properties and\
1079             p_info.properties.incremental == "yes":
1080             sections = ["default", "default_win", p_info.section, p_info.section+"_win"]
1081         else:
1082             sections = [p_info.section]
1083         for section in sections:
1084             if section in product_pyconf_cfg and "get_source" in product_pyconf_cfg[section]:
1085                 DBG.write("sat package set archive mode to archive for product %s and section %s" %\
1086                           (p_name,section))
1087                 product_pyconf_cfg[section].get_source = "archive"
1088                 if not "archive_info" in product_pyconf_cfg[section]:
1089                     product_pyconf_cfg[section].addMapping("archive_info",
1090                                         src.pyconf.Mapping(product_pyconf_cfg),
1091                                         "")
1092                     product_pyconf_cfg[section].archive_info.archive_name =\
1093                         p_info.name + ".tgz"
1094     
1095     # write the pyconf file to the temporary project location
1096     product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir,
1097                                            p_name + ".pyconf")
1098     ff = open(product_tmp_pyconf_path, 'w')
1099     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1100     product_pyconf_cfg.__save__(ff, 1)
1101     ff.close()
1102
1103 def find_application_pyconf(config, application_tmp_dir):
1104     '''Find the application pyconf file and put it in the specific temporary 
1105        directory containing the specific project of a source package.
1106
1107     :param config Config: The global configuration.
1108     :param application_tmp_dir str: The path to the temporary application 
1109                                        scripts directory of the project.
1110     '''
1111     # read the pyconf of the application
1112     application_name = config.VARS.application
1113     application_pyconf_path = src.find_file_in_lpath(
1114                                             application_name + ".pyconf",
1115                                             config.PATHS.APPLICATIONPATH)
1116     application_pyconf_cfg = src.pyconf.Config(application_pyconf_path)
1117     
1118     # Change the workdir
1119     application_pyconf_cfg.APPLICATION.workdir = src.pyconf.Reference(
1120                                     application_pyconf_cfg,
1121                                     src.pyconf.DOLLAR,
1122                                     'VARS.salometoolsway + $VARS.sep + ".."')
1123
1124     # Prevent from compilation in base
1125     application_pyconf_cfg.APPLICATION.base = "no"
1126     
1127     #remove products that are not in config (which were filtered by --without_properties)
1128     for product_name in application_pyconf_cfg.APPLICATION.products.keys():
1129         if product_name not in config.APPLICATION.products.keys():
1130             application_pyconf_cfg.APPLICATION.products.__delitem__(product_name)
1131
1132     # write the pyconf file to the temporary application location
1133     application_tmp_pyconf_path = os.path.join(application_tmp_dir,
1134                                                application_name + ".pyconf")
1135
1136     ff = open(application_tmp_pyconf_path, 'w')
1137     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1138     application_pyconf_cfg.__save__(ff, 1)
1139     ff.close()
1140
1141 def sat_package(config, tmp_working_dir, options, logger):
1142     '''Prepare a dictionary that stores all the needed directories and files to
1143        add in a salomeTool package.
1144     
1145     :param tmp_working_dir str: The temporary local working directory 
1146     :param options OptResult: the options of the launched command
1147     :return: the dictionary that stores all the needed directories and files to
1148              add in a salomeTool package.
1149              {label : (path_on_local_machine, path_in_archive)}
1150     :rtype: dict
1151     '''
1152     d_project = {}
1153
1154     # we include sat himself
1155     d_project["all_sat"]=(config.VARS.salometoolsway, "")
1156
1157     # and we overwrite local.pyconf with a clean wersion.
1158     local_pyconf_tmp_path = os.path.join(tmp_working_dir, "local.pyconf")
1159     local_file_path = os.path.join(config.VARS.datadir, "local.pyconf")
1160     local_cfg = src.pyconf.Config(local_file_path)
1161     local_cfg.PROJECTS.project_file_paths=src.pyconf.Sequence(local_cfg.PROJECTS)
1162     local_cfg.LOCAL["base"] = "default"
1163     local_cfg.LOCAL["workdir"] = "default"
1164     local_cfg.LOCAL["log_dir"] = "default"
1165     local_cfg.LOCAL["archive_dir"] = "default"
1166     local_cfg.LOCAL["VCS"] = "None"
1167     local_cfg.LOCAL["tag"] = src.get_salometool_version(config)
1168
1169     # if the archive contains a project, we write its relative path in local.pyconf
1170     if options.project:
1171         project_arch_path = os.path.join("projects", options.project, 
1172                                          os.path.basename(options.project_file_path))
1173         local_cfg.PROJECTS.project_file_paths.append(project_arch_path, "")
1174
1175     ff = open(local_pyconf_tmp_path, 'w')
1176     local_cfg.__save__(ff, 1)
1177     ff.close()
1178     d_project["local.pyconf"]=(local_pyconf_tmp_path, "data/local.pyconf")
1179     return d_project
1180     
1181
1182 def project_package(config, name_project, project_file_path, ftp_mode, tmp_working_dir, embedded_in_sat, logger):
1183     '''Prepare a dictionary that stores all the needed directories and files to
1184        add in a project package.
1185     
1186     :param project_file_path str: The path to the local project.
1187     :param ftp_mode boolean: Do not embed archives, the archive will rely on ftp mode to retrieve them.
1188     :param tmp_working_dir str: The temporary local directory containing some 
1189                                 specific directories or files needed in the 
1190                                 project package
1191     :param embedded_in_sat boolean : the project package is embedded in a sat package
1192     :return: the dictionary that stores all the needed directories and files to
1193              add in a project package.
1194              {label : (path_on_local_machine, path_in_archive)}
1195     :rtype: dict
1196     '''
1197     d_project = {}
1198     # Read the project file and get the directories to add to the package
1199     
1200     try: 
1201       project_pyconf_cfg = config.PROJECTS.projects.__getattr__(name_project)
1202     except:
1203       logger.write("""
1204 WARNING: inexisting config.PROJECTS.projects.%s, try to read now from:\n%s\n""" % (name_project, project_file_path))
1205       project_pyconf_cfg = src.pyconf.Config(project_file_path)
1206       project_pyconf_cfg.PWD = os.path.dirname(project_file_path)
1207     
1208     paths = {"APPLICATIONPATH" : "applications",
1209              "PRODUCTPATH" : "products",
1210              "JOBPATH" : "jobs",
1211              "MACHINEPATH" : "machines"}
1212     if not ftp_mode:
1213         paths["ARCHIVEPATH"] = "archives"
1214
1215     # Loop over the project paths and add it
1216     project_file_name = os.path.basename(project_file_path)
1217     for path in paths:
1218         if path not in project_pyconf_cfg:
1219             continue
1220         if embedded_in_sat:
1221             dest_path = os.path.join("projects", name_project, paths[path])
1222             project_file_dest = os.path.join("projects", name_project, project_file_name)
1223         else:
1224             dest_path = paths[path]
1225             project_file_dest = project_file_name
1226
1227         # Add the directory to the files to add in the package
1228         d_project[path] = (project_pyconf_cfg[path], dest_path)
1229
1230         # Modify the value of the path in the package
1231         project_pyconf_cfg[path] = src.pyconf.Reference(
1232                                     project_pyconf_cfg,
1233                                     src.pyconf.DOLLAR,
1234                                     'project_path + "/' + paths[path] + '"')
1235     
1236     # Modify some values
1237     if "project_path" not in project_pyconf_cfg:
1238         project_pyconf_cfg.addMapping("project_path",
1239                                       src.pyconf.Mapping(project_pyconf_cfg),
1240                                       "")
1241     project_pyconf_cfg.project_path = src.pyconf.Reference(project_pyconf_cfg,
1242                                                            src.pyconf.DOLLAR,
1243                                                            'PWD')
1244     # we don't want to export these two fields
1245     project_pyconf_cfg.__delitem__("file_path")
1246     project_pyconf_cfg.__delitem__("PWD")
1247     if ftp_mode:
1248         project_pyconf_cfg.__delitem__("ARCHIVEPATH")
1249     
1250     # Write the project pyconf file
1251     project_pyconf_tmp_path = os.path.join(tmp_working_dir, project_file_name)
1252     ff = open(project_pyconf_tmp_path, 'w')
1253     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1254     project_pyconf_cfg.__save__(ff, 1)
1255     ff.close()
1256     d_project["Project hat file"] = (project_pyconf_tmp_path, project_file_dest)
1257     
1258     return d_project
1259
1260 def add_readme(config, options, where):
1261     readme_path = os.path.join(where, "README")
1262     with codecs.open(readme_path, "w", 'utf-8') as f:
1263
1264     # templates for building the header
1265         readme_header="""
1266 # This package was generated with sat $version
1267 # Date: $date
1268 # User: $user
1269 # Distribution : $dist
1270
1271 In the following, $$ROOT represents the directory where you have installed 
1272 SALOME (the directory where this file is located).
1273
1274 """
1275         if src.architecture.is_windows():
1276             readme_header = readme_header.replace('$$ROOT','%ROOT%')
1277         readme_compilation_with_binaries="""
1278
1279 compilation based on the binaries used as prerequisites
1280 =======================================================
1281
1282 If you fail to compile the complete application (for example because
1283 you are not root on your system and cannot install missing packages), you
1284 may try a partial compilation based on the binaries.
1285 For that it is necessary to copy the binaries from BINARIES to INSTALL,
1286 and do some substitutions on cmake and .la files (replace the build directories
1287 with local paths).
1288 The procedure to do it is:
1289  1) Remove or rename INSTALL directory if it exists
1290  2) Execute the shell script install_bin.sh:
1291  > cd $ROOT
1292  > ./install_bin.sh
1293  3) Use SalomeTool (as explained in Sources section) and compile only the 
1294     modules you need to (with -p option)
1295
1296 """
1297         readme_header_tpl=string.Template(readme_header)
1298         readme_template_path_bin = os.path.join(config.VARS.internal_dir,
1299                 "README_BIN.template")
1300         readme_template_path_bin_launcher = os.path.join(config.VARS.internal_dir,
1301                 "README_LAUNCHER.template")
1302         readme_template_path_bin_virtapp = os.path.join(config.VARS.internal_dir,
1303                 "README_BIN_VIRTUAL_APP.template")
1304         readme_template_path_src = os.path.join(config.VARS.internal_dir,
1305                 "README_SRC.template")
1306         readme_template_path_pro = os.path.join(config.VARS.internal_dir,
1307                 "README_PROJECT.template")
1308         readme_template_path_sat = os.path.join(config.VARS.internal_dir,
1309                 "README_SAT.template")
1310
1311         # prepare substitution dictionary
1312         d = dict()
1313         d['user'] = config.VARS.user
1314         d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
1315         d['version'] = src.get_salometool_version(config)
1316         d['dist'] = config.VARS.dist
1317         f.write(readme_header_tpl.substitute(d)) # write the general header (common)
1318
1319         if options.binaries or options.sources:
1320             d['application'] = config.VARS.application
1321             d['BINARIES']    = config.INTERNAL.config.install_dir
1322             d['SEPARATOR'] = config.VARS.sep
1323             if src.architecture.is_windows():
1324                 d['operatingSystem'] = 'Windows'
1325                 d['PYTHON3'] = 'python3'
1326                 d['ROOT']    = '%ROOT%'
1327             else:
1328                 d['operatingSystem'] = 'Linux'
1329                 d['PYTHON3'] = ''
1330                 d['ROOT']    = '$ROOT'
1331             f.write("# Application: " + d['application'] + "\n")
1332             if 'KERNEL' in config.APPLICATION.products:
1333                 VersionSalome = src.get_salome_version(config)
1334                 # Case where SALOME has the launcher that uses the SalomeContext API
1335                 if VersionSalome >= 730:
1336                     d['launcher'] = config.APPLICATION.profile.launcher_name
1337                 else:
1338                     d['virtual_app'] = 'runAppli' # this info is not used now)
1339
1340         # write the specific sections
1341         if options.binaries:
1342             f.write(src.template.substitute(readme_template_path_bin, d))
1343             if "virtual_app" in d:
1344                 f.write(src.template.substitute(readme_template_path_bin_virtapp, d))
1345             if "launcher" in d:
1346                 f.write(src.template.substitute(readme_template_path_bin_launcher, d))
1347
1348         if options.sources:
1349             f.write(src.template.substitute(readme_template_path_src, d))
1350
1351         if options.binaries and options.sources and not src.architecture.is_windows():
1352             f.write(readme_compilation_with_binaries)
1353
1354         if options.project:
1355             f.write(src.template.substitute(readme_template_path_pro, d))
1356
1357         if options.sat:
1358             f.write(src.template.substitute(readme_template_path_sat, d))
1359     
1360     return readme_path
1361
1362 def update_config(config, prop, value):
1363     '''Remove from config.APPLICATION.products the products that have the property given as input.
1364     
1365     :param config Config: The global config.
1366     :param prop str: The property to filter
1367     :param value str: The value of the property to filter
1368     '''
1369     # if there is no APPLICATION (ex sat package -t) : nothing to do
1370     if "APPLICATION" in config:
1371         l_product_to_remove = []
1372         for product_name in config.APPLICATION.products.keys():
1373             prod_cfg = src.product.get_product_config(config, product_name)
1374             if src.get_property_in_product_cfg(prod_cfg, prop) == value:
1375                 l_product_to_remove.append(product_name)
1376         for product_name in l_product_to_remove:
1377             config.APPLICATION.products.__delitem__(product_name)
1378
1379 def description():
1380     '''method that is called when salomeTools is called with --help option.
1381     
1382     :return: The text to display for the package command description.
1383     :rtype: str
1384     '''
1385     return _("""
1386 The package command creates a tar file archive of a product.
1387 There are four kinds of archive, which can be mixed:
1388
1389  1 - The binary archive. 
1390      It contains the product installation directories plus a launcher.
1391  2 - The sources archive. 
1392      It contains the product archives, a project (the application plus salomeTools).
1393  3 - The project archive. 
1394      It contains a project (give the project file path as argument).
1395  4 - The salomeTools archive. 
1396      It contains code utility salomeTools.
1397
1398 example:
1399  >> sat package SALOME-master --binaries --sources""")
1400   
1401 def run(args, runner, logger):
1402     '''method that is called when salomeTools is called with package parameter.
1403     '''
1404     
1405     # Parse the options
1406     (options, args) = parser.parse_args(args)
1407
1408     # Check that a type of package is called, and only one
1409     all_option_types = (options.binaries,
1410                         options.sources,
1411                         options.project not in ["", None],
1412                         options.sat)
1413
1414     # Check if no option for package type
1415     if all_option_types.count(True) == 0:
1416         msg = _("Error: Precise a type for the package\nUse one of the "
1417                 "following options: --binaries, --sources, --project or"
1418                 " --salometools")
1419         logger.write(src.printcolors.printcError(msg), 1)
1420         logger.write("\n", 1)
1421         return 1
1422     
1423     # The repository where to put the package if not Binary or Source
1424     package_default_path = runner.cfg.LOCAL.workdir
1425     
1426     # if the package contains binaries or sources:
1427     if options.binaries or options.sources:
1428         # Check that the command has been called with an application
1429         src.check_config_has_application(runner.cfg)
1430
1431         # Display information
1432         logger.write(_("Packaging application %s\n") % src.printcolors.printcLabel(
1433                                                     runner.cfg.VARS.application), 1)
1434         
1435         # Get the default directory where to put the packages
1436         package_default_path = os.path.join(runner.cfg.APPLICATION.workdir, "PACKAGE")
1437         src.ensure_path_exists(package_default_path)
1438         
1439     # if the package contains a project:
1440     if options.project:
1441         # check that the project is visible by SAT
1442         projectNameFile = options.project + ".pyconf"
1443         foundProject = None
1444         for i in runner.cfg.PROJECTS.project_file_paths:
1445             baseName = os.path.basename(i)
1446             if baseName == projectNameFile:
1447                 foundProject = i
1448                 break
1449
1450         if foundProject is None:
1451             local_path = os.path.join(runner.cfg.VARS.salometoolsway, "data", "local.pyconf")
1452             msg = _("""ERROR: the project %(1)s is not visible by salomeTools.
1453 known projects are:
1454 %(2)s
1455
1456 Please add it in file:
1457 %(3)s""" % \
1458                     {"1": options.project, "2": "\n  ".join(runner.cfg.PROJECTS.project_file_paths), "3": local_path})
1459             logger.write(src.printcolors.printcError(msg), 1)
1460             logger.write("\n", 1)
1461             return 1
1462         else:
1463             options.project_file_path = foundProject
1464             src.printcolors.print_value(logger, "Project path", options.project_file_path, 2)
1465     
1466     # Remove the products that are filtered by the --without_properties option
1467     if options.without_properties:
1468         app = runner.cfg.APPLICATION
1469         logger.trace("without_properties all products:\n %s\n" % PP.pformat(sorted(app.products.keys())))
1470         prop, value = options.without_properties
1471         update_config(runner.cfg, prop, value)
1472         logger.warning("without_properties selected products:\n %s\n" % PP.pformat(sorted(app.products.keys())))
1473
1474     # Remove from config the products that have the not_in_package property
1475     update_config(runner.cfg, "not_in_package", "yes")
1476     
1477     # get the name of the archive or build it
1478     if options.name:
1479         if os.path.basename(options.name) == options.name:
1480             # only a name (not a path)
1481             archive_name = options.name           
1482             dir_name = package_default_path
1483         else:
1484             archive_name = os.path.basename(options.name)
1485             dir_name = os.path.dirname(options.name)
1486         
1487         # suppress extension
1488         if archive_name[-len(".tgz"):] == ".tgz":
1489             archive_name = archive_name[:-len(".tgz")]
1490         if archive_name[-len(".tar.gz"):] == ".tar.gz":
1491             archive_name = archive_name[:-len(".tar.gz")]
1492         
1493     else:
1494         archive_name=""
1495         dir_name = package_default_path
1496         if options.binaries or options.sources:
1497             archive_name = runner.cfg.APPLICATION.name
1498
1499         if options.binaries:
1500             archive_name += "-"+runner.cfg.VARS.dist
1501             
1502         if options.sources:
1503             archive_name += "-SRC"
1504             if options.with_vcs:
1505                 archive_name += "-VCS"
1506
1507         if options.sat:
1508             archive_name += ("salomeTools_" + src.get_salometool_version(runner.cfg))
1509
1510         if options.project:
1511             if options.sat:
1512                 archive_name += "_" 
1513             project_name = options.project
1514             archive_name += ("satproject_" + project_name)
1515  
1516         if len(archive_name)==0: # no option worked 
1517             msg = _("Error: Cannot name the archive\n"
1518                     " check if at least one of the following options was "
1519                     "selected : --binaries, --sources, --project or"
1520                     " --salometools")
1521             logger.write(src.printcolors.printcError(msg), 1)
1522             logger.write("\n", 1)
1523             return 1
1524  
1525     path_targz = os.path.join(dir_name, archive_name + PACKAGE_EXT)
1526     
1527     src.printcolors.print_value(logger, "Package path", path_targz, 2)
1528
1529     # Create a working directory for all files that are produced during the
1530     # package creation and that will be removed at the end of the command
1531     tmp_working_dir = os.path.join(runner.cfg.VARS.tmp_root, runner.cfg.VARS.datehour)
1532     src.ensure_path_exists(tmp_working_dir)
1533     logger.write("\n", 5)
1534     logger.write(_("The temporary working directory: %s\n" % tmp_working_dir),5)
1535     
1536     logger.write("\n", 3)
1537
1538     msg = _("Preparation of files to add to the archive")
1539     logger.write(src.printcolors.printcLabel(msg), 2)
1540     logger.write("\n", 2)
1541     
1542     d_files_to_add={}  # content of the archive
1543
1544     # a dict to hold paths that will need to be substitute for users recompilations
1545     d_paths_to_substitute={}  
1546
1547     if options.binaries:
1548         d_bin_files_to_add = binary_package(runner.cfg,
1549                                             logger,
1550                                             options,
1551                                             tmp_working_dir)
1552         # for all binaries dir, store the substitution that will be required 
1553         # for extra compilations
1554         for key in d_bin_files_to_add:
1555             if key.endswith("(bin)"):
1556                 source_dir = d_bin_files_to_add[key][0]
1557                 path_in_archive = d_bin_files_to_add[key][1].replace(
1558                    runner.cfg.INTERNAL.config.binary_dir + runner.cfg.VARS.dist,
1559                    runner.cfg.INTERNAL.config.install_dir)
1560                 if os.path.basename(source_dir)==os.path.basename(path_in_archive):
1561                     # if basename is the same we will just substitute the dirname 
1562                     d_paths_to_substitute[os.path.dirname(source_dir)]=\
1563                         os.path.dirname(path_in_archive)
1564                 else:
1565                     d_paths_to_substitute[source_dir]=path_in_archive
1566
1567         d_files_to_add.update(d_bin_files_to_add)
1568     if options.sources:
1569         d_files_to_add.update(source_package(runner,
1570                                         runner.cfg,
1571                                         logger, 
1572                                         options,
1573                                         tmp_working_dir))
1574         if options.binaries:
1575             # for archives with bin and sources we provide a shell script able to 
1576             # install binaries for compilation
1577             file_install_bin=produce_install_bin_file(runner.cfg,logger,
1578                                                       tmp_working_dir,
1579                                                       d_paths_to_substitute,
1580                                                       "install_bin.sh")
1581             d_files_to_add.update({"install_bin" : (file_install_bin, "install_bin.sh")})
1582             logger.write("substitutions that need to be done later : \n", 5)
1583             logger.write(str(d_paths_to_substitute), 5)
1584             logger.write("\n", 5)
1585     else:
1586         # --salomeTool option is not considered when --sources is selected, as this option
1587         # already brings salomeTool!
1588         if options.sat:
1589             d_files_to_add.update(sat_package(runner.cfg, tmp_working_dir, 
1590                                   options, logger))
1591         
1592     if options.project:
1593         DBG.write("config for package %s" % project_name, runner.cfg)
1594         d_files_to_add.update(project_package(runner.cfg, project_name, options.project_file_path, options.ftp, tmp_working_dir, options.sat, logger))
1595
1596     if not(d_files_to_add):
1597         msg = _("Error: Empty dictionnary to build the archive!\n")
1598         logger.write(src.printcolors.printcError(msg), 1)
1599         logger.write("\n", 1)
1600         return 1
1601
1602     # Add the README file in the package
1603     local_readme_tmp_path = add_readme(runner.cfg, options, tmp_working_dir)
1604     d_files_to_add["README"] = (local_readme_tmp_path, "README")
1605
1606     # Add the additional files of option add_files
1607     if options.add_files:
1608         for file_path in options.add_files:
1609             if not os.path.exists(file_path):
1610                 msg = _("WARNING: the file %s is not accessible.\n" % file_path)
1611                 continue
1612             file_name = os.path.basename(file_path)
1613             d_files_to_add[file_name] = (file_path, file_name)
1614
1615     logger.write("\n", 2)
1616     logger.write(src.printcolors.printcLabel(_("Actually do the package")), 2)
1617     logger.write("\n", 2)
1618     logger.write("\nfiles and directories to add:\n%s\n\n" % PP.pformat(d_files_to_add), 5)
1619
1620     res = 0
1621     try:
1622         # Creating the object tarfile
1623         tar = tarfile.open(path_targz, mode='w:gz')
1624         
1625         # get the filtering function if needed
1626         filter_function = exclude_VCS_and_extensions
1627
1628         # Add the files to the tarfile object
1629         res = add_files(tar, archive_name, d_files_to_add, logger, f_exclude=filter_function)
1630         tar.close()
1631     except KeyboardInterrupt:
1632         logger.write(src.printcolors.printcError("\nERROR: forced interruption\n"), 1)
1633         logger.write(_("Removing the temporary working directory '%s'... ") % tmp_working_dir, 1)
1634         # remove the working directory
1635         shutil.rmtree(tmp_working_dir)
1636         logger.write(_("OK"), 1)
1637         logger.write(_("\n"), 1)
1638         return 1
1639     
1640     # case if no application, only package sat as 'sat package -t'
1641     try:
1642         app = runner.cfg.APPLICATION
1643     except:
1644         app = None
1645
1646     # unconditionaly remove the tmp_local_working_dir
1647     if app is not None:
1648         tmp_local_working_dir = os.path.join(app.workdir, "tmp_package")
1649         if os.path.isdir(tmp_local_working_dir):
1650             shutil.rmtree(tmp_local_working_dir)
1651
1652     # remove the tmp directory, unless user has registered as developer
1653     if os.path.isdir(tmp_working_dir) and (not DBG.isDeveloper()):
1654         shutil.rmtree(tmp_working_dir)
1655     
1656     # Print again the path of the package
1657     logger.write("\n", 2)
1658     src.printcolors.print_value(logger, "Package path", path_targz, 2)
1659     
1660     return res