Unification of the Python and exe launchers, to switch the salome launcher to exe mode
[tools/sat.git] / commands / package.py
1 #!/usr/bin/env python
2 #-*- coding:utf-8 -*-
3 #  Copyright (C) 2010-2012  CEA/DEN
4 #
5 #  This library is free software; you can redistribute it and/or
6 #  modify it under the terms of the GNU Lesser General Public
7 #  License as published by the Free Software Foundation; either
8 #  version 2.1 of the License.
9 #
10 #  This library is distributed in the hope that it will be useful,
11 #  but WITHOUT ANY WARRANTY; without even the implied warranty of
12 #  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13 #  Lesser General Public License for more details.
14 #
15 #  You should have received a copy of the GNU Lesser General Public
16 #  License along with this library; if not, write to the Free Software
17 #  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
18
19 import os
20 import stat
21 import shutil
22 import datetime
23 import tarfile
24 import codecs
25 import string
26 import glob
27 import pprint as PP
28
29 import src
30
31 from application import get_SALOME_modules
32 import src.debug as DBG
33
34 BINARY = "binary"
35 SOURCE = "Source"
36 PROJECT = "Project"
37 SAT = "Sat"
38
39 ARCHIVE_DIR = "ARCHIVES"
40 PROJECT_DIR = "PROJECT"
41
42 IGNORED_DIRS = [".git", ".svn"]
43 IGNORED_EXTENSIONS = []
44
45 PACKAGE_EXT=".tar.gz" # the extension we use for the packages
46
47 PROJECT_TEMPLATE = """#!/usr/bin/env python
48 #-*- coding:utf-8 -*-
49
50 # The path to the archive root directory
51 root_path : $PWD + "/../"
52 # path to the PROJECT
53 project_path : $PWD + "/"
54
55 # Where to search the archives of the products
56 ARCHIVEPATH : $root_path + "ARCHIVES"
57 # Where to search the pyconf of the applications
58 APPLICATIONPATH : $project_path + "applications/"
59 # Where to search the pyconf of the products
60 PRODUCTPATH : $project_path + "products/"
61 # Where to search the pyconf of the jobs of the project
62 JOBPATH : $project_path + "jobs/"
63 # Where to search the pyconf of the machines of the project
64 MACHINEPATH : $project_path + "machines/"
65 """
66
67 LOCAL_TEMPLATE = ("""#!/usr/bin/env python
68 #-*- coding:utf-8 -*-
69
70   LOCAL :
71   {
72     base : 'default'
73     workdir : 'default'
74     log_dir : 'default'
75     archive_dir : 'default'
76     VCS : None
77     tag : None
78   }
79
80 PROJECTS :
81 {
82 project_file_paths : [$VARS.salometoolsway + $VARS.sep + \"..\" + $VARS.sep"""
83 """ + \"""" + PROJECT_DIR + """\" + $VARS.sep + "project.pyconf"]
84 }
85 """)
86
87 # Define all the possible options for the package command:  sat package <options>
88 parser = src.options.Options()
89 parser.add_option('b', 'binaries', 'boolean', 'binaries',
90     _('Optional: Produce a binary package.'), False)
91 parser.add_option('f', 'force_creation', 'boolean', 'force_creation',
92     _('Optional: Only binary package: produce the archive even if '
93       'there are some missing products.'), False)
94 parser.add_option('s', 'sources', 'boolean', 'sources',
95     _('Optional: Produce a compilable archive of the sources of the '
96       'application.'), False)
97 parser.add_option('', 'with_vcs', 'boolean', 'with_vcs',
98     _('Optional: Do not make archives for products in VCS mode (git, cvs, svn). '
99       'Sat prepare will use VCS mode instead to retrieve them.'),
100     False)
101 parser.add_option('', 'ftp', 'boolean', 'ftp',
102     _('Optional: Do not embed archives for products in archive mode. '
103       'Sat prepare will use ftp instead to retrieve them.'),
104     False)
105 parser.add_option('p', 'project', 'string', 'project',
106     _('Optional: Produce an archive that contains a project.'), "")
107 parser.add_option('t', 'salometools', 'boolean', 'sat',
108     _('Optional: Produce an archive that contains salomeTools.'), False)
109 parser.add_option('n', 'name', 'string', 'name',
110     _('Optional: The name or full path of the archive.'), None)
111 parser.add_option('', 'add_files', 'list2', 'add_files',
112     _('Optional: The list of additional files to add to the archive.'), [])
113 parser.add_option('', 'without_properties', 'properties', 'without_properties',
114     _('Optional: Filter the products by their properties.\n\tSyntax: '
115       '--without_properties <property>:<value>'))
116
117
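# Editor's note -- typical invocations of this command (illustrative only: the
# application name and output path below are hypothetical, not values shipped
# with sat):
#   ./sat package <application> --binaries --name /tmp/my_binary_package
#   ./sat package <application> --sources --with_vcs
#   ./sat package --salometools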
118 def add_files(tar, name_archive, d_content, logger, f_exclude=None):
119     '''Create an archive containing all directories and files that are given in
120        the d_content argument.
121     
122     :param tar tarfile: The tarfile instance used to make the archive.
123     :param name_archive str: The name of the archive to make.
124     :param d_content dict: The dictionary that contains all directories and files
125                            to add to the archive.
126                            d_content[label] = 
127                                         (path_on_local_machine, path_in_archive)
128     :param logger Logger: the logging instance
129     :param f_exclude Function: the function that filters
130     :return: 0 if success, 1 if not.
131     :rtype: int
132     '''
133     # get the max length of the labels in order to align the display
134     max_len = len(max(d_content.keys(), key=len))
135     
136     success = 0
137     # loop over each directory or file stored in the d_content dictionary
138     names = sorted(d_content.keys())
139     DBG.write("add tar names", names)
140
141     # used to avoid duplications (for pip install in python, or single_install_dir cases)
142     already_added=set() 
143     for name in names:
144         # display information
145         len_points = max_len - len(name) + 3
146         local_path, archive_path = d_content[name]
147         in_archive = os.path.join(name_archive, archive_path)
148         logger.write(name + " " + len_points * "." + " "+ in_archive + " ", 3)
149         # Get the local path and the path in archive 
150         # of the directory or file to add
151         # Add it in the archive
152         try:
153             key=local_path+"->"+in_archive
154             if key not in already_added:
155                 tar.add(local_path, arcname=in_archive, exclude=f_exclude)
156                 already_added.add(key)
157             logger.write(src.printcolors.printcSuccess(_("OK")), 3)
158         except Exception as e:
159             logger.write(src.printcolors.printcError(_("KO ")), 3)
160             logger.write(str(e), 3)
161             success = 1
162         logger.write("\n", 3)
163     return success
164
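# Editor's note: a minimal sketch of how add_files() is typically driven.  The
# archive name, the label and the local path below are hypothetical examples;
# only the call shape matters.
def _example_add_files_usage(logger):
    import tarfile
    tar = tarfile.open("/tmp/example" + PACKAGE_EXT, mode='w:gz')
    d_content = {"KERNEL (bin)": ("/tmp/INSTALL/KERNEL", "BINARIES-CO7/KERNEL")}
    res = add_files(tar, "example_archive", d_content, logger,
                    f_exclude=exclude_VCS_and_extensions)
    tar.close()
    return res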
165 def exclude_VCS_and_extensions(filename):
166     ''' The function used to exclude from the package the links to the
167         VCS repositories (like .git) and the files with ignored extensions.
168
169     :param filename Str: The filename to exclude (or not).
170     :return: True if the file has to be excluded
171     :rtype: Boolean
172     '''
173     for dir_name in IGNORED_DIRS:
174         if dir_name in filename:
175             return True
176     for extension in IGNORED_EXTENSIONS:
177         if filename.endswith(extension):
178             return True
179     return False
180
181 def produce_relative_launcher(config,
182                               logger,
183                               file_dir,
184                               file_name,
185                               binaries_dir_name):
186     '''Create a specific SALOME launcher for the binary package. This launcher 
187        uses relative paths.
188     
189     :param config Config: The global configuration.
190     :param logger Logger: the logging instance
191     :param file_dir str: the directory where to put the launcher
192     :param file_name str: The launcher name
193     :param binaries_dir_name str: the name of the directory that contains the
194                                   binaries, inside the archive.
195     :return: the path of the produced launcher
196     :rtype: str
197     '''
198     
199     # get KERNEL installation path 
200     kernel_info = src.product.get_product_config(config, "KERNEL")
201     kernel_base_name=os.path.basename(kernel_info.install_dir)
202     if kernel_base_name.startswith("config"):
203         # case of kernel installed in base. We remove "config-i"
204         kernel_base_name=os.path.basename(os.path.dirname(kernel_info.install_dir))
205     
206     kernel_root_dir = os.path.join(binaries_dir_name, kernel_base_name)
207
208     # set kernel bin dir (considering fhs property)
209     kernel_cfg = src.product.get_product_config(config, "KERNEL")
210     if src.get_property_in_product_cfg(kernel_cfg, "fhs"):
211         bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin") 
212     else:
213         bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin","salome") 
214
215     # check if the application contains an application module
216     # check also if the application has a distene product, 
217     # in this case get its licence file name
218     l_product_info = src.product.get_products_infos(config.APPLICATION.products.keys(), config)
219     salome_application_name="Not defined" 
220     distene_licence_file_name=False
221     for prod_name, prod_info in l_product_info:
222         # look for a "salome application" and a distene product
223         if src.get_property_in_product_cfg(prod_info, "is_distene") == "yes":
224             distene_licence_file_name = src.product.product_has_licence(prod_info, 
225                                             config.PATHS.LICENCEPATH) 
226         if src.get_property_in_product_cfg(prod_info, "is_salome_application") == "yes":
227             salome_application_name=prod_info.name
228
229     # if the application contains an application module, we set ABSOLUTE_APPLI_PATH to it
230     # if not we set it to KERNEL_INSTALL_DIR, which is sufficient, except for salome test
231     if salome_application_name == "Not defined":
232         app_root_dir=kernel_root_dir
233     else:
234         app_root_dir=os.path.join(binaries_dir_name, salome_application_name)
235
236     additional_env={}
237     additional_env['sat_bin_kernel_install_dir'] = "out_dir_Path + " +\
238                                                    config.VARS.sep + bin_kernel_install_dir
239     if "python3" in config.APPLICATION and config.APPLICATION.python3 == "yes":
240         additional_env['sat_python_version'] = 3
241     else:
242         additional_env['sat_python_version'] = 2
243
244     additional_env['ABSOLUTE_APPLI_PATH'] = "out_dir_Path" + config.VARS.sep + app_root_dir
245
246     # create an environment file writer
247     writer = src.environment.FileEnvWriter(config,
248                                            logger,
249                                            file_dir,
250                                            src_root=None,
251                                            env_info=None)
252     
253     filepath = os.path.join(file_dir, file_name)
254     # Write
255     writer.write_env_file(filepath,
256                           False,  # for launch
257                           "cfgForPy",
258                           additional_env=additional_env,
259                           no_path_init="False",
260                           for_package = binaries_dir_name)
261     
262     # Little hack to put out_dir_Path outside the strings
263     src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' )
264     src.replace_in_file(filepath, "r'out_dir_Path + ", "out_dir_Path + r'" )
265     
266     # A hack to put a call to a file for distene licence.
267     # It does nothing to an application that has no distene product
268     if distene_licence_file_name:
269         logger.write("Application has a distene licence file! We use it in package launcher", 5)
270         hack_for_distene_licence(filepath, distene_licence_file_name)
271        
272     # change the rights in order to make the file executable for everybody
273     os.chmod(filepath,
274              stat.S_IRUSR |
275              stat.S_IRGRP |
276              stat.S_IROTH |
277              stat.S_IWUSR |
278              stat.S_IXUSR |
279              stat.S_IXGRP |
280              stat.S_IXOTH)
281
282     return filepath
283
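# Editor's note: an illustrative call, assuming a sat Config already loaded for
# an application that contains KERNEL; the working directory below is
# hypothetical.
def _example_produce_launcher(config, logger):
    binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
    return produce_relative_launcher(config,
                                     logger,
                                     "/tmp/package_work_dir",  # hypothetical
                                     src.get_launcher_name(config),
                                     binaries_dir_name)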
284 def hack_for_distene_licence(filepath, licence_file):
285     '''Replace the distene licence env variable by a call to a file.
286     
287     :param filepath Str: The path to the launcher to modify.
        :param licence_file Str: The path to the distene licence file to call.
288     '''
289     shutil.move(filepath, filepath + "_old")
290     fileout= filepath
291     filein = filepath + "_old"
292     fin = open(filein, "r")
293     fout = open(fileout, "w")
294     text = fin.readlines()
295     # Find the Distene section
296     num_line = -1
297     for i,line in enumerate(text):
298         if "# Set DISTENE License" in line:
299             num_line = i
300             break
301     if num_line == -1:
302         # No distene product, there is nothing to do
303         fin.close()
304         for line in text:
305             fout.write(line)
306         fout.close()
307         return
308     del text[num_line +1]
309     del text[num_line +1]
310     text_to_insert ="""    try:
311         distene_licence_file=r"%s"
312         if sys.version_info[0] >= 3 and sys.version_info[1] >= 5:
313             import importlib.util
314             spec_dist = importlib.util.spec_from_file_location("distene_licence", distene_licence_file)
315             distene=importlib.util.module_from_spec(spec_dist)
316             spec_dist.loader.exec_module(distene)
317         else:
318             import imp
319             distene = imp.load_source('distene_licence', distene_licence_file)
320         distene.set_distene_variables(context)
321     except:
322         pass\n"""  % licence_file
323     text.insert(num_line + 1, text_to_insert)
324     for line in text:
325         fout.write(line)
326     fin.close()    
327     fout.close()
328     return
329     
330 def produce_relative_env_files(config,
331                               logger,
332                               file_dir,
333                               binaries_dir_name):
334     '''Create some specific environment files for the binary package. These 
335        files use relative paths.
336     
337     :param config Config: The global configuration.
338     :param logger Logger: the logging instance
339     :param file_dir str: the directory where to put the files
340     :param binaries_dir_name str: the name of the directory that contains the
341                                   binaries, inside the archive.
342     :return: the path of the produced environment file
343     :rtype: str
344     '''  
345     # create an environment file writer
346     writer = src.environment.FileEnvWriter(config,
347                                            logger,
348                                            file_dir,
349                                            src_root=None)
350     
351     if src.architecture.is_windows():
352       shell = "bat"
353       filename  = "env_launch.bat"
354     else:
355       shell = "bash"
356       filename  = "env_launch.sh"
357
358     # Write
359     filepath = writer.write_env_file(filename,
360                           False, # for launch
361                           shell,
362                           for_package = binaries_dir_name)
363
364     # Little hack to put out_dir_Path as environment variable
365     if src.architecture.is_windows() :
366       src.replace_in_file(filepath, '"out_dir_Path', '"%out_dir_Path%' )
367       src.replace_in_file(filepath, '=out_dir_Path', '=%out_dir_Path%' )
368     else:
369       src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )
370
371     # change the rights in order to make the file executable for everybody
372     os.chmod(filepath,
373              stat.S_IRUSR |
374              stat.S_IRGRP |
375              stat.S_IROTH |
376              stat.S_IWUSR |
377              stat.S_IXUSR |
378              stat.S_IXGRP |
379              stat.S_IXOTH)
380     
381     return filepath
382
383 def produce_install_bin_file(config,
384                              logger,
385                              file_dir,
386                              d_sub,
387                              file_name):
388     '''Create a bash shell script which does substitutions in the BINARIES dir
389        in order to use it for extra compilations.
390     
391     :param config Config: The global configuration.
392     :param logger Logger: the logging instance
393     :param file_dir str: the directory where to put the files
394     :param d_sub dict: the dictionary that contains the substitutions to be done
395     :param file_name str: the name of the install script file
396     :return: the produced file
397     :rtype: str
398     '''  
399     # Write
400     filepath = os.path.join(file_dir, file_name)
401     # open the file and write into it
402     # use codec utf-8 as sat variables are in unicode
403     with codecs.open(filepath, "w", 'utf-8') as installbin_file:
404         installbin_template_path = os.path.join(config.VARS.internal_dir,
405                                         "INSTALL_BIN.template")
406         
407         # build the name of the directory that will contain the binaries
408         binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
409         # build the substitution loop
410         loop_cmd = "for f in $(grep -RIl"
411         for key in d_sub:
412             loop_cmd += " -e "+ key
413         loop_cmd += ' ' + config.INTERNAL.config.install_dir +\
414                     '); do\n     sed -i "\n'
415         for key in d_sub:
416             loop_cmd += "        s?" + key + "?$(pwd)/" + d_sub[key] + "?g\n"
417         loop_cmd += '            " $f\ndone'
418
419         d={}
420         d["BINARIES_DIR"] = binaries_dir_name
421         d["SUBSTITUTION_LOOP"]=loop_cmd
422         d["INSTALL_DIR"]=config.INTERNAL.config.install_dir
423         
424         # substitute the template and write it in file
425         content=src.template.substitute(installbin_template_path, d)
426         installbin_file.write(content)
427         # change the rights in order to make the file executable for everybody
428         os.chmod(filepath,
429                  stat.S_IRUSR |
430                  stat.S_IRGRP |
431                  stat.S_IROTH |
432                  stat.S_IWUSR |
433                  stat.S_IXUSR |
434                  stat.S_IXGRP |
435                  stat.S_IXOTH)
436     
437     return filepath
438
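# Editor's note: a sketch of how produce_install_bin_file() can be called.  The
# substitution dictionary below is purely hypothetical: keys are strings to look
# for in the packaged binaries, values are the relative paths they should be
# replaced with once the archive is unpacked.
def _example_install_bin(config, logger, tmp_working_dir):
    d_sub = {"/path/of/the/original/build/INSTALL": "INSTALL"}  # hypothetical
    return produce_install_bin_file(config, logger, tmp_working_dir,
                                    d_sub, "install_bin.sh")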
439 def product_appli_creation_script(config,
440                                   logger,
441                                   file_dir,
442                                   binaries_dir_name):
443     '''Create a script that can produce an application (EDF style) in the binary
444        package.
445     
446     :param config Config: The global configuration.
447     :param logger Logger: the logging instance
448     :param file_dir str: the directory where to put the file
449     :param binaries_dir_name str: the name of the directory that contains the
450                                   binaries, inside the archive.
451     :return: the path of the produced script file
452     :rtype: Str
453     '''
454     template_name = "create_appli.py.for_bin_packages.template"
455     template_path = os.path.join(config.VARS.internal_dir, template_name)
456     text_to_fill = open(template_path, "r").read()
457     text_to_fill = text_to_fill.replace("TO BE FILLED 1",
458                                         '"' + binaries_dir_name + '"')
459     
460     text_to_add = ""
461     for product_name in get_SALOME_modules(config):
462         product_info = src.product.get_product_config(config, product_name)
463        
464         if src.product.product_is_smesh_plugin(product_info):
465             continue
466
467         if 'install_dir' in product_info and bool(product_info.install_dir):
468             if src.product.product_is_cpp(product_info):
469                 # cpp module
470                 for cpp_name in src.product.get_product_components(product_info):
471                     line_to_add = ("<module name=\"" + 
472                                    cpp_name + 
473                                    "\" gui=\"yes\" path=\"''' + "
474                                    "os.path.join(dir_bin_name, \"" + 
475                                    cpp_name + "\") + '''\"/>")
476             else:
477                 # regular module
478                 line_to_add = ("<module name=\"" + 
479                                product_name + 
480                                "\" gui=\"yes\" path=\"''' + "
481                                "os.path.join(dir_bin_name, \"" + 
482                                product_name + "\") + '''\"/>")
483             text_to_add += line_to_add + "\n"
484     
485     filled_text = text_to_fill.replace("TO BE FILLED 2", text_to_add)
486     
487     tmp_file_path = os.path.join(file_dir, "create_appli.py")
488     ff = open(tmp_file_path, "w")
489     ff.write(filled_text)
490     ff.close()
491     
492     # change the rights in order to make the file executable for everybody
493     os.chmod(tmp_file_path,
494              stat.S_IRUSR |
495              stat.S_IRGRP |
496              stat.S_IROTH |
497              stat.S_IWUSR |
498              stat.S_IXUSR |
499              stat.S_IXGRP |
500              stat.S_IXOTH)
501     
502     return tmp_file_path
503
504 def binary_package(config, logger, options, tmp_working_dir):
505     '''Prepare a dictionary that stores all the needed directories and files to
506        add in a binary package.
507     
508     :param config Config: The global configuration.
509     :param logger Logger: the logging instance
510     :param options OptResult: the options of the launched command
511     :param tmp_working_dir str: The temporary local directory containing some 
512                                 specific directories or files needed in the 
513                                 binary package
514     :return: the dictionary that stores all the needed directories and files to
515              add in a binary package.
516              {label : (path_on_local_machine, path_in_archive)}
517     :rtype: dict
518     '''
519
520     # Get the list of product installations to add to the archive
521     l_products_name = sorted(config.APPLICATION.products.keys())
522     l_product_info = src.product.get_products_infos(l_products_name,
523                                                     config)
524     l_install_dir = []
525     l_source_dir = []
526     l_not_installed = []
527     l_sources_not_present = []
528     generate_mesa_launcher = False  # a flag to know if we generate a mesa launcher
529     if ("APPLICATION" in config  and
530         "properties"  in config.APPLICATION  and
531         "mesa_launcher_in_package"    in config.APPLICATION.properties  and
532         config.APPLICATION.properties.mesa_launcher_in_package == "yes") :
533             generate_mesa_launcher=True
534
535     for prod_name, prod_info in l_product_info:
536         # skip product with property not_in_package set to yes
537         if src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes":
538             continue  
539
540         # Add the sources of the products that have the property 
541         # sources_in_package : "yes"
542         if src.get_property_in_product_cfg(prod_info,
543                                            "sources_in_package") == "yes":
544             if os.path.exists(prod_info.source_dir):
545                 l_source_dir.append((prod_name, prod_info.source_dir))
546             else:
547                 l_sources_not_present.append(prod_name)
548
549         # ignore the native and fixed products for install directories
550         if (src.product.product_is_native(prod_info) 
551                 or src.product.product_is_fixed(prod_info)
552                 or not src.product.product_compiles(prod_info)):
553             continue
554         if src.product.check_installation(config, prod_info):
555             l_install_dir.append((prod_name, prod_info.install_dir))
556         else:
557             l_not_installed.append(prod_name)
558         
559         # Add also the cpp generated modules (if any)
560         if src.product.product_is_cpp(prod_info):
561             # cpp module
562             for name_cpp in src.product.get_product_components(prod_info):
563                 install_dir = os.path.join(config.APPLICATION.workdir,
564                                            config.INTERNAL.config.install_dir,
565                                            name_cpp) 
566                 if os.path.exists(install_dir):
567                     l_install_dir.append((name_cpp, install_dir))
568                 else:
569                     l_not_installed.append(name_cpp)
570         
571     # check the name of the directory that could contain the binaries
572     # from a previous detar
573     binaries_from_detar = os.path.join(
574                               config.APPLICATION.workdir,
575                               config.INTERNAL.config.binary_dir + config.VARS.dist)
576     if os.path.exists(binaries_from_detar):
577          logger.write("""
578 WARNING: existing binaries directory from previous detar installation:
579          %s
580          To make a new package from it, you have to: 
581          1) install binaries in INSTALL directory with the script "install_bin.sh" 
582             see README file for more details
583          2) or recompile everything in INSTALL with "sat compile" command 
584             this step is long, and requires some linux packages to be installed 
585             on your system\n
586 """ % binaries_from_detar)
587     
588     # Print warning or error if there are some missing products
589     if len(l_not_installed) > 0:
590         text_missing_prods = ""
591         for p_name in l_not_installed:
592             text_missing_prods += " - " + p_name + "\n"
593         if not options.force_creation:
594             msg = _("ERROR: there are missing product installations:")
595             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
596                                      text_missing_prods),
597                          1)
598             raise src.SatException(msg)
599         else:
600             msg = _("WARNING: there are missing product installations:")
601             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
602                                      text_missing_prods),
603                          1)
604
605     # Do the same for sources
606     if len(l_sources_not_present) > 0:
607         text_missing_prods = ""
608         for p_name in l_sources_not_present:
609             text_missing_prods += "-" + p_name + "\n"
610         if not options.force_creation:
611             msg = _("ERROR: there are missing product sources:")
612             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
613                                      text_missing_prods),
614                          1)
615             raise src.SatException(msg)
616         else:
617             msg = _("WARNING: there are missing product sources:")
618             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
619                                      text_missing_prods),
620                          1)
621  
622     # construct the name of the directory that will contain the binaries
623     if src.architecture.is_windows():
624         binaries_dir_name = config.INTERNAL.config.binary_dir
625     else:
626         binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
627     # construct the correlation table between the product names, their
628     # actual install directories and their install directories in the archive
629     d_products = {}
630     for prod_name, install_dir in l_install_dir:
631         prod_base_name=os.path.basename(install_dir)
632         if prod_base_name.startswith("config"):
633             # case of a product installed in base. We remove "config-i"
634             prod_base_name=os.path.basename(os.path.dirname(install_dir))
635         path_in_archive = os.path.join(binaries_dir_name, prod_base_name)
636         d_products[prod_name + " (bin)"] = (install_dir, path_in_archive)
637         
638     for prod_name, source_dir in l_source_dir:
639         path_in_archive = os.path.join("SOURCES", prod_name)
640         d_products[prod_name + " (sources)"] = (source_dir, path_in_archive)
641
642     # for packages of SALOME applications including KERNEL, 
643     # we produce a salome launcher or a virtual application (depending on salome version)
644     if 'KERNEL' in config.APPLICATION.products:
645         VersionSalome = src.get_salome_version(config)
646         # Case where SALOME has the launcher that uses the SalomeContext API
647         if VersionSalome >= 730:
648             # create the relative launcher and add it to the files to add
649             launcher_name = src.get_launcher_name(config)
650             launcher_package = produce_relative_launcher(config,
651                                                  logger,
652                                                  tmp_working_dir,
653                                                  launcher_name,
654                                                  binaries_dir_name)
655             d_products["launcher"] = (launcher_package, launcher_name)
656
657             # if the application contains mesa products, we generate in addition to the 
658             # classical salome launcher a launcher using mesa and called mesa_salome 
659             # (the mesa launcher will be used for remote usage through ssh).
660             if generate_mesa_launcher:
661                 #if there is one : store the use_mesa property
662                 restore_use_mesa_option=None
663                 if ('properties' in config.APPLICATION and 
664                     'use_mesa' in config.APPLICATION.properties):
665                     restore_use_mesa_option = config.APPLICATION.properties.use_mesa
666
667                 # activate mesa property, and generate a mesa launcher
668                 src.activate_mesa_property(config)  #activate use_mesa property
669                 launcher_mesa_name="mesa_"+launcher_name
670                 launcher_package_mesa = produce_relative_launcher(config,
671                                                      logger,
672                                                      tmp_working_dir,
673                                                      launcher_mesa_name,
674                                                      binaries_dir_name)
675                 d_products["launcher (mesa)"] = (launcher_package_mesa, launcher_mesa_name)
676
677                 # if there was a use_mesa value, we restore it
678                 # else we set it to the default value "no"
679                 if restore_use_mesa_option != None:
680                     config.APPLICATION.properties.use_mesa=restore_use_mesa_option
681                 else:
682                     config.APPLICATION.properties.use_mesa="no"
683
684             if options.sources:
685                 # if we mix binaries and sources, we add a copy of the launcher,
686                 # prefixed with "bin", in order to avoid clashes
687                 d_products["launcher (copy)"] = (launcher_package, "bin"+launcher_name)
688         else:
689             # Provide a script for the creation of an application EDF style
690             appli_script = product_appli_creation_script(config,
691                                                         logger,
692                                                         tmp_working_dir,
693                                                         binaries_dir_name)
694             
695             d_products["appli script"] = (appli_script, "create_appli.py")
696
697     # Also add the environment file
698     env_file = produce_relative_env_files(config,
699                                            logger,
700                                            tmp_working_dir,
701                                            binaries_dir_name)
702
703     if src.architecture.is_windows():
704       filename  = "env_launch.bat"
705     else:
706       filename  = "env_launch.sh"
707     d_products["environment file"] = (env_file, filename)      
708     return d_products
709
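# Editor's note: shape of the dictionary returned by binary_package() (the labels
# come from the code above; the paths are hypothetical examples):
#   {
#     "KERNEL (bin)":     ("<workdir>/INSTALL/KERNEL", "BINARIES-CO7/KERNEL"),
#     "launcher":         ("<tmp_working_dir>/salome", "salome"),
#     "environment file": ("<tmp_working_dir>/env_launch.sh", "env_launch.sh"),
#   }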
710 def source_package(sat, config, logger, options, tmp_working_dir):
711     '''Prepare a dictionary that stores all the needed directories and files to
712        add in a source package.
713     
714     :param config Config: The global configuration.
715     :param logger Logger: the logging instance
716     :param options OptResult: the options of the launched command
717     :param tmp_working_dir str: The temporary local directory containing some 
718                                 specific directories or files needed in the 
719                                 source package
720     :return: the dictionary that stores all the needed directories and files to
721              add in a source package.
722              {label : (path_on_local_machine, path_in_archive)}
723     :rtype: dict
724     '''
725     
726     d_archives={}
        l_pinfo_vcs = []  # make sure it exists even when ftp mode skips get_archives below
727     # Get all the products that are prepared using an archive,
728     # unless ftp mode is specified (in this case the user of the
729     # archive will get the sources through the ftp mode of sat prepare)
730     if not options.ftp:
731         logger.write("Find archive products ... ")
732         d_archives, l_pinfo_vcs = get_archives(config, logger)
733         logger.write("Done\n")
734
735     d_archives_vcs = {}
736     if not options.with_vcs and len(l_pinfo_vcs) > 0:
737         # Make archives with the products that are not prepared using an archive
738         # (git, cvs, svn, etc)
739         logger.write("Construct archives for vcs products ... ")
740         d_archives_vcs = get_archives_vcs(l_pinfo_vcs,
741                                           sat,
742                                           config,
743                                           logger,
744                                           tmp_working_dir)
745         logger.write("Done\n")
746
747     # Create a project
748     logger.write("Create the project ... ")
749     d_project = create_project_for_src_package(config,
750                                                tmp_working_dir,
751                                                options.with_vcs,
752                                                options.ftp)
753     logger.write("Done\n")
754     
755     # Add salomeTools
756     tmp_sat = add_salomeTools(config, tmp_working_dir)
757     d_sat = {"salomeTools" : (tmp_sat, "salomeTools")}
758     
759     # Add a sat symbolic link if not win
760     if not src.architecture.is_windows():
761         tmp_satlink_path = os.path.join(tmp_working_dir, 'sat')
762         try:
763             t = os.getcwd()
764         except:
765             # In the jobs, os.getcwd() can fail
766             t = config.LOCAL.workdir
767         os.chdir(tmp_working_dir)
768         if os.path.lexists(tmp_satlink_path):
769             os.remove(tmp_satlink_path)
770         os.symlink(os.path.join('salomeTools', 'sat'), 'sat')
771         os.chdir(t)
772         
773         d_sat["sat link"] = (tmp_satlink_path, "sat")
774     
775     d_source = src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
776     return d_source
777
778 def get_archives(config, logger):
779     '''Find all the products that are retrieved using an archive and all the
780        products that are retrieved from a vcs (git, cvs, svn) repository.
781     
782     :param config Config: The global configuration.
783     :param logger Logger: the logging instance
784     :return: the dictionary {name_product : 
785              (local path of its archive, path in the package of its archive )}
786              and the list of specific configuration corresponding to the vcs 
787              products
788     :rtype: (Dict, List)
789     '''
790     # Get the list of product information
791     l_products_name = config.APPLICATION.products.keys()
792     l_product_info = src.product.get_products_infos(l_products_name,
793                                                     config)
794     d_archives = {}
795     l_pinfo_vcs = []
796     for p_name, p_info in l_product_info:
797         # skip product with property not_in_package set to yes
798         if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
799             continue  
800         # ignore the native and fixed products
801         if (src.product.product_is_native(p_info) 
802                 or src.product.product_is_fixed(p_info)):
803             continue
804         if p_info.get_source == "archive":
805             archive_path = p_info.archive_info.archive_name
806             archive_name = os.path.basename(archive_path)
807             d_archives[p_name] = (archive_path,
808                                   os.path.join(ARCHIVE_DIR, archive_name))
809             if (src.appli_test_property(config,"pip", "yes") and 
810                 src.product.product_test_property(p_info,"pip", "yes")):
811                 # if pip mode is activated, and product is managed by pip
812                 pip_wheels_dir=os.path.join(config.LOCAL.archive_dir,"wheels")
813                 pip_wheel_pattern=os.path.join(pip_wheels_dir, 
814                     "%s-%s*" % (p_info.name, p_info.version))
815                 pip_wheel_path=glob.glob(pip_wheel_pattern)
816                 msg_pip_not_found="Error in get_archive, pip wheel for "\
817                                   "product %s-%s was not found in %s directory"
818                 msg_pip_two_or_more="Error in get_archive, several pip wheels for "\
819                                   "product %s-%s were found in %s directory"
820                 if len(pip_wheel_path)==0:
821                     raise src.SatException(msg_pip_not_found %\
822                         (p_info.name, p_info.version, pip_wheels_dir))
823                 if len(pip_wheel_path)>1:
824                     raise src.SatException(msg_pip_two_or_more %\
825                         (p_info.name, p_info.version, pip_wheels_dir))
826
827                 pip_wheel_name=os.path.basename(pip_wheel_path[0])
828                 d_archives[p_name+" (pip wheel)"]=(pip_wheel_path[0], 
829                     os.path.join(ARCHIVE_DIR, "wheels", pip_wheel_name))
830         else:
831             # this product is not managed by archive,
832             # an archive of the vcs directory will be created by get_archives_vcs
833             l_pinfo_vcs.append((p_name, p_info)) 
834             
835     return d_archives, l_pinfo_vcs
836
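# Editor's note: shape of the two values returned by get_archives() (the product
# and archive names below are hypothetical examples):
#   d_archives  = {"MEDCOUPLING": ("/data/ARCHIVES/MEDCOUPLING-9.3.0.tar.gz",
#                                  "ARCHIVES/MEDCOUPLING-9.3.0.tar.gz")}
#   l_pinfo_vcs = [("KERNEL", <KERNEL product Config>), ...]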
837 def add_salomeTools(config, tmp_working_dir):
838     '''Prepare a version of salomeTools that has a specific local.pyconf file 
839        configured for a source package.
840
841     :param config Config: The global configuration.
842     :param tmp_working_dir str: The temporary local directory containing some 
843                                 specific directories or files needed in the 
844                                 source package
845     :return: The path to the local salomeTools directory to add in the package
846     :rtype: str
847     '''
848     # Copy sat in the temporary working directory
849     sat_tmp_path = src.Path(os.path.join(tmp_working_dir, "salomeTools"))
850     sat_running_path = src.Path(config.VARS.salometoolsway)
851     sat_running_path.copy(sat_tmp_path)
852     
853     # Update the local.pyconf file that contains the path to the project
854     local_pyconf_name = "local.pyconf"
855     local_pyconf_dir = os.path.join(tmp_working_dir, "salomeTools", "data")
856     local_pyconf_file = os.path.join(local_pyconf_dir, local_pyconf_name)
857     # Remove the .pyconf file in the root directory of salomeTools if there is
858     # any. (For example when launching jobs, a pyconf file describing the jobs 
859     # can be here and is not useful) 
860     files_or_dir_SAT = os.listdir(os.path.join(tmp_working_dir, "salomeTools"))
861     for file_or_dir in files_or_dir_SAT:
862         if file_or_dir.endswith(".pyconf") or file_or_dir.endswith(".txt"):
863             file_path = os.path.join(tmp_working_dir,
864                                      "salomeTools",
865                                      file_or_dir)
866             os.remove(file_path)
867     
868     ff = open(local_pyconf_file, "w")
869     ff.write(LOCAL_TEMPLATE)
870     ff.close()
871     
872     return sat_tmp_path.path
873
874 def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir):
875     '''For source packages that require all products to be retrieved as archives,
876        some archives have to be created for the vcs products.
877        So this method calls the clean and source commands of sat and then creates
878        the archives.
879
880     :param l_pinfo_vcs List: The list of specific configuration corresponding to
881                              each vcs product
882     :param sat Sat: The Sat instance that can be called to clean and source the
883                     products
884     :param config Config: The global configuration.
885     :param logger Logger: the logging instance
886     :param tmp_working_dir str: The temporary local directory containing some 
887                                 specific directories or files needed in the 
888                                 source package
889     :return: the dictionary that stores all the archives to add in the source 
890              package. {label : (path_on_local_machine, path_in_archive)}
891     :rtype: dict
892     '''
893     # clean the source directory of all the vcs products, then use the source 
894     # command and thus construct an archive that will not contain the patches
895     l_prod_names = [pn for pn, __ in l_pinfo_vcs]
896     if False: # clean is dangerous in user/SOURCES, fixed in tmp_local_working_dir
897       logger.write(_("\nclean sources\n"))
898       args_clean = config.VARS.application
899       args_clean += " --sources --products "
900       args_clean += ",".join(l_prod_names)
901       logger.write("WARNING: get_archives_vcs clean\n         '%s'\n" % args_clean, 1)
902       sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)
903     if True:
904       # source
905       logger.write(_("get sources\n"))
906       args_source = config.VARS.application
907       args_source += " --products "
908       args_source += ",".join(l_prod_names)
909       svgDir = sat.cfg.APPLICATION.workdir
910       tmp_local_working_dir = os.path.join(sat.cfg.APPLICATION.workdir, "tmp_package")  # to avoid too many big files in /tmp
911       sat.cfg.APPLICATION.workdir = tmp_local_working_dir
912       # DBG.write("SSS sat config.APPLICATION.workdir", sat.cfg.APPLICATION, True)
913       # DBG.write("sat config id", id(sat.cfg), True)
914       # note: config does not have the same id() as the one used by sat.source()
915       # sat.source(args_source, batch=True, verbose=5, logger_add_link = logger)
916       import source
917       source.run(args_source, sat, logger) #use this mode as runner.cfg reference
918       
919       # make the new archives
920       d_archives_vcs = {}
921       for pn, pinfo in l_pinfo_vcs:
922           path_archive = make_archive(pn, pinfo, tmp_local_working_dir)
923           logger.write("make archive vcs '%s'\n" % path_archive)
924           d_archives_vcs[pn] = (path_archive,
925                                 os.path.join(ARCHIVE_DIR, pn + ".tgz"))
926       sat.cfg.APPLICATION.workdir = svgDir
927       # DBG.write("END sat config", sat.cfg.APPLICATION, True)
928     return d_archives_vcs
929
930 def make_archive(prod_name, prod_info, where):
931     '''Create an archive of a product from its source directory.
932
933     :param prod_name str: The name of the product.
934     :param prod_info Config: The specific configuration corresponding to the 
935                              product
936     :param where str: The path of the directory where to put the resulting
937                       archive
938     :return: The path of the resulting archive
939     :rtype: str
940     '''
941     path_targz_prod = os.path.join(where, prod_name + PACKAGE_EXT)
942     tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
943     local_path = prod_info.source_dir
944     tar_prod.add(local_path,
945                  arcname=prod_name,
946                  exclude=exclude_VCS_and_extensions)
947     tar_prod.close()
948     return path_targz_prod       
949
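# Editor's note: illustrative call; prod_info is the product Config obtained from
# src.product.get_product_config(config, "MEDCOUPLING") and the target directory
# is hypothetical.
#   path = make_archive("MEDCOUPLING", prod_info, "/tmp/tmp_package")
#   # -> "/tmp/tmp_package/MEDCOUPLING.tar.gz"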
950 def create_project_for_src_package(config, tmp_working_dir, with_vcs, with_ftp):
951     '''Create a specific project for a source package.
952
953     :param config Config: The global configuration.
954     :param tmp_working_dir str: The temporary local directory containing some 
955                                 specific directories or files needed in the 
956                                 source package
957     :param with_vcs boolean: True if the package is with vcs products (not 
958                              transformed into archive products)
959     :param with_ftp boolean: True if the package uses ftp servers to get archives
960     :return: The dictionary 
961              {"project" : (produced project, project path in the archive)}
962     :rtype: Dict
963     '''
964
965     # Create in the working temporary directory the full project tree
966     project_tmp_dir = os.path.join(tmp_working_dir, PROJECT_DIR)
967     products_pyconf_tmp_dir = os.path.join(project_tmp_dir,
968                                          "products")
969     compil_scripts_tmp_dir = os.path.join(project_tmp_dir,
970                                          "products",
971                                          "compil_scripts")
972     env_scripts_tmp_dir = os.path.join(project_tmp_dir,
973                                          "products",
974                                          "env_scripts")
975     patches_tmp_dir = os.path.join(project_tmp_dir,
976                                          "products",
977                                          "patches")
978     application_tmp_dir = os.path.join(project_tmp_dir,
979                                          "applications")
980     for directory in [project_tmp_dir,
981                       compil_scripts_tmp_dir,
982                       env_scripts_tmp_dir,
983                       patches_tmp_dir,
984                       application_tmp_dir]:
985         src.ensure_path_exists(directory)
986
987     # Create the pyconf that contains the information of the project
988     project_pyconf_name = "project.pyconf"        
989     project_pyconf_file = os.path.join(project_tmp_dir, project_pyconf_name)
990     ff = open(project_pyconf_file, "w")
991     ff.write(PROJECT_TEMPLATE)
992     if with_ftp and len(config.PATHS.ARCHIVEFTP) > 0:
993         ftp_path='ARCHIVEFTP : "'+config.PATHS.ARCHIVEFTP[0]
994         for ftpserver in config.PATHS.ARCHIVEFTP[1:]:
995             ftp_path=ftp_path+":"+ftpserver
996         ftp_path+='"'
997         ff.write("# ftp servers where to search for prerequisite archives\n")
998         ff.write(ftp_path)
999     # add licence paths if any
1000     if len(config.PATHS.LICENCEPATH) > 0:  
1001         licence_path='LICENCEPATH : "'+config.PATHS.LICENCEPATH[0]
1002         for path in config.PATHS.LICENCEPATH[1:]:
1003             licence_path=licence_path+":"+path
1004         licence_path+='"'
1005         ff.write("\n# Where to search for licences\n")
1006         ff.write(licence_path)
1007         
1008
1009     ff.close()
1010     
1011     # Loop over the products to get their pyconf and all the scripts 
1012     # (compilation, environment, patches)
1013     # and create the pyconf file to add to the project
1014     lproducts_name = config.APPLICATION.products.keys()
1015     l_products = src.product.get_products_infos(lproducts_name, config)
1016     for p_name, p_info in l_products:
1017         # skip product with property not_in_package set to yes
1018         if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
1019             continue  
1020         find_product_scripts_and_pyconf(p_name,
1021                                         p_info,
1022                                         config,
1023                                         with_vcs,
1024                                         compil_scripts_tmp_dir,
1025                                         env_scripts_tmp_dir,
1026                                         patches_tmp_dir,
1027                                         products_pyconf_tmp_dir)
1028     
1029     find_application_pyconf(config, application_tmp_dir)
1030     
1031     d_project = {"project" : (project_tmp_dir, PROJECT_DIR )}
1032     return d_project
1033
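# Editor's note: the project produced above ends up at the root of the source
# archive; the returned mapping is simply
#   {"project": ("<tmp_working_dir>/PROJECT", "PROJECT")}
# where "PROJECT" is the PROJECT_DIR constant defined at the top of this file.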
1034 def find_product_scripts_and_pyconf(p_name,
1035                                     p_info,
1036                                     config,
1037                                     with_vcs,
1038                                     compil_scripts_tmp_dir,
1039                                     env_scripts_tmp_dir,
1040                                     patches_tmp_dir,
1041                                     products_pyconf_tmp_dir):
1042     '''Create a specific pyconf file for a given product. Get its environment 
1043        script, its compilation script and its patches, and put them in the temporary
1044        working directory. This method is used in the source package in order to
1045        construct the specific project.
1046
1047     :param p_name str: The name of the product.
1048     :param p_info Config: The specific configuration corresponding to the 
1049                              product
1050     :param config Config: The global configuration.
1051     :param with_vcs boolean: True if the package is with vcs products (not 
1052                              transformed into archive products)
1053     :param compil_scripts_tmp_dir str: The path to the temporary compilation 
1054                                        scripts directory of the project.
1055     :param env_scripts_tmp_dir str: The path to the temporary environment script 
1056                                     directory of the project.
1057     :param patches_tmp_dir str: The path to the temporary patch scripts 
1058                                 directory of the project.
1059     :param products_pyconf_tmp_dir str: The path to the temporary product 
1060                                         scripts directory of the project.
1061     '''
1062     
1063     # read the pyconf of the product
1064     product_pyconf_cfg = src.pyconf.Config(p_info.from_file)
1065
1066     # find the compilation script if any
1067     if src.product.product_has_script(p_info):
1068         compil_script_path = src.Path(p_info.compil_script)
1069         compil_script_path.copy(compil_scripts_tmp_dir)
1070
1071     # find the environment script if any
1072     if src.product.product_has_env_script(p_info):
1073         env_script_path = src.Path(p_info.environ.env_script)
1074         env_script_path.copy(env_scripts_tmp_dir)
1075
1076     # find the patches if any
1077     if src.product.product_has_patches(p_info):
1078         patches = src.pyconf.Sequence()
1079         for patch_path in p_info.patches:
1080             p_path = src.Path(patch_path)
1081             p_path.copy(patches_tmp_dir)
1082             patches.append(os.path.basename(patch_path), "")
1083
1084     if (not with_vcs) and src.product.product_is_vcs(p_info):
1085         # in non vcs mode, if the product is not an archive product, turn it into one.
1086
1087         # depending upon the incremental mode, select impacted sections
1088         if "properties" in p_info and "incremental" in p_info.properties and\
1089             p_info.properties.incremental == "yes":
1090             sections = ["default", "default_win", p_info.section, p_info.section+"_win"]
1091         else:
1092             sections = [p_info.section]
1093         for section in sections:
1094             if section in product_pyconf_cfg and "get_source" in product_pyconf_cfg[section]:
1095                 DBG.write("sat package set archive mode to archive for product %s and section %s" %\
1096                           (p_name,section))
1097                 product_pyconf_cfg[section].get_source = "archive"
1098                 if not "archive_info" in product_pyconf_cfg[section]:
1099                     product_pyconf_cfg[section].addMapping("archive_info",
1100                                         src.pyconf.Mapping(product_pyconf_cfg),
1101                                         "")
1102                     product_pyconf_cfg[section].archive_info.archive_name =\
1103                         p_info.name + ".tgz"
1104     
1105     # write the pyconf file to the temporary project location
1106     product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir,
1107                                            p_name + ".pyconf")
1108     ff = open(product_tmp_pyconf_path, 'w')
1109     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1110     product_pyconf_cfg.__save__(ff, 1)
1111     ff.close()
1112
1113 def find_application_pyconf(config, application_tmp_dir):
1114     '''Find the application pyconf file and put it in the specific temporary 
1115        directory containing the specific project of a source package.
1116
1117     :param config Config: The global configuration.
1118     :param application_tmp_dir str: The path to the temporary application 
1119                                        scripts directory of the project.
1120     '''
1121     # read the pyconf of the application
1122     application_name = config.VARS.application
1123     application_pyconf_path = src.find_file_in_lpath(
1124                                             application_name + ".pyconf",
1125                                             config.PATHS.APPLICATIONPATH)
1126     application_pyconf_cfg = src.pyconf.Config(application_pyconf_path)
1127     
1128     # Change the workdir
1129     application_pyconf_cfg.APPLICATION.workdir = src.pyconf.Reference(
1130                                     application_pyconf_cfg,
1131                                     src.pyconf.DOLLAR,
1132                                     'VARS.salometoolsway + $VARS.sep + ".."')
1133
1134     # Prevent compilation in base
1135     application_pyconf_cfg.APPLICATION.base = "no"
1136     
1137     # remove products that are not in config (they were filtered out by --without_properties)
1138     for product_name in application_pyconf_cfg.APPLICATION.products.keys():
1139         if product_name not in config.APPLICATION.products.keys():
1140             application_pyconf_cfg.APPLICATION.products.__delitem__(product_name)
1141
1142     # write the pyconf file to the temporary application location
1143     application_tmp_pyconf_path = os.path.join(application_tmp_dir,
1144                                                application_name + ".pyconf")
1145
1146     ff = open(application_tmp_pyconf_path, 'w')
1147     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1148     application_pyconf_cfg.__save__(ff, 1)
1149     ff.close()
1150
1151 def sat_package(config, tmp_working_dir, options, logger):
1152     '''Prepare a dictionary that stores all the needed directories and files to
1153        add in a salomeTools package.
1154     
1155     :param tmp_working_dir str: The temporary local working directory 
1156     :param options OptResult: the options of the launched command
1157     :return: the dictionary that stores all the needed directories and files to
1158              add in a salomeTools package.
1159              {label : (path_on_local_machine, path_in_archive)}
1160     :rtype: dict
1161     '''
1162     d_project = {}
1163
1164     # we include sat itself
1165     d_project["all_sat"]=(config.VARS.salometoolsway, "")
1166
1167     # and we overwrite local.pyconf with a clean version.
1168     local_pyconf_tmp_path = os.path.join(tmp_working_dir, "local.pyconf")
1169     local_file_path = os.path.join(config.VARS.datadir, "local.pyconf")
1170     local_cfg = src.pyconf.Config(local_file_path)
1171     local_cfg.PROJECTS.project_file_paths=src.pyconf.Sequence(local_cfg.PROJECTS)
1172     local_cfg.LOCAL["base"] = "default"
1173     local_cfg.LOCAL["workdir"] = "default"
1174     local_cfg.LOCAL["log_dir"] = "default"
1175     local_cfg.LOCAL["archive_dir"] = "default"
1176     local_cfg.LOCAL["VCS"] = "None"
1177     local_cfg.LOCAL["tag"] = src.get_salometool_version(config)
1178
1179     # if the archive contains a project, we write its relative path in local.pyconf
1180     if options.project:
1181         project_arch_path = os.path.join("projects", options.project, 
1182                                          os.path.basename(options.project_file_path))
1183         local_cfg.PROJECTS.project_file_paths.append(project_arch_path, "")
1184
1185     ff = open(local_pyconf_tmp_path, 'w')
1186     local_cfg.__save__(ff, 1)
1187     ff.close()
1188     d_project["local.pyconf"]=(local_pyconf_tmp_path, "data/local.pyconf")
1189     return d_project
1190     
1191
1192 def project_package(config, name_project, project_file_path, ftp_mode, tmp_working_dir, embedded_in_sat, logger):
1193     '''Prepare a dictionary that stores all the directories and files needed
1194        in a project package.
1195     
         :param config Config: The global configuration.
         :param name_project str: The name of the project to package.
1196     :param project_file_path str: The path to the local project file.
1197     :param ftp_mode boolean: Do not embed archives; the package will rely on ftp mode to retrieve them.
1198     :param tmp_working_dir str: The temporary local directory containing some 
1199                                 specific directories or files needed in the 
1200                                 project package.
1201     :param embedded_in_sat boolean: True if the project package is embedded in a sat package.
         :param logger Logger: The logger instance used for display and logging.
1202     :return: the dictionary that stores all the needed directories and files to
1203              add in a project package.
1204              {label : (path_on_local_machine, path_in_archive)}
1205     :rtype: dict
1206     '''
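         # Illustrative entries of the returned dictionary when embedded_in_sat is True
         # (project name and local paths are hypothetical):
         #   {"APPLICATIONPATH"  : ("/path/to/PROJECT/applications", "projects/MYPROJECT/applications"),
         #    "Project hat file" : ("/tmp/sat_package_tmp/MYPROJECT.pyconf", "projects/MYPROJECT/MYPROJECT.pyconf")}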
1207     d_project = {}
1208     # Read the project file and get the directories to add to the package
1209     
1210     try: 
1211       project_pyconf_cfg = config.PROJECTS.projects.__getattr__(name_project)
1212     except Exception:
1213       logger.write("""
1214 WARNING: config.PROJECTS.projects.%s does not exist, trying to read it from:\n%s\n""" % (name_project, project_file_path))
1215       project_pyconf_cfg = src.pyconf.Config(project_file_path)
1216       project_pyconf_cfg.PWD = os.path.dirname(project_file_path)
1217     
1218     paths = {"APPLICATIONPATH" : "applications",
1219              "PRODUCTPATH" : "products",
1220              "JOBPATH" : "jobs",
1221              "MACHINEPATH" : "machines"}
1222     if not ftp_mode:
1223         paths["ARCHIVEPATH"] = "archives"
1224
1225     # Loop over the project paths and add them to the package
1226     project_file_name = os.path.basename(project_file_path)
         # fallback destination of the project file, in case none of the keys of paths
         # is present in the project configuration (avoids an undefined variable below)
         project_file_dest = project_file_name
1227     for path in paths:
1228         if path not in project_pyconf_cfg:
1229             continue
1230         if embedded_in_sat:
1231             dest_path = os.path.join("projects", name_project, paths[path])
1232             project_file_dest = os.path.join("projects", name_project, project_file_name)
1233         else:
1234             dest_path = paths[path]
1235             project_file_dest = project_file_name
1236
1237         # Add the directory to the files to add in the package
1238         d_project[path] = (project_pyconf_cfg[path], dest_path)
1239
1240         # Modify the value of the path in the package
1241         project_pyconf_cfg[path] = src.pyconf.Reference(
1242                                     project_pyconf_cfg,
1243                                     src.pyconf.DOLLAR,
1244                                     'project_path + "/' + paths[path] + '"')
1245     
1246     # Modify some values
1247     if "project_path" not in project_pyconf_cfg:
1248         project_pyconf_cfg.addMapping("project_path",
1249                                       src.pyconf.Mapping(project_pyconf_cfg),
1250                                       "")
1251     project_pyconf_cfg.project_path = src.pyconf.Reference(project_pyconf_cfg,
1252                                                            src.pyconf.DOLLAR,
1253                                                            'PWD')
1254     # we don't want to export these two fields
1255     project_pyconf_cfg.__delitem__("file_path")
1256     project_pyconf_cfg.__delitem__("PWD")
1257     if ftp_mode and "ARCHIVEPATH" in project_pyconf_cfg:
1258         project_pyconf_cfg.__delitem__("ARCHIVEPATH")
1259     
1260     # Write the project pyconf file
1261     project_pyconf_tmp_path = os.path.join(tmp_working_dir, project_file_name)
1262     ff = open(project_pyconf_tmp_path, 'w')
1263     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1264     project_pyconf_cfg.__save__(ff, 1)
1265     ff.close()
1266     d_project["Project hat file"] = (project_pyconf_tmp_path, project_file_dest)
1267     
1268     return d_project
1269
1270 def add_readme(config, options, where):
1271     readme_path = os.path.join(where, "README")
1272     with codecs.open(readme_path, "w", 'utf-8') as f:
1273
1274         # templates for building the header
1275         readme_header = """
1276 # This package was generated with sat $version
1277 # Date: $date
1278 # User: $user
1279 # Distribution : $dist
1280
1281 In the following, $$ROOT represents the directory where you have installed 
1282 SALOME (the directory where this file is located).
1283
1284 """
1285         if src.architecture.is_windows():
1286             readme_header = readme_header.replace('$$ROOT','%ROOT%')
1287         readme_compilation_with_binaries="""
1288
1289 compilation based on the binaries used as prerequisites
1290 =======================================================
1291
1292 If you fail to compile the complete application (for example because
1293 you are not root on your system and cannot install missing packages), you
1294 may try a partial compilation based on the binaries.
1295 For that it is necessary to copy the binaries from BINARIES to INSTALL,
1296 and do some substitutions on cmake and .la files (replace the build directories
1297 with local paths).
1298 The procedure to do it is:
1299  1) Remove or rename INSTALL directory if it exists
1300  2) Execute the shell script install_bin.sh:
1301  > cd $ROOT
1302  > ./install_bin.sh
1303  3) Use SalomeTool (as explained in Sources section) and compile only the 
1304     modules you need to (with -p option)
1305
1306 """
1307         readme_header_tpl=string.Template(readme_header)
1308         readme_template_path_bin = os.path.join(config.VARS.internal_dir,
1309                 "README_BIN.template")
1310         readme_template_path_bin_launcher = os.path.join(config.VARS.internal_dir,
1311                 "README_LAUNCHER.template")
1312         readme_template_path_bin_virtapp = os.path.join(config.VARS.internal_dir,
1313                 "README_BIN_VIRTUAL_APP.template")
1314         readme_template_path_src = os.path.join(config.VARS.internal_dir,
1315                 "README_SRC.template")
1316         readme_template_path_pro = os.path.join(config.VARS.internal_dir,
1317                 "README_PROJECT.template")
1318         readme_template_path_sat = os.path.join(config.VARS.internal_dir,
1319                 "README_SAT.template")
1320
1321         # prepare substitution dictionary
1322         d = dict()
1323         d['user'] = config.VARS.user
1324         d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
1325         d['version'] = src.get_salometool_version(config)
1326         d['dist'] = config.VARS.dist
1327         f.write(readme_header_tpl.substitute(d)) # write the general header (common)
1328
1329         if options.binaries or options.sources:
1330             d['application'] = config.VARS.application
1331             d['BINARIES']    = config.INTERNAL.config.install_dir
1332             d['SEPARATOR'] = config.VARS.sep
1333             if src.architecture.is_windows():
1334                 d['operatingSystem'] = 'Windows'
1335                 d['PYTHON3'] = 'python3'
1336                 d['ROOT']    = '%ROOT%'
1337             else:
1338                 d['operatingSystem'] = 'Linux'
1339                 d['PYTHON3'] = ''
1340                 d['ROOT']    = '$ROOT'
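                 # (descriptive note) the keys of d are the placeholders substituted into
                 # the README templates selected below via src.template.substitute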
1341             f.write("# Application: " + d['application'] + "\n")
1342             if 'KERNEL' in config.APPLICATION.products:
1343                 VersionSalome = src.get_salome_version(config)
1344                 # Case where SALOME has the launcher that uses the SalomeContext API
1345                 if VersionSalome >= 730:
1346                     d['launcher'] = config.APPLICATION.profile.launcher_name
1347                 else:
1348                     d['virtual_app'] = 'runAppli' # this info is not used for now
1349
1350         # write the specific sections
1351         if options.binaries:
1352             f.write(src.template.substitute(readme_template_path_bin, d))
1353             if "virtual_app" in d:
1354                 f.write(src.template.substitute(readme_template_path_bin_virtapp, d))
1355             if "launcher" in d:
1356                 f.write(src.template.substitute(readme_template_path_bin_launcher, d))
1357
1358         if options.sources:
1359             f.write(src.template.substitute(readme_template_path_src, d))
1360
1361         if options.binaries and options.sources and not src.architecture.is_windows():
1362             f.write(readme_compilation_with_binaries)
1363
1364         if options.project:
1365             f.write(src.template.substitute(readme_template_path_pro, d))
1366
1367         if options.sat:
1368             f.write(src.template.substitute(readme_template_path_sat, d))
1369     
1370     return readme_path
1371
1372 def update_config(config, prop, value):
1373     '''Remove from config.APPLICATION.products the products that have the property given as input.
1374     
1375     :param config Config: The global config.
1376     :param prop str: The property to filter
1377     :param value str: The value of the property to filter
1378     '''
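         # Typical usage, as done in run() below:
         #   update_config(config, "not_in_package", "yes")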
1379     # if there is no APPLICATION (ex sat package -t) : nothing to do
1380     if "APPLICATION" in config:
1381         l_product_to_remove = []
1382         for product_name in config.APPLICATION.products.keys():
1383             prod_cfg = src.product.get_product_config(config, product_name)
1384             if src.get_property_in_product_cfg(prod_cfg, prop) == value:
1385                 l_product_to_remove.append(product_name)
1386         for product_name in l_product_to_remove:
1387             config.APPLICATION.products.__delitem__(product_name)
1388
1389 def description():
1390     '''method that is called when salomeTools is called with --help option.
1391     
1392     :return: The text to display for the package command description.
1393     :rtype: str
1394     '''
1395     return _("""
1396 The package command creates a tar file archive of a product.
1397 There are four kinds of archive, which can be mixed:
1398
1399  1 - The binary archive. 
1400      It contains the product installation directories plus a launcher.
1401  2 - The sources archive. 
1402      It contains the product archives and a project (the application plus salomeTools).
1403  3 - The project archive. 
1404      It contains a project (give the project file path as argument).
1405  4 - The salomeTools archive. 
1406      It contains the salomeTools code utility.
1407
1408 example:
1409  >> sat package SALOME-master --binaries --sources""")
1410   
1411 def run(args, runner, logger):
1412     '''method that is called when salomeTools is called with package parameter.
1413     '''
1414     
1415     # Parse the options
1416     (options, args) = parser.parse_args(args)
1417
1418     # Check that at least one type of package is requested (several kinds can be combined)
1419     all_option_types = (options.binaries,
1420                         options.sources,
1421                         options.project not in ["", None],
1422                         options.sat)
1423
1424     # Check if no option for package type
1425     if all_option_types.count(True) == 0:
1426         msg = _("Error: Specify a type for the package\nUse one of the "
1427                 "following options: --binaries, --sources, --project or"
1428                 " --salometools")
1429         logger.write(src.printcolors.printcError(msg), 1)
1430         logger.write("\n", 1)
1431         return 1
1432     
1433     # The default directory where to put the package if it is neither binary nor source
1434     package_default_path = runner.cfg.LOCAL.workdir
1435     
1436     # if the package contains binaries or sources:
1437     if options.binaries or options.sources:
1438         # Check that the command has been called with an application
1439         src.check_config_has_application(runner.cfg)
1440
1441         # Display information
1442         logger.write(_("Packaging application %s\n") % src.printcolors.printcLabel(
1443                                                     runner.cfg.VARS.application), 1)
1444         
1445         # Get the default directory where to put the packages
1446         package_default_path = os.path.join(runner.cfg.APPLICATION.workdir, "PACKAGE")
1447         src.ensure_path_exists(package_default_path)
1448         
1449     # if the package contains a project:
1450     if options.project:
1451         # check that the project is visible by SAT
1452         projectNameFile = options.project + ".pyconf"
1453         foundProject = None
1454         for i in runner.cfg.PROJECTS.project_file_paths:
1455             baseName = os.path.basename(i)
1456             if baseName == projectNameFile:
1457                 foundProject = i
1458                 break
1459
1460         if foundProject is None:
1461             local_path = os.path.join(runner.cfg.VARS.salometoolsway, "data", "local.pyconf")
1462             msg = _("""ERROR: the project %(1)s is not visible to salomeTools.
1463 Known projects are:
1464 %(2)s
1465
1466 Please add it to the file:
1467 %(3)s""" % \
1468                     {"1": options.project, "2": "\n  ".join(runner.cfg.PROJECTS.project_file_paths), "3": local_path})
1469             logger.write(src.printcolors.printcError(msg), 1)
1470             logger.write("\n", 1)
1471             return 1
1472         else:
1473             options.project_file_path = foundProject
1474             src.printcolors.print_value(logger, "Project path", options.project_file_path, 2)
1475     
1476     # Remove the products that are filtered by the --without_properties option
1477     if options.without_properties:
1478         app = runner.cfg.APPLICATION
1479         logger.trace("without_properties all products:\n %s\n" % PP.pformat(sorted(app.products.keys())))
1480         prop, value = options.without_properties
1481         update_config(runner.cfg, prop, value)
1482         logger.warning("without_properties selected products:\n %s\n" % PP.pformat(sorted(app.products.keys())))
1483
1484     # Remove from config the products that have the not_in_package property
1485     update_config(runner.cfg, "not_in_package", "yes")
1486     
1487     # get the name of the archive or build it
1488     if options.name:
1489         if os.path.basename(options.name) == options.name:
1490             # only a name (not a path)
1491             archive_name = options.name           
1492             dir_name = package_default_path
1493         else:
1494             archive_name = os.path.basename(options.name)
1495             dir_name = os.path.dirname(options.name)
1496         
1497         # remove the extension, if any (the standard one is added back below)
1498         if archive_name[-len(".tgz"):] == ".tgz":
1499             archive_name = archive_name[:-len(".tgz")]
1500         if archive_name[-len(".tar.gz"):] == ".tar.gz":
1501             archive_name = archive_name[:-len(".tar.gz")]
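             # e.g. a user-supplied name "MYPACKAGE.tar.gz" (hypothetical) becomes
             # "MYPACKAGE"; the standard extension PACKAGE_EXT is appended again when
             # path_targz is built below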
1502         
1503     else:
1504         archive_name=""
1505         dir_name = package_default_path
1506         if options.binaries or options.sources:
1507             archive_name = runner.cfg.APPLICATION.name
1508
1509         if options.binaries:
1510             archive_name += "-"+runner.cfg.VARS.dist
1511             
1512         if options.sources:
1513             archive_name += "-SRC"
1514             if options.with_vcs:
1515                 archive_name += "-VCS"
1516
1517         if options.sat:
1518             archive_name += ("salomeTools_" + src.get_salometool_version(runner.cfg))
1519
1520         if options.project:
1521             if options.sat:
1522                 archive_name += "_" 
1523             archive_name += ("satproject_" + options.project)
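             # Illustrative default archive names produced by the rules above
             # (application and project names are hypothetical):
             #   --binaries                 -> <APP>-<dist>.tar.gz
             #   --sources --with_vcs       -> <APP>-SRC-VCS.tar.gz
             #   --salometools              -> salomeTools_<version>.tar.gz
             #   --salometools --project P  -> salomeTools_<version>_satproject_P.tar.gz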
1524  
1525         if len(archive_name)==0: # no option worked 
1526             msg = _("Error: Cannot name the archive\n"
1527                     " check if at least one of the following options was "
1528                     "selected : --binaries, --sources, --project or"
1529                     " --salometools")
1530             logger.write(src.printcolors.printcError(msg), 1)
1531             logger.write("\n", 1)
1532             return 1
1533  
1534     path_targz = os.path.join(dir_name, archive_name + PACKAGE_EXT)
1535     
1536     src.printcolors.print_value(logger, "Package path", path_targz, 2)
1537
1538     # Create a working directory for all files that are produced during the
1539     # package creation and that will be removed at the end of the command
1540     tmp_working_dir = os.path.join(runner.cfg.VARS.tmp_root, runner.cfg.VARS.datehour)
1541     src.ensure_path_exists(tmp_working_dir)
1542     logger.write("\n", 5)
1543     logger.write(_("The temporary working directory: %s\n") % tmp_working_dir, 5)
1544     
1545     logger.write("\n", 3)
1546
1547     msg = _("Preparation of files to add to the archive")
1548     logger.write(src.printcolors.printcLabel(msg), 2)
1549     logger.write("\n", 2)
1550     
1551     d_files_to_add={}  # content of the archive
1552
1553     # a dict to hold paths that will need to be substituted for user recompilations
1554     d_paths_to_substitute={}  
1555
1556     if options.binaries:
1557         d_bin_files_to_add = binary_package(runner.cfg,
1558                                             logger,
1559                                             options,
1560                                             tmp_working_dir)
1561         # for all binary dirs, store the substitutions that will be required 
1562         # for extra compilations
1563         for key in d_bin_files_to_add:
1564             if key.endswith("(bin)"):
1565                 source_dir = d_bin_files_to_add[key][0]
1566                 path_in_archive = d_bin_files_to_add[key][1].replace(
1567                    runner.cfg.INTERNAL.config.binary_dir + runner.cfg.VARS.dist,
1568                    runner.cfg.INTERNAL.config.install_dir)
1569                 if os.path.basename(source_dir)==os.path.basename(path_in_archive):
1570                     # if basename is the same we will just substitute the dirname 
1571                     d_paths_to_substitute[os.path.dirname(source_dir)]=\
1572                         os.path.dirname(path_in_archive)
1573                 else:
1574                     d_paths_to_substitute[source_dir]=path_in_archive
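             # (descriptive note) each entry of d_paths_to_substitute maps a directory of
             # the build machine to its location inside the archive; install_bin.sh uses
             # these pairs to patch the cmake and .la files after extraction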
1575
1576         d_files_to_add.update(d_bin_files_to_add)
1577     if options.sources:
1578         d_files_to_add.update(source_package(runner,
1579                                         runner.cfg,
1580                                         logger, 
1581                                         options,
1582                                         tmp_working_dir))
1583         if options.binaries:
1584             # for archives with bin and sources we provide a shell script able to 
1585             # install binaries for compilation
1586             file_install_bin=produce_install_bin_file(runner.cfg,logger,
1587                                                       tmp_working_dir,
1588                                                       d_paths_to_substitute,
1589                                                       "install_bin.sh")
1590             d_files_to_add.update({"install_bin" : (file_install_bin, "install_bin.sh")})
1591             logger.write("substitutions that need to be done later:\n", 5)
1592             logger.write(str(d_paths_to_substitute), 5)
1593             logger.write("\n", 5)
1594     else:
1595         # the --salometools option is not considered when --sources is selected, as
1596         # the sources archive already embeds salomeTools
1597         if options.sat:
1598             d_files_to_add.update(sat_package(runner.cfg, tmp_working_dir, 
1599                                   options, logger))
1600         
1601     if options.project:
1602         DBG.write("config for package %s" % options.project, runner.cfg)
1603         d_files_to_add.update(project_package(runner.cfg, options.project, options.project_file_path, options.ftp, tmp_working_dir, options.sat, logger))
1604
1605     if not d_files_to_add:
1606         msg = _("Error: Empty dictionary to build the archive!\n")
1607         logger.write(src.printcolors.printcError(msg), 1)
1608         logger.write("\n", 1)
1609         return 1
1610
1611     # Add the README file in the package
1612     local_readme_tmp_path = add_readme(runner.cfg, options, tmp_working_dir)
1613     d_files_to_add["README"] = (local_readme_tmp_path, "README")
1614
1615     # Add the additional files of option add_files
1616     if options.add_files:
1617         for file_path in options.add_files:
1618             if not os.path.exists(file_path):
1619                 msg = _("WARNING: the file %s is not accessible.\n") % file_path
                     logger.write(msg, 1)  # report the warning instead of silently skipping
1620                 continue
1621             file_name = os.path.basename(file_path)
1622             d_files_to_add[file_name] = (file_path, file_name)
1623
1624     logger.write("\n", 2)
1625     logger.write(src.printcolors.printcLabel(_("Actually do the package")), 2)
1626     logger.write("\n", 2)
1627     logger.write("\nfiles and directories to add:\n%s\n\n" % PP.pformat(d_files_to_add), 5)
1628
1629     res = 0
1630     try:
1631         # Create the tarfile object
1632         tar = tarfile.open(path_targz, mode='w:gz')
1633         
1634         # get the filtering function if needed
1635         filter_function = exclude_VCS_and_extensions
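             # exclude_VCS_and_extensions presumably filters out the VCS directories listed
             # in IGNORED_DIRS and the file extensions listed in IGNORED_EXTENSIONS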
1636
1637         # Add the files to the tarfile object
1638         res = add_files(tar, archive_name, d_files_to_add, logger, f_exclude=filter_function)
1639         tar.close()
1640     except KeyboardInterrupt:
1641         logger.write(src.printcolors.printcError("\nERROR: forced interruption\n"), 1)
1642         logger.write(_("Removing the temporary working directory '%s'... ") % tmp_working_dir, 1)
1643         # remove the working directory
1644         shutil.rmtree(tmp_working_dir)
1645         logger.write(_("OK"), 1)
1646         logger.write(_("\n"), 1)
1647         return 1
1648     
1649     # case where there is no application, e.g. only sat is packaged ('sat package -t')
1650     try:
1651         app = runner.cfg.APPLICATION
1652     except Exception:
1653         app = None
1654
1655     # unconditionally remove the tmp_local_working_dir
1656     if app is not None:
1657         tmp_local_working_dir = os.path.join(app.workdir, "tmp_package")
1658         if os.path.isdir(tmp_local_working_dir):
1659             shutil.rmtree(tmp_local_working_dir)
1660
1661     # remove the tmp directory, unless user has registered as developer
1662     if os.path.isdir(tmp_working_dir) and (not DBG.isDeveloper()):
1663         shutil.rmtree(tmp_working_dir)
1664     
1665     # Print again the path of the package
1666     logger.write("\n", 2)
1667     src.printcolors.print_value(logger, "Package path", path_targz, 2)
1668     
1669     return res