#!/usr/bin/env python
#-*- coding:utf-8 -*-
#  Copyright (C) 2010-2012  CEA/DEN
#
#  This library is free software; you can redistribute it and/or
#  modify it under the terms of the GNU Lesser General Public
#  License as published by the Free Software Foundation; either
#  version 2.1 of the License.
#
#  This library is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
#  Lesser General Public License for more details.
#
#  You should have received a copy of the GNU Lesser General Public
#  License along with this library; if not, write to the Free Software
#  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA

import os
import stat
import shutil
import datetime
import tarfile
import codecs
import string
import glob
import pprint as PP

import src

from application import get_SALOME_modules
import src.debug as DBG

BINARY = "binary"
SOURCE = "Source"
PROJECT = "Project"
SAT = "Sat"

ARCHIVE_DIR = "ARCHIVES"
PROJECT_DIR = "PROJECT"

IGNORED_DIRS = [".git", ".svn"]
IGNORED_EXTENSIONS = []

PACKAGE_EXT=".tar.gz" # the extension we use for the packages

PROJECT_TEMPLATE = """#!/usr/bin/env python
#-*- coding:utf-8 -*-

# The path to the archive root directory
root_path : $PWD + "/../"
# path to the PROJECT
project_path : $PWD + "/"

# Where to search the archives of the products
ARCHIVEPATH : $root_path + "ARCHIVES"
# Where to search the pyconf of the applications
APPLICATIONPATH : $project_path + "applications/"
# Where to search the pyconf of the products
PRODUCTPATH : $project_path + "products/"
# Where to search the pyconf of the jobs of the project
JOBPATH : $project_path + "jobs/"
# Where to search the pyconf of the machines of the project
MACHINEPATH : $project_path + "machines/"
"""

LOCAL_TEMPLATE = ("""#!/usr/bin/env python
#-*- coding:utf-8 -*-

  LOCAL :
  {
    base : 'default'
    workdir : 'default'
    log_dir : 'default'
    archive_dir : 'default'
    VCS : None
    tag : None
  }

PROJECTS :
{
project_file_paths : [$VARS.salometoolsway + $VARS.sep + \"..\" + $VARS.sep"""
""" + \"""" + PROJECT_DIR + """\" + $VARS.sep + "project.pyconf"]
}
""")

# Define all possible options for the package command:  sat package <options>
parser = src.options.Options()
parser.add_option('b', 'binaries', 'boolean', 'binaries',
    _('Optional: Produce a binary package.'), False)
parser.add_option('f', 'force_creation', 'boolean', 'force_creation',
    _('Optional: Only binary package: produce the archive even if '
      'there are some missing products.'), False)
parser.add_option('s', 'sources', 'boolean', 'sources',
    _('Optional: Produce a compilable archive of the sources of the '
      'application.'), False)
parser.add_option('', 'with_vcs', 'boolean', 'with_vcs',
    _('Optional: Do not make archives for products in VCS mode (git, cvs, svn). '
      'Sat prepare will use VCS mode instead to retrieve them.'),
    False)
parser.add_option('', 'ftp', 'boolean', 'ftp',
    _('Optional: Do not embed archives for products in archive mode. '
      'Sat prepare will use ftp instead to retrieve them.'),
    False)
parser.add_option('p', 'project', 'string', 'project',
    _('Optional: Produce an archive that contains a project.'), "")
parser.add_option('t', 'salometools', 'boolean', 'sat',
    _('Optional: Produce an archive that contains salomeTools.'), False)
parser.add_option('n', 'name', 'string', 'name',
    _('Optional: The name or full path of the archive.'), None)
parser.add_option('', 'add_files', 'list2', 'add_files',
    _('Optional: The list of additional files to add to the archive.'), [])
parser.add_option('', 'without_properties', 'properties', 'without_properties',
    _('Optional: Filter the products by their properties.\n\tSyntax: '
      '--without_properties <property>:<value>'))


def add_files(tar, name_archive, d_content, logger, f_exclude=None):
    '''Create an archive containing all directories and files that are given in
       the d_content argument.
    
    :param tar tarfile: The tarfile instance used to make the archive.
    :param name_archive str: The name of the archive to make.
    :param d_content dict: The dictionary that contains all directories and files
                           to add in the archive.
                           d_content[label] = 
                                        (path_on_local_machine, path_in_archive)
    :param logger Logger: the logging instance
    :param f_exclude Function: the function used to exclude files from the archive
    :return: 0 if success, 1 if not.
    :rtype: int
    '''
    # get the max length of the labels in order to align the display
    max_len = len(max(d_content.keys(), key=len))
    
    success = 0
    # loop over each directory or file stored in the d_content dictionary
    names = sorted(d_content.keys())
    DBG.write("add tar names", names)

    # used to avoid duplications (for pip install in python, or single_install_dir cases)
    already_added=set() 
    for name in names:
        # display information
        len_points = max_len - len(name) + 3
        local_path, archive_path = d_content[name]
        in_archive = os.path.join(name_archive, archive_path)
        logger.write(name + " " + len_points * "." + " "+ in_archive + " ", 3)
        # Get the local path and the path in archive 
        # of the directory or file to add
        # Add it in the archive
        try:
            key=local_path+"->"+in_archive
            if key not in already_added:
                tar.add(local_path, arcname=in_archive, exclude=f_exclude)
                already_added.add(key)
            logger.write(src.printcolors.printcSuccess(_("OK")), 3)
        except Exception as e:
            logger.write(src.printcolors.printcError(_("KO ")), 3)
            logger.write(str(e), 3)
            success = 1
        logger.write("\n", 3)
    return success

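# Illustrative sketch (not part of the original module): how add_files is
# typically driven when building a package archive. The archive name, label
# and paths below are hypothetical.
#
#   import tarfile
#   d_content = {"KERNEL (bin)": ("/tmp/INSTALL/KERNEL", "BINARIES-XY/KERNEL")}
#   with tarfile.open("SALOME-pkg.tar.gz", mode="w:gz") as tar:
#       add_files(tar, "SALOME-pkg", d_content, logger,
#                 f_exclude=exclude_VCS_and_extensions)
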
def exclude_VCS_and_extensions(filename):
    ''' The function used to exclude the VCS repositories (like .git) and the
        ignored extensions from the package.

    :param filename Str: The filename to exclude (or not).
    :return: True if the file has to be excluded
    :rtype: Boolean
    '''
    for dir_name in IGNORED_DIRS:
        if dir_name in filename:
            return True
    for extension in IGNORED_EXTENSIONS:
        if filename.endswith(extension):
            return True
    return False

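# Illustrative behaviour of the filter above (the paths are hypothetical):
#   exclude_VCS_and_extensions("/tmp/KERNEL/.git/config")   # True: ".git" is ignored
#   exclude_VCS_and_extensions("/tmp/KERNEL/src/main.cxx")  # False: kept in the archive
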
def produce_relative_launcher(config,
                              logger,
                              file_dir,
                              file_name,
                              binaries_dir_name):
    '''Create a specific SALOME launcher for the binary package. This launcher 
       uses relative paths.
    
    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param file_dir str: the directory where to put the launcher
    :param file_name str: The launcher name
    :param binaries_dir_name str: the name of the directory where the binaries
                                  are, in the archive.
    :return: the path of the produced launcher
    :rtype: str
    '''
    
    # get KERNEL installation path 
    kernel_root_dir = os.path.join(binaries_dir_name, "KERNEL")

    # set kernel bin dir (considering fhs property)
    kernel_cfg = src.product.get_product_config(config, "KERNEL")
    if src.get_property_in_product_cfg(kernel_cfg, "fhs"):
        bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin") 
    else:
        bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin","salome") 

    # check if the application contains an application module
    # check also if the application has a distene product;
    # in this case get its licence file name
    l_product_info = src.product.get_products_infos(config.APPLICATION.products.keys(), config)
    salome_application_name="Not defined" 
    distene_licence_file_name=False
    for prod_name, prod_info in l_product_info:
        # look for a "salome application" and a distene product
        if src.get_property_in_product_cfg(prod_info, "is_distene") == "yes":
            distene_licence_file_name = src.product.product_has_licence(prod_info, 
                                            config.PATHS.LICENCEPATH) 
        if src.get_property_in_product_cfg(prod_info, "is_salome_application") == "yes":
            salome_application_name=prod_info.name

    # if the application contains an application module, we set ABSOLUTE_APPLI_PATH to it
    # if not we set it to KERNEL_INSTALL_DIR, which is sufficient, except for salome test
    if salome_application_name == "Not defined":
        app_root_dir=kernel_root_dir
    else:
        app_root_dir=os.path.join(binaries_dir_name, salome_application_name)

    additional_env={}
    additional_env['sat_bin_kernel_install_dir'] = "out_dir_Path + " +\
                                                   config.VARS.sep + bin_kernel_install_dir
    if "python3" in config.APPLICATION and config.APPLICATION.python3 == "yes":
        additional_env['sat_python_version'] = 3
    else:
        additional_env['sat_python_version'] = 2

    additional_env['ABSOLUTE_APPLI_PATH'] = "out_dir_Path" + config.VARS.sep + app_root_dir

    # create an environment file writer
    writer = src.environment.FileEnvWriter(config,
                                           logger,
                                           file_dir,
                                           src_root=None,
                                           env_info=None)
    
    filepath = os.path.join(file_dir, file_name)
    # Write
    writer.write_env_file(filepath,
                          False,  # for launch
                          "cfgForPy",
                          additional_env=additional_env,
                          no_path_init="False",
                          for_package = binaries_dir_name)
    
    # Little hack to put out_dir_Path outside the strings
    if src.architecture.is_windows():
        src.replace_in_file(filepath, '=out_dir_Path', '=%out_dir_Path%' )
        src.replace_in_file(filepath, ';out_dir_Path', ';%out_dir_Path%' )
        src.replace_in_file(filepath, 'out_dir_Path;', '%out_dir_Path%;' )
        src.replace_in_file(filepath, 'r"out_dir_Path', '%out_dir_Path% + r"' )
        src.replace_in_file(filepath, "r'out_dir_Path + ", "%out_dir_Path% + r'" )
    else:
        src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' )
        src.replace_in_file(filepath, "r'out_dir_Path + ", "out_dir_Path + r'" )
    
    # A hack to put a call to a file for the distene licence.
    # It does nothing to an application that has no distene product
    if distene_licence_file_name:
        logger.write("Application has a distene licence file! We use it in package launcher", 5)
        hack_for_distene_licence(filepath, distene_licence_file_name)
       
    # change the rights in order to make the file executable for everybody
    os.chmod(filepath,
             stat.S_IRUSR |
             stat.S_IRGRP |
             stat.S_IROTH |
             stat.S_IWUSR |
             stat.S_IXUSR |
             stat.S_IXGRP |
             stat.S_IXOTH)

    return filepath

def hack_for_distene_licence(filepath, licence_file):
    '''Replace the distene licence env variable by a call to a file.
    
    :param filepath Str: The path to the launcher to modify.
    :param licence_file Str: The path to the distene licence file to call.
    '''  
    shutil.move(filepath, filepath + "_old")
    fileout= filepath
    filein = filepath + "_old"
    fin = open(filein, "r")
    fout = open(fileout, "w")
    text = fin.readlines()
    # Find the Distene section
    num_line = -1
    for i,line in enumerate(text):
        if "# Set DISTENE License" in line:
            num_line = i
            break
    if num_line == -1:
        # No distene product, there is nothing to do
        fin.close()
        for line in text:
            fout.write(line)
        fout.close()
        return
    del text[num_line +1]
    del text[num_line +1]
    text_to_insert ="""    try:
        distene_licence_file="%s"
        if sys.version_info[0] >= 3 and sys.version_info[1] >= 5:
            import importlib.util
            spec_dist = importlib.util.spec_from_file_location("distene_licence", distene_licence_file)
            distene=importlib.util.module_from_spec(spec_dist)
            spec_dist.loader.exec_module(distene)
        else:
            import imp
            distene = imp.load_source('distene_licence', distene_licence_file)
        distene.set_distene_variables(context)
    except:
        pass\n"""  % licence_file
    text.insert(num_line + 1, text_to_insert)
    for line in text:
        fout.write(line)
    fin.close()    
    fout.close()
    return
    
def produce_relative_env_files(config,
                              logger,
                              file_dir,
                              binaries_dir_name):
    '''Create some specific environment files for the binary package. These 
       files use relative paths.
    
    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param file_dir str: the directory where to put the files
    :param binaries_dir_name str: the name of the directory where the binaries
                                  are, in the archive.
    :return: the path of the produced environment file
    :rtype: str
    '''  
    # create an environment file writer
    writer = src.environment.FileEnvWriter(config,
                                           logger,
                                           file_dir,
                                           src_root=None)
    
    if src.architecture.is_windows():
      shell = "bat"
      filename  = "env_launch.bat"
    else:
      shell = "bash"
      filename  = "env_launch.sh"

    # Write
    filepath = writer.write_env_file(filename,
                          False, # for launch
                          shell,
                          for_package = binaries_dir_name)

    # Little hack to put out_dir_Path as environment variable
    if src.architecture.is_windows() :
      src.replace_in_file(filepath, '"out_dir_Path', '"%out_dir_Path%' )
      src.replace_in_file(filepath, '=out_dir_Path', '=%out_dir_Path%' )
    else:
      src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )

    # change the rights in order to make the file executable for everybody
    os.chmod(filepath,
             stat.S_IRUSR |
             stat.S_IRGRP |
             stat.S_IROTH |
             stat.S_IWUSR |
             stat.S_IXUSR |
             stat.S_IXGRP |
             stat.S_IXOTH)
    
    return filepath

def produce_install_bin_file(config,
                             logger,
                             file_dir,
                             d_sub,
                             file_name):
    '''Create a bash shell script which does substitutions in the BINARIES dir 
       in order to use it for extra compilations.
    
    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param file_dir str: the directory where to put the files
    :param d_sub dict: the dictionary that contains the substitutions to be done
    :param file_name str: the name of the install script file
    :return: the produced file
    :rtype: str
    '''  
    # Write
    filepath = os.path.join(file_dir, file_name)
    # open the file and write into it
    # use codec utf-8 as sat variables are in unicode
    with codecs.open(filepath, "w", 'utf-8') as installbin_file:
        installbin_template_path = os.path.join(config.VARS.internal_dir,
                                        "INSTALL_BIN.template")
        
        # build the name of the directory that will contain the binaries
        binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
        # build the substitution loop
        loop_cmd = "for f in $(grep -RIl"
        for key in d_sub:
            loop_cmd += " -e "+ key
        loop_cmd += ' ' + config.INTERNAL.config.install_dir +\
                    '); do\n     sed -i "\n'
        for key in d_sub:
            loop_cmd += "        s?" + key + "?$(pwd)/" + d_sub[key] + "?g\n"
        loop_cmd += '            " $f\ndone'

        d={}
        d["BINARIES_DIR"] = binaries_dir_name
        d["SUBSTITUTION_LOOP"]=loop_cmd
        d["INSTALL_DIR"]=config.INTERNAL.config.install_dir
        
        # substitute the template and write it in file
        content=src.template.substitute(installbin_template_path, d)
        installbin_file.write(content)
        # change the rights in order to make the file executable for everybody
        os.chmod(filepath,
                 stat.S_IRUSR |
                 stat.S_IRGRP |
                 stat.S_IROTH |
                 stat.S_IWUSR |
                 stat.S_IXUSR |
                 stat.S_IXGRP |
                 stat.S_IXOTH)
    
    return filepath

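# Illustrative sketch (not part of the original module): the kind of
# substitution dictionary produce_install_bin_file expects. The paths are
# hypothetical; each key found in the installed files is rewritten by the
# generated sed loop into "$(pwd)/<value>".
#
#   d_sub = {"/volatile/builds/SALOME/INSTALL": "INSTALL"}
#   produce_install_bin_file(config, logger, tmp_working_dir, d_sub, "install_bin.sh")
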
def product_appli_creation_script(config,
                                  logger,
                                  file_dir,
                                  binaries_dir_name):
    '''Create a script that can produce an application (EDF style) in the binary
       package.
    
    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param file_dir str: the directory where to put the file
    :param binaries_dir_name str: the name of the directory where the binaries
                                  are, in the archive.
    :return: the path of the produced script file
    :rtype: Str
    '''
    template_name = "create_appli.py.for_bin_packages.template"
    template_path = os.path.join(config.VARS.internal_dir, template_name)
    text_to_fill = open(template_path, "r").read()
    text_to_fill = text_to_fill.replace("TO BE FILLED 1",
                                        '"' + binaries_dir_name + '"')
    
    text_to_add = ""
    for product_name in get_SALOME_modules(config):
        product_info = src.product.get_product_config(config, product_name)
       
        if src.product.product_is_smesh_plugin(product_info):
            continue

        if 'install_dir' in product_info and bool(product_info.install_dir):
            if src.product.product_is_cpp(product_info):
                # cpp module
                for cpp_name in src.product.get_product_components(product_info):
                    line_to_add = ("<module name=\"" + 
                                   cpp_name + 
                                   "\" gui=\"yes\" path=\"''' + "
                                   "os.path.join(dir_bin_name, \"" + 
                                   cpp_name + "\") + '''\"/>")
            else:
                # regular module
                line_to_add = ("<module name=\"" + 
                               product_name + 
                               "\" gui=\"yes\" path=\"''' + "
                               "os.path.join(dir_bin_name, \"" + 
                               product_name + "\") + '''\"/>")
            text_to_add += line_to_add + "\n"
    
    filled_text = text_to_fill.replace("TO BE FILLED 2", text_to_add)
    
    tmp_file_path = os.path.join(file_dir, "create_appli.py")
    ff = open(tmp_file_path, "w")
    ff.write(filled_text)
    ff.close()
    
    # change the rights in order to make the file executable for everybody
    os.chmod(tmp_file_path,
             stat.S_IRUSR |
             stat.S_IRGRP |
             stat.S_IROTH |
             stat.S_IWUSR |
             stat.S_IXUSR |
             stat.S_IXGRP |
             stat.S_IXOTH)
    
    return tmp_file_path

def binary_package(config, logger, options, tmp_working_dir):
    '''Prepare a dictionary that stores all the needed directories and files to
       add in a binary package.
    
    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param options OptResult: the options of the launched command
    :param tmp_working_dir str: The temporary local directory containing some 
                                specific directories or files needed in the 
                                binary package
    :return: the dictionary that stores all the needed directories and files to
             add in a binary package.
             {label : (path_on_local_machine, path_in_archive)}
    :rtype: dict
    '''

    # Get the list of product installations to add to the archive
    l_products_name = sorted(config.APPLICATION.products.keys())
    l_product_info = src.product.get_products_infos(l_products_name,
                                                    config)
    l_install_dir = []
    l_source_dir = []
    l_not_installed = []
    l_sources_not_present = []
    generate_mesa_launcher = False  # a flag to know if we generate a mesa launcher
    if ("APPLICATION" in config  and
        "properties"  in config.APPLICATION  and
        "mesa_launcher_in_package"    in config.APPLICATION.properties  and
        config.APPLICATION.properties.mesa_launcher_in_package == "yes") :
            generate_mesa_launcher=True

    for prod_name, prod_info in l_product_info:
        # skip product with property not_in_package set to yes
        if src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes":
            continue  

        # Add the sources of the products that have the property 
        # sources_in_package : "yes"
        if src.get_property_in_product_cfg(prod_info,
                                           "sources_in_package") == "yes":
            if os.path.exists(prod_info.source_dir):
                l_source_dir.append((prod_name, prod_info.source_dir))
            else:
                l_sources_not_present.append(prod_name)

        # ignore the native and fixed products for install directories
        if (src.product.product_is_native(prod_info) 
                or src.product.product_is_fixed(prod_info)
                or not src.product.product_compiles(prod_info)):
            continue
        if src.product.check_installation(config, prod_info):
            l_install_dir.append((prod_name, prod_info.install_dir))
        else:
            l_not_installed.append(prod_name)
        
        # Add also the cpp generated modules (if any)
        if src.product.product_is_cpp(prod_info):
            # cpp module
            for name_cpp in src.product.get_product_components(prod_info):
                install_dir = os.path.join(config.APPLICATION.workdir,
                                           config.INTERNAL.config.install_dir,
                                           name_cpp) 
                if os.path.exists(install_dir):
                    l_install_dir.append((name_cpp, install_dir))
                else:
                    l_not_installed.append(name_cpp)
        
    # check the name of the directory that could contain the binaries 
    # from a previous detar
    binaries_from_detar = os.path.join(
                              config.APPLICATION.workdir,
                              config.INTERNAL.config.binary_dir + config.VARS.dist)
    if os.path.exists(binaries_from_detar):
         logger.write("""
WARNING: existing binaries directory from previous detar installation:
         %s
         To make a new package from this, you have to: 
         1) install binaries in INSTALL directory with the script "install_bin.sh" 
            see README file for more details
         2) or recompile everything in INSTALL with "sat compile" command 
            this step is long, and requires some linux packages to be installed 
            on your system\n
""" % binaries_from_detar)
    
    # Print warning or error if there are some missing products
    if len(l_not_installed) > 0:
        text_missing_prods = ""
        for p_name in l_not_installed:
            text_missing_prods += "-" + p_name + "\n"
        if not options.force_creation:
            msg = _("ERROR: there are missing products installations:")
            logger.write("%s\n%s" % (src.printcolors.printcError(msg),
                                     text_missing_prods),
                         1)
            return None
        else:
            msg = _("WARNING: there are missing products installations:")
            logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
                                     text_missing_prods),
                         1)

    # Do the same for sources
    if len(l_sources_not_present) > 0:
        text_missing_prods = ""
        for p_name in l_sources_not_present:
            text_missing_prods += "-" + p_name + "\n"
        if not options.force_creation:
            msg = _("ERROR: there are missing products sources:")
            logger.write("%s\n%s" % (src.printcolors.printcError(msg),
                                     text_missing_prods),
                         1)
            return None
        else:
            msg = _("WARNING: there are missing products sources:")
            logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
                                     text_missing_prods),
                         1)

    # construct the name of the directory that will contain the binaries
    if src.architecture.is_windows():
        binaries_dir_name = config.INTERNAL.config.binary_dir
    else:
        binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
    # construct the correlation table between the product names, their 
    # actual install directories and their install directory in the archive
    d_products = {}
    for prod_name, install_dir in l_install_dir:
        path_in_archive = os.path.join(binaries_dir_name, os.path.basename(install_dir))
        d_products[prod_name + " (bin)"] = (install_dir, path_in_archive)
        
    for prod_name, source_dir in l_source_dir:
        path_in_archive = os.path.join("SOURCES", prod_name)
        d_products[prod_name + " (sources)"] = (source_dir, path_in_archive)

    # for packages of SALOME applications including KERNEL, 
    # we produce a salome launcher or a virtual application (depending on salome version)
    if 'KERNEL' in config.APPLICATION.products:
        VersionSalome = src.get_salome_version(config)
        # Case where SALOME has the launcher that uses the SalomeContext API
        if VersionSalome >= 730:
            # create the relative launcher and add it to the files to add
            launcher_name = src.get_launcher_name(config)
            launcher_package = produce_relative_launcher(config,
                                                 logger,
                                                 tmp_working_dir,
                                                 launcher_name,
                                                 binaries_dir_name)
            d_products["launcher"] = (launcher_package, launcher_name)

            # if the application contains mesa products, we generate in addition to the 
            # classical salome launcher a launcher using mesa and called mesa_salome 
            # (the mesa launcher will be used for remote usage through ssh).
            if generate_mesa_launcher:
                # if there is one, store the use_mesa property
                restore_use_mesa_option=None
                if ('properties' in config.APPLICATION and 
                    'use_mesa' in config.APPLICATION.properties):
                    restore_use_mesa_option = config.APPLICATION.properties.use_mesa

                # activate mesa property, and generate a mesa launcher
                src.activate_mesa_property(config)  # activate use_mesa property
                launcher_mesa_name="mesa_"+launcher_name
                launcher_package_mesa = produce_relative_launcher(config,
                                                     logger,
                                                     tmp_working_dir,
                                                     launcher_mesa_name,
                                                     binaries_dir_name)
                d_products["launcher (mesa)"] = (launcher_package_mesa, launcher_mesa_name)

                # if there was a use_mesa value, we restore it
                # else we set it to the default value "no"
                if restore_use_mesa_option != None:
                    config.APPLICATION.properties.use_mesa=restore_use_mesa_option
                else:
                    config.APPLICATION.properties.use_mesa="no"

            if options.sources:
                # if we mix binaries and sources, we add a copy of the launcher, 
                # prefixed with "bin", in order to avoid clashes
                d_products["launcher (copy)"] = (launcher_package, "bin"+launcher_name)
        else:
            # Provide a script for the creation of an application EDF style
            appli_script = product_appli_creation_script(config,
                                                        logger,
                                                        tmp_working_dir,
                                                        binaries_dir_name)
            
            d_products["appli script"] = (appli_script, "create_appli.py")

    # Put also the environment file
    env_file = produce_relative_env_files(config,
                                           logger,
                                           tmp_working_dir,
                                           binaries_dir_name)

    if src.architecture.is_windows():
      filename  = "env_launch.bat"
    else:
      filename  = "env_launch.sh"
    d_products["environment file"] = (env_file, filename)      

    return d_products

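# Illustrative shape of the dictionary returned by binary_package (product
# names and paths are hypothetical), as later consumed by add_files:
#
#   {"KERNEL (bin)":     ("<workdir>/INSTALL/KERNEL",        "BINARIES-XY/KERNEL"),
#    "launcher":         ("<tmp_working_dir>/salome",        "salome"),
#    "environment file": ("<tmp_working_dir>/env_launch.sh", "env_launch.sh")}
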
def source_package(sat, config, logger, options, tmp_working_dir):
    '''Prepare a dictionary that stores all the needed directories and files to
       add in a source package.
    
    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param options OptResult: the options of the launched command
    :param tmp_working_dir str: The temporary local directory containing some 
                                specific directories or files needed in the 
                                source package
    :return: the dictionary that stores all the needed directories and files to
             add in a source package.
             {label : (path_on_local_machine, path_in_archive)}
    :rtype: dict
    '''
    
    d_archives={}
    l_pinfo_vcs = []  # stays empty in ftp mode, where get_archives is not called
    # Get all the products that are prepared using an archive,
    # unless ftp mode is specified (in this case the user of the
    # archive will get the sources through the ftp mode of sat prepare)
    if not options.ftp:
        logger.write("Find archive products ... ")
        d_archives, l_pinfo_vcs = get_archives(config, logger)
        logger.write("Done\n")

    d_archives_vcs = {}
    if not options.with_vcs and len(l_pinfo_vcs) > 0:
        # Make archives with the products that are not prepared using an archive
        # (git, cvs, svn, etc)
        logger.write("Construct archives for vcs products ... ")
        d_archives_vcs = get_archives_vcs(l_pinfo_vcs,
                                          sat,
                                          config,
                                          logger,
                                          tmp_working_dir)
        logger.write("Done\n")

    # Create a project
    logger.write("Create the project ... ")
    d_project = create_project_for_src_package(config,
                                               tmp_working_dir,
                                               options.with_vcs,
                                               options.ftp)
    logger.write("Done\n")
    
    # Add salomeTools
    tmp_sat = add_salomeTools(config, tmp_working_dir)
    d_sat = {"salomeTools" : (tmp_sat, "salomeTools")}
    
    # Add a sat symbolic link if not win
    if not src.architecture.is_windows():
        tmp_satlink_path = os.path.join(tmp_working_dir, 'sat')
        try:
            t = os.getcwd()
        except:
            # In the jobs, os.getcwd() can fail
            t = config.LOCAL.workdir
        os.chdir(tmp_working_dir)
        if os.path.lexists(tmp_satlink_path):
            os.remove(tmp_satlink_path)
        os.symlink(os.path.join('salomeTools', 'sat'), 'sat')
        os.chdir(t)
        
        d_sat["sat link"] = (tmp_satlink_path, "sat")
    
    d_source = src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
    return d_source

def get_archives(config, logger):
    '''Find all the products that are retrieved using an archive and all the
       products that are retrieved from a vcs (git, cvs, svn) repository.
    
    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :return: the dictionary {name_product : 
             (local path of its archive, path in the package of its archive )}
             and the list of specific configurations corresponding to the vcs 
             products
    :rtype: (Dict, List)
    '''
    # Get the list of product information
    l_products_name = config.APPLICATION.products.keys()
    l_product_info = src.product.get_products_infos(l_products_name,
                                                    config)
    d_archives = {}
    l_pinfo_vcs = []
    for p_name, p_info in l_product_info:
        # skip product with property not_in_package set to yes
        if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
            continue  
        # ignore the native and fixed products
        if (src.product.product_is_native(p_info) 
                or src.product.product_is_fixed(p_info)):
            continue
        if p_info.get_source == "archive":
            archive_path = p_info.archive_info.archive_name
            archive_name = os.path.basename(archive_path)
            d_archives[p_name] = (archive_path,
                                  os.path.join(ARCHIVE_DIR, archive_name))
            if (src.appli_test_property(config,"pip", "yes") and 
                src.product.product_test_property(p_info,"pip", "yes")):
                # if pip mode is activated, and product is managed by pip
                pip_wheels_dir=os.path.join(config.LOCAL.archive_dir,"wheels")
                pip_wheel_pattern=os.path.join(pip_wheels_dir, 
                    "%s-%s*" % (p_info.name, p_info.version))
                pip_wheel_path=glob.glob(pip_wheel_pattern)
                msg_pip_not_found="Error in get_archives, pip wheel for "\
                                  "product %s-%s was not found in %s directory"
                msg_pip_two_or_more="Error in get_archives, several pip wheels for "\
                                  "product %s-%s were found in %s directory"
                if len(pip_wheel_path)==0:
                    raise src.SatException(msg_pip_not_found %\
                        (p_info.name, p_info.version, pip_wheels_dir))
                if len(pip_wheel_path)>1:
                    raise src.SatException(msg_pip_two_or_more %\
                        (p_info.name, p_info.version, pip_wheels_dir))

                pip_wheel_name=os.path.basename(pip_wheel_path[0])
                d_archives[p_name+" (pip wheel)"]=(pip_wheel_path[0], 
                    os.path.join(ARCHIVE_DIR, "wheels", pip_wheel_name))
        else:
            # this product is not managed by archive, 
            # an archive of the vcs directory will be created by get_archives_vcs
            l_pinfo_vcs.append((p_name, p_info)) 
            
    return d_archives, l_pinfo_vcs

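# Illustrative return value of get_archives (hypothetical product and paths):
#
#   d_archives  = {"boost": ("/data/ARCHIVES/boost-1.71.tgz", "ARCHIVES/boost-1.71.tgz")}
#   l_pinfo_vcs = [("KERNEL", <KERNEL product Config>)]   # retrieved from git, no archive
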
def add_salomeTools(config, tmp_working_dir):
    '''Prepare a version of salomeTools that has a specific local.pyconf file 
       configured for a source package.

    :param config Config: The global configuration.
    :param tmp_working_dir str: The temporary local directory containing some 
                                specific directories or files needed in the 
                                source package
    :return: The path to the local salomeTools directory to add in the package
    :rtype: str
    '''
    # Copy sat in the temporary working directory
    sat_tmp_path = src.Path(os.path.join(tmp_working_dir, "salomeTools"))
    sat_running_path = src.Path(config.VARS.salometoolsway)
    sat_running_path.copy(sat_tmp_path)
    
    # Update the local.pyconf file that contains the path to the project
    local_pyconf_name = "local.pyconf"
    local_pyconf_dir = os.path.join(tmp_working_dir, "salomeTools", "data")
    local_pyconf_file = os.path.join(local_pyconf_dir, local_pyconf_name)
    # Remove the .pyconf file in the root directory of salomeTools if there is
    # any. (For example when launching jobs, a pyconf file describing the jobs 
    # can be here and is not useful) 
    files_or_dir_SAT = os.listdir(os.path.join(tmp_working_dir, "salomeTools"))
    for file_or_dir in files_or_dir_SAT:
        if file_or_dir.endswith(".pyconf") or file_or_dir.endswith(".txt"):
            file_path = os.path.join(tmp_working_dir,
                                     "salomeTools",
                                     file_or_dir)
            os.remove(file_path)
    
    ff = open(local_pyconf_file, "w")
    ff.write(LOCAL_TEMPLATE)
    ff.close()
    
    return sat_tmp_path.path

def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir):
    '''For source packages that require all products to be retrieved using an 
       archive, one has to create archives for the vcs products.
       So this method calls the clean and source commands of sat and then creates
       the archives.

    :param l_pinfo_vcs List: The list of specific configurations corresponding to
                             each vcs product
    :param sat Sat: The Sat instance that can be called to clean and source the
                    products
    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param tmp_working_dir str: The temporary local directory containing some 
                                specific directories or files needed in the 
                                source package
    :return: the dictionary that stores all the archives to add in the source 
             package. {label : (path_on_local_machine, path_in_archive)}
    :rtype: dict
    '''
    # clean the source directory of all the vcs products, then use the source 
    # command and thus construct an archive that will not contain the patches
    l_prod_names = [pn for pn, __ in l_pinfo_vcs]
    if False: # clean is dangerous in user/SOURCES, fixed in tmp_local_working_dir
      logger.write(_("\nclean sources\n"))
      args_clean = config.VARS.application
      args_clean += " --sources --products "
      args_clean += ",".join(l_prod_names)
      logger.write("WARNING: get_archives_vcs clean\n         '%s'\n" % args_clean, 1)
      sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)
    if True:
      # source
      logger.write(_("get sources\n"))
      args_source = config.VARS.application
      args_source += " --products "
      args_source += ",".join(l_prod_names)
      svgDir = sat.cfg.APPLICATION.workdir
      tmp_local_working_dir = os.path.join(sat.cfg.APPLICATION.workdir, "tmp_package")  # to avoid too many big files in /tmp
      sat.cfg.APPLICATION.workdir = tmp_local_working_dir
      # DBG.write("SSS sat config.APPLICATION.workdir", sat.cfg.APPLICATION, True)
      # DBG.write("sat config id", id(sat.cfg), True)
      # note: config is not the same id() as for sat.source()
      # sat.source(args_source, batch=True, verbose=5, logger_add_link = logger)
      import source
      source.run(args_source, sat, logger) # use this mode as runner.cfg reference
      
      # make the new archives
      d_archives_vcs = {}
      for pn, pinfo in l_pinfo_vcs:
          path_archive = make_archive(pn, pinfo, tmp_local_working_dir)
          logger.write("make archive vcs '%s'\n" % path_archive)
          d_archives_vcs[pn] = (path_archive,
                                os.path.join(ARCHIVE_DIR, pn + ".tgz"))
      sat.cfg.APPLICATION.workdir = svgDir
      # DBG.write("END sat config", sat.cfg.APPLICATION, True)
    return d_archives_vcs

def make_archive(prod_name, prod_info, where):
    '''Create an archive of a product by searching its source directory.

    :param prod_name str: The name of the product.
    :param prod_info Config: The specific configuration corresponding to the 
                             product
    :param where str: The path of the directory where to put the resulting 
                      archive
    :return: The path of the resulting archive
    :rtype: str
    '''
    path_targz_prod = os.path.join(where, prod_name + PACKAGE_EXT)
    tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
    local_path = prod_info.source_dir
    tar_prod.add(local_path,
                 arcname=prod_name,
                 exclude=exclude_VCS_and_extensions)
    tar_prod.close()
    return path_targz_prod

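# Illustrative call (hypothetical product configuration and destination):
#
#   archive_path = make_archive("KERNEL", kernel_info, "/tmp/tmp_package")
#   # -> "/tmp/tmp_package/KERNEL.tar.gz", with ".git"/".svn" entries excluded
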
def create_project_for_src_package(config, tmp_working_dir, with_vcs, with_ftp):
    '''Create a specific project for a source package.

    :param config Config: The global configuration.
    :param tmp_working_dir str: The temporary local directory containing some 
                                specific directories or files needed in the 
                                source package
    :param with_vcs boolean: True if the package is with vcs products (not 
                             transformed into archive products)
    :param with_ftp boolean: True if the package uses ftp servers to get archives
    :return: The dictionary 
             {"project" : (produced project, project path in the archive)}
    :rtype: Dict
    '''

    # Create in the working temporary directory the full project tree
    project_tmp_dir = os.path.join(tmp_working_dir, PROJECT_DIR)
    products_pyconf_tmp_dir = os.path.join(project_tmp_dir,
                                         "products")
    compil_scripts_tmp_dir = os.path.join(project_tmp_dir,
                                         "products",
                                         "compil_scripts")
    env_scripts_tmp_dir = os.path.join(project_tmp_dir,
                                         "products",
                                         "env_scripts")
    patches_tmp_dir = os.path.join(project_tmp_dir,
                                         "products",
                                         "patches")
    application_tmp_dir = os.path.join(project_tmp_dir,
                                         "applications")
    for directory in [project_tmp_dir,
                      compil_scripts_tmp_dir,
                      env_scripts_tmp_dir,
                      patches_tmp_dir,
                      application_tmp_dir]:
        src.ensure_path_exists(directory)

    # Create the pyconf that contains the information of the project
    project_pyconf_name = "project.pyconf"
    project_pyconf_file = os.path.join(project_tmp_dir, project_pyconf_name)
    ff = open(project_pyconf_file, "w")
    ff.write(PROJECT_TEMPLATE)
    if with_ftp and len(config.PATHS.ARCHIVEFTP) > 0:
        ftp_path='ARCHIVEFTP : "'+config.PATHS.ARCHIVEFTP[0]
        for ftpserver in config.PATHS.ARCHIVEFTP[1:]:
            ftp_path=ftp_path+":"+ftpserver
        ftp_path+='"'
        ff.write("# ftp servers where to search for prerequisite archives\n")
        ff.write(ftp_path)
    # add licence paths if any
    if len(config.PATHS.LICENCEPATH) > 0:  
        licence_path='LICENCEPATH : "'+config.PATHS.LICENCEPATH[0]
        for path in config.PATHS.LICENCEPATH[1:]:
            licence_path=licence_path+":"+path
        licence_path+='"'
        ff.write("\n# Where to search for licences\n")
        ff.write(licence_path)
        

    ff.close()
    
    # Loop over the products to get their pyconf and all the scripts 
    # (compilation, environment, patches)
    # and create the pyconf file to add to the project
    lproducts_name = config.APPLICATION.products.keys()
    l_products = src.product.get_products_infos(lproducts_name, config)
    for p_name, p_info in l_products:
        # skip product with property not_in_package set to yes
        if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
            continue  
        find_product_scripts_and_pyconf(p_name,
                                        p_info,
                                        config,
                                        with_vcs,
                                        compil_scripts_tmp_dir,
                                        env_scripts_tmp_dir,
                                        patches_tmp_dir,
                                        products_pyconf_tmp_dir)
    
    find_application_pyconf(config, application_tmp_dir)
    
    d_project = {"project" : (project_tmp_dir, PROJECT_DIR )}
    return d_project

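# Illustrative layout of the project tree generated above, as it appears in the
# archive (directory names come from the code; product names are hypothetical):
#
#   PROJECT/
#       project.pyconf
#       applications/<application>.pyconf
#       products/<product>.pyconf
#       products/compil_scripts/   products/env_scripts/   products/patches/
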
def find_product_scripts_and_pyconf(p_name,
                                    p_info,
                                    config,
                                    with_vcs,
                                    compil_scripts_tmp_dir,
                                    env_scripts_tmp_dir,
                                    patches_tmp_dir,
                                    products_pyconf_tmp_dir):
    '''Create a specific pyconf file for a given product. Get its environment 
       script, its compilation script and patches and put them in the temporary
       working directory. This method is used in the source package in order to
       construct the specific project.

    :param p_name str: The name of the product.
    :param p_info Config: The specific configuration corresponding to the 
                             product
    :param config Config: The global configuration.
    :param with_vcs boolean: True if the package is with vcs products (not 
                             transformed into archive products)
    :param compil_scripts_tmp_dir str: The path to the temporary compilation 
                                       scripts directory of the project.
    :param env_scripts_tmp_dir str: The path to the temporary environment script 
                                    directory of the project.
    :param patches_tmp_dir str: The path to the temporary patch scripts 
                                directory of the project.
    :param products_pyconf_tmp_dir str: The path to the temporary product 
                                        scripts directory of the project.
    '''
    
    # read the pyconf of the product
    product_pyconf_cfg = src.pyconf.Config(p_info.from_file)

    # find the compilation script if any
    if src.product.product_has_script(p_info):
        compil_script_path = src.Path(p_info.compil_script)
        compil_script_path.copy(compil_scripts_tmp_dir)

    # find the environment script if any
    if src.product.product_has_env_script(p_info):
        env_script_path = src.Path(p_info.environ.env_script)
        env_script_path.copy(env_scripts_tmp_dir)

    # find the patches if any
    if src.product.product_has_patches(p_info):
        patches = src.pyconf.Sequence()
        for patch_path in p_info.patches:
            p_path = src.Path(patch_path)
            p_path.copy(patches_tmp_dir)
            patches.append(os.path.basename(patch_path), "")

    if (not with_vcs) and src.product.product_is_vcs(p_info):
        # in non-vcs mode, if the product is not in archive mode, make it become archive.

        # depending upon the incremental mode, select impacted sections
        if "properties" in p_info and "incremental" in p_info.properties and\
            p_info.properties.incremental == "yes":
            sections = ["default", "default_win", p_info.section, p_info.section+"_win"]
        else:
            sections = [p_info.section]
        for section in sections:
            if section in product_pyconf_cfg and "get_source" in product_pyconf_cfg[section]:
                DBG.write("sat package set archive mode to archive for product %s and section %s" %\
                          (p_name,section))
                product_pyconf_cfg[section].get_source = "archive"
                if not "archive_info" in product_pyconf_cfg[section]:
                    product_pyconf_cfg[section].addMapping("archive_info",
                                        src.pyconf.Mapping(product_pyconf_cfg),
                                        "")
                    product_pyconf_cfg[section].archive_info.archive_name =\
                        p_info.name + ".tgz"
    
    # write the pyconf file to the temporary project location
    product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir,
                                           p_name + ".pyconf")
    ff = open(product_tmp_pyconf_path, 'w')
    ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
    product_pyconf_cfg.__save__(ff, 1)
    ff.close()

def find_application_pyconf(config, application_tmp_dir):
    '''Find the application pyconf file and put it in the specific temporary 
       directory containing the specific project of a source package.

    :param config Config: The global configuration.
    :param application_tmp_dir str: The path to the temporary application 
                                       scripts directory of the project.
    '''
    # read the pyconf of the application
    application_name = config.VARS.application
    application_pyconf_path = src.find_file_in_lpath(
                                            application_name + ".pyconf",
                                            config.PATHS.APPLICATIONPATH)
    application_pyconf_cfg = src.pyconf.Config(application_pyconf_path)
    
    # Change the workdir
    application_pyconf_cfg.APPLICATION.workdir = src.pyconf.Reference(
                                    application_pyconf_cfg,
                                    src.pyconf.DOLLAR,
                                    'VARS.salometoolsway + $VARS.sep + ".."')

    # Prevent from compilation in base
    application_pyconf_cfg.APPLICATION.no_base = "yes"
    
    # remove products that are not in config (which were filtered by --without_properties)
    for product_name in application_pyconf_cfg.APPLICATION.products.keys():
        if product_name not in config.APPLICATION.products.keys():
            application_pyconf_cfg.APPLICATION.products.__delitem__(product_name)

    # write the pyconf file to the temporary application location
    application_tmp_pyconf_path = os.path.join(application_tmp_dir,
                                               application_name + ".pyconf")

    ff = open(application_tmp_pyconf_path, 'w')
    ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
    application_pyconf_cfg.__save__(ff, 1)
    ff.close()

1149 def sat_package(config, tmp_working_dir, options, logger):
1150     '''Prepare a dictionary that stores all the needed directories and files to
1151        add in a salomeTool package.
1152     
1153     :param tmp_working_dir str: The temporary local working directory 
1154     :param options OptResult: the options of the launched command
1155     :return: the dictionary that stores all the needed directories and files to
1156              add in a salomeTool package.
1157              {label : (path_on_local_machine, path_in_archive)}
1158     :rtype: dict
1159     '''
1160     d_project = {}
1161
1162     # we include sat himself
1163     d_project["all_sat"]=(config.VARS.salometoolsway, "")
1164
1165     # and we overwrite local.pyconf with a clean wersion.
1166     local_pyconf_tmp_path = os.path.join(tmp_working_dir, "local.pyconf")
1167     local_file_path = os.path.join(config.VARS.datadir, "local.pyconf")
1168     local_cfg = src.pyconf.Config(local_file_path)
1169     local_cfg.PROJECTS.project_file_paths=src.pyconf.Sequence(local_cfg.PROJECTS)
1170     local_cfg.LOCAL["base"] = "default"
1171     local_cfg.LOCAL["workdir"] = "default"
1172     local_cfg.LOCAL["log_dir"] = "default"
1173     local_cfg.LOCAL["archive_dir"] = "default"
1174     local_cfg.LOCAL["VCS"] = "None"
1175     local_cfg.LOCAL["tag"] = src.get_salometool_version(config)
1176
1177     # if the archive contains a project, we write its relative path in local.pyconf
1178     if options.project:
1179         project_arch_path = os.path.join("projects", options.project, 
1180                                          os.path.basename(options.project_file_path))
1181         local_cfg.PROJECTS.project_file_paths.append(project_arch_path, "")
1182
1183     ff = open(local_pyconf_tmp_path, 'w')
1184     local_cfg.__save__(ff, 1)
1185     ff.close()
1186     d_project["local.pyconf"]=(local_pyconf_tmp_path, "data/local.pyconf")
1187     return d_project
1188     
1189
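# Illustrative sketch only (not executed): sat_package returns a dictionary
# mapping a label to (path_on_local_machine, path_in_archive); its content is
# roughly (local paths are indicative):
#
#   {
#     "all_sat"      : ("/path/to/salomeTools", ""),
#     "local.pyconf" : ("<tmp_working_dir>/local.pyconf", "data/local.pyconf"),
#   }
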
1190 def project_package(config, name_project, project_file_path, ftp_mode, tmp_working_dir, embedded_in_sat, logger):
1191     '''Prepare a dictionary that stores all the needed directories and files to
1192        add in a project package.
1193     
1194     :param project_file_path str: The path to the local project.
1195     :param ftp_mode boolean: Do not embed archives, the archive will rely on ftp mode to retrieve them.
1196     :param tmp_working_dir str: The temporary local directory containing some 
1197                                 specific directories or files needed in the 
1198                                 project package
1199     :param embedded_in_sat boolean : the project package is embedded in a sat package
1200     :return: the dictionary that stores all the needed directories and files to
1201              add in a project package.
1202              {label : (path_on_local_machine, path_in_archive)}
1203     :rtype: dict
1204     '''
1205     d_project = {}
1206     # Read the project file and get the directories to add to the package
1207     
1208     try: 
1209       project_pyconf_cfg = config.PROJECTS.projects.__getattr__(name_project)
1210     except Exception:
1211       logger.write("""
1212 WARNING: config.PROJECTS.projects.%s does not exist; trying to read it now from:\n%s\n""" % (name_project, project_file_path))
1213       project_pyconf_cfg = src.pyconf.Config(project_file_path)
1214       project_pyconf_cfg.PWD = os.path.dirname(project_file_path)
1215     
1216     paths = {"APPLICATIONPATH" : "applications",
1217              "PRODUCTPATH" : "products",
1218              "JOBPATH" : "jobs",
1219              "MACHINEPATH" : "machines"}
1220     if not ftp_mode:
1221         paths["ARCHIVEPATH"] = "archives"
1222
1223     # Loop over the project paths and add them
1224     project_file_name = os.path.basename(project_file_path)
1225     for path in paths:
1226         if path not in project_pyconf_cfg:
1227             continue
1228         if embedded_in_sat:
1229             dest_path = os.path.join("projects", name_project, paths[path])
1230             project_file_dest = os.path.join("projects", name_project, project_file_name)
1231         else:
1232             dest_path = paths[path]
1233             project_file_dest = project_file_name
1234
1235         # Add the directory to the files to add in the package
1236         d_project[path] = (project_pyconf_cfg[path], dest_path)
1237
1238         # Modify the value of the path in the package
1239         project_pyconf_cfg[path] = src.pyconf.Reference(
1240                                     project_pyconf_cfg,
1241                                     src.pyconf.DOLLAR,
1242                                     'project_path + "/' + paths[path] + '"')
1243     
1244     # Modify some values
1245     if "project_path" not in project_pyconf_cfg:
1246         project_pyconf_cfg.addMapping("project_path",
1247                                       src.pyconf.Mapping(project_pyconf_cfg),
1248                                       "")
1249     project_pyconf_cfg.project_path = src.pyconf.Reference(project_pyconf_cfg,
1250                                                            src.pyconf.DOLLAR,
1251                                                            'PWD')
1252     # we don't want to export these two fields
1253     project_pyconf_cfg.__delitem__("file_path")
1254     project_pyconf_cfg.__delitem__("PWD")
1255     if ftp_mode:
1256         project_pyconf_cfg.__delitem__("ARCHIVEPATH")
1257     
1258     # Write the project pyconf file
1259     project_pyconf_tmp_path = os.path.join(tmp_working_dir, project_file_name)
1260     ff = open(project_pyconf_tmp_path, 'w')
1261     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1262     project_pyconf_cfg.__save__(ff, 1)
1263     ff.close()
1264     d_project["Project hat file"] = (project_pyconf_tmp_path, project_file_dest)
1265     
1266     return d_project
1267
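# Illustrative sketch only (not executed): when the project is embedded in a
# sat package (embedded_in_sat=True), project_package typically returns entries
# such as (local paths and the project name are indicative):
#
#   {
#     "APPLICATIONPATH"  : ("<local applications dir>", "projects/<name>/applications"),
#     "PRODUCTPATH"      : ("<local products dir>", "projects/<name>/products"),
#     "Project hat file" : ("<tmp project pyconf>", "projects/<name>/<name>.pyconf"),
#   }
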
1268 def add_readme(config, options, where):
1269     readme_path = os.path.join(where, "README")
1270     with codecs.open(readme_path, "w", 'utf-8') as f:
1271
1272         # templates for building the header
1273         readme_header="""
1274 # This package was generated with sat $version
1275 # Date: $date
1276 # User: $user
1277 # Distribution : $dist
1278
1279 In the following, $$ROOT represents the directory where you have installed 
1280 SALOME (the directory where this file is located).
1281
1282 """
1283         if src.architecture.is_windows():
1284             readme_header = readme_header.replace('$$ROOT','%ROOT%')
1285         readme_compilation_with_binaries="""
1286
1287 compilation based on the binaries used as prerequisites
1288 =======================================================
1289
1290 If you fail to compile the complete application (for example because
1291 you are not root on your system and cannot install missing packages), you
1292 may try a partial compilation based on the binaries.
1293 For that it is necessary to copy the binaries from BINARIES to INSTALL,
1294 and do some substitutions on cmake and .la files (replace the build directories
1295 with local paths).
1296 The procedure to do it is:
1297  1) Remove or rename INSTALL directory if it exists
1298  2) Execute the shell script install_bin.sh:
1299  > cd $ROOT
1300  > ./install_bin.sh
1301  3) Use salomeTools (as explained in the Sources section) and compile only the 
1302     modules you need (with the -p option)
1303
1304 """
1305         readme_header_tpl=string.Template(readme_header)
1306         readme_template_path_bin = os.path.join(config.VARS.internal_dir,
1307                 "README_BIN.template")
1308         readme_template_path_bin_launcher = os.path.join(config.VARS.internal_dir,
1309                 "README_LAUNCHER.template")
1310         readme_template_path_bin_virtapp = os.path.join(config.VARS.internal_dir,
1311                 "README_BIN_VIRTUAL_APP.template")
1312         readme_template_path_src = os.path.join(config.VARS.internal_dir,
1313                 "README_SRC.template")
1314         readme_template_path_pro = os.path.join(config.VARS.internal_dir,
1315                 "README_PROJECT.template")
1316         readme_template_path_sat = os.path.join(config.VARS.internal_dir,
1317                 "README_SAT.template")
1318
1319         # prepare substitution dictionary
1320         d = dict()
1321         d['user'] = config.VARS.user
1322         d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
1323         d['version'] = src.get_salometool_version(config)
1324         d['dist'] = config.VARS.dist
1325         f.write(readme_header_tpl.substitute(d)) # write the general header (common)
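        # Illustrative sketch only (not executed): with the substitution dictionary
        # above, the header written to the README looks roughly like this
        # (version, date, user and distribution values are indicative):
        #
        #   # This package was generated with sat 5.x.y
        #   # Date: 2024-01-01 12:00
        #   # User: someuser
        #   # Distribution : CO7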
1326
1327         if options.binaries or options.sources:
1328             d['application'] = config.VARS.application
1329             d['BINARIES']    = config.INTERNAL.config.install_dir
1330             d['SEPARATOR'] = config.VARS.sep
1331             if src.architecture.is_windows():
1332                 d['operatingSystem'] = 'Windows'
1333                 d['PYTHON3'] = 'python3'
1334                 d['ROOT']    = '%ROOT%'
1335             else:
1336                 d['operatingSystem'] = 'Linux'
1337                 d['PYTHON3'] = ''
1338                 d['ROOT']    = '$ROOT'
1339             f.write("# Application: " + d['application'] + "\n")
1340             if 'KERNEL' in config.APPLICATION.products:
1341                 VersionSalome = src.get_salome_version(config)
1342                 # Case where SALOME has the launcher that uses the SalomeContext API
1343                 if VersionSalome >= 730:
1344                     d['launcher'] = config.APPLICATION.profile.launcher_name
1345                 else:
1346                     d['virtual_app'] = 'runAppli' # this info is not used for now
1347
1348         # write the specific sections
1349         if options.binaries:
1350             f.write(src.template.substitute(readme_template_path_bin, d))
1351             if "virtual_app" in d:
1352                 f.write(src.template.substitute(readme_template_path_bin_virtapp, d))
1353             if "launcher" in d:
1354                 f.write(src.template.substitute(readme_template_path_bin_launcher, d))
1355
1356         if options.sources:
1357             f.write(src.template.substitute(readme_template_path_src, d))
1358
1359         if options.binaries and options.sources and not src.architecture.is_windows():
1360             f.write(readme_compilation_with_binaries)
1361
1362         if options.project:
1363             f.write(src.template.substitute(readme_template_path_pro, d))
1364
1365         if options.sat:
1366             f.write(src.template.substitute(readme_template_path_sat, d))
1367     
1368     return readme_path
1369
1370 def update_config(config, prop, value):
1371     '''Remove from config.APPLICATION.products the products that have the property given as input.
1372     
1373     :param config Config: The global config.
1374     :param prop str: The property to filter
1375     :param value str: The value of the property to filter
1376     '''
1377     # if there is no APPLICATION (e.g. 'sat package -t'): nothing to do
1378     if "APPLICATION" in config:
1379         l_product_to_remove = []
1380         for product_name in config.APPLICATION.products.keys():
1381             prod_cfg = src.product.get_product_config(config, product_name)
1382             if src.get_property_in_product_cfg(prod_cfg, prop) == value:
1383                 l_product_to_remove.append(product_name)
1384         for product_name in l_product_to_remove:
1385             config.APPLICATION.products.__delitem__(product_name)
1386
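# Illustrative sketch only (not executed): update_config is used below in run(),
# for instance to drop the products flagged as not packageable:
#
#   update_config(runner.cfg, "not_in_package", "yes")
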
1387 def description():
1388     '''method that is called when salomeTools is called with --help option.
1389     
1390     :return: The text to display for the package command description.
1391     :rtype: str
1392     '''
1393     return _("""
1394 The package command creates a tar file archive of a product.
1395 There are four kinds of archive, which can be mixed:
1396
1397  1 - The binary archive. 
1398      It contains the product installation directories plus a launcher.
1399  2 - The sources archive. 
1400      It contains the product source archives and a project (the application plus salomeTools).
1401  3 - The project archive. 
1402      It contains a project (give the project file path as argument).
1403  4 - The salomeTools archive. 
1404      It contains the salomeTools utility code.
1405
1406 example:
1407  >> sat package SALOME-master --binaries --sources""")
1408   
1409 def run(args, runner, logger):
1410     '''method that is called when salomeTools is called with package parameter.
1411     '''
1412     
1413     # Parse the options
1414     (options, args) = parser.parse_args(args)
1415
1416     # Check that at least one type of package is requested
1417     all_option_types = (options.binaries,
1418                         options.sources,
1419                         options.project not in ["", None],
1420                         options.sat)
1421
1422     # Check if no option for package type
1423     if all_option_types.count(True) == 0:
1424         msg = _("Error: Precise a type for the package\nUse one of the "
1425                 "following options: --binaries, --sources, --project or"
1426                 " --salometools")
1427         logger.write(src.printcolors.printcError(msg), 1)
1428         logger.write("\n", 1)
1429         return 1
1430     
1431     # The default directory where to put the package if it is neither binary nor source
1432     package_default_path = runner.cfg.LOCAL.workdir
1433     
1434     # if the package contains binaries or sources:
1435     if options.binaries or options.sources:
1436         # Check that the command has been called with an application
1437         src.check_config_has_application(runner.cfg)
1438
1439         # Display information
1440         logger.write(_("Packaging application %s\n") % src.printcolors.printcLabel(
1441                                                     runner.cfg.VARS.application), 1)
1442         
1443         # Get the default directory where to put the packages
1444         package_default_path = os.path.join(runner.cfg.APPLICATION.workdir, "PACKAGE")
1445         src.ensure_path_exists(package_default_path)
1446         
1447     # if the package contains a project:
1448     if options.project:
1449         # check that the project is visible to SAT
1450         projectNameFile = options.project + ".pyconf"
1451         foundProject = None
1452         for i in runner.cfg.PROJECTS.project_file_paths:
1453             baseName = os.path.basename(i)
1454             if baseName == projectNameFile:
1455                 foundProject = i
1456                 break
1457
1458         if foundProject is None:
1459             local_path = os.path.join(runner.cfg.VARS.salometoolsway, "data", "local.pyconf")
1460             msg = _("""ERROR: the project %(1)s is not visible by salomeTools.
1461 known projects are:
1462 %(2)s
1463
1464 Please add it in file:
1465 %(3)s""" % \
1466                     {"1": options.project, "2": "\n  ".join(runner.cfg.PROJECTS.project_file_paths), "3": local_path})
1467             logger.write(src.printcolors.printcError(msg), 1)
1468             logger.write("\n", 1)
1469             return 1
1470         else:
1471             options.project_file_path = foundProject
1472             src.printcolors.print_value(logger, "Project path", options.project_file_path, 2)
1473     
1474     # Remove the products that are filtered by the --without_properties option
1475     if options.without_properties:
1476         app = runner.cfg.APPLICATION
1477         logger.trace("without_properties all products:\n %s\n" % PP.pformat(sorted(app.products.keys())))
1478         prop, value = options.without_properties
1479         update_config(runner.cfg, prop, value)
1480         logger.warning("without_properties selected products:\n %s\n" % PP.pformat(sorted(app.products.keys())))
1481
1482     # Remove from config the products that have the not_in_package property
1483     update_config(runner.cfg, "not_in_package", "yes")
1484     
1485     # get the name of the archive or build it
1486     if options.name:
1487         if os.path.basename(options.name) == options.name:
1488             # only a name (not a path)
1489             archive_name = options.name           
1490             dir_name = package_default_path
1491         else:
1492             archive_name = os.path.basename(options.name)
1493             dir_name = os.path.dirname(options.name)
1494         
1495         # strip a trailing archive extension if present
1496         if archive_name.endswith(".tgz"):
1497             archive_name = archive_name[:-len(".tgz")]
1498         if archive_name.endswith(".tar.gz"):
1499             archive_name = archive_name[:-len(".tar.gz")]
1500         
1501     else:
1502         archive_name=""
1503         dir_name = package_default_path
1504         if options.binaries or options.sources:
1505             archive_name = runner.cfg.APPLICATION.name
1506
1507         if options.binaries:
1508             archive_name += "-"+runner.cfg.VARS.dist
1509             
1510         if options.sources:
1511             archive_name += "-SRC"
1512             if options.with_vcs:
1513                 archive_name += "-VCS"
1514
1515         if options.sat:
1516             archive_name += ("salomeTools_" + src.get_salometool_version(runner.cfg))
1517
1518         if options.project:
1519             if options.sat:
1520                 archive_name += "_" 
1521             project_name = options.project
1522             archive_name += ("satproject_" + project_name)
1523  
1524         if len(archive_name)==0: # no option worked 
1525             msg = _("Error: Cannot name the archive\n"
1526                     " check if at least one of the following options was "
1527                     "selected : --binaries, --sources, --project or"
1528                     " --salometools")
1529             logger.write(src.printcolors.printcError(msg), 1)
1530             logger.write("\n", 1)
1531             return 1
1532  
1533     path_targz = os.path.join(dir_name, archive_name + PACKAGE_EXT)
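    # Illustrative sketch only (not a real path): with the naming rules above, a
    # binaries + sources package of an application named SALOME-master is written
    # to something like (the distribution suffix depends on the platform):
    #
    #   <package_default_path>/SALOME-master-<dist>-SRC.tar.gz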
1534     
1535     src.printcolors.print_value(logger, "Package path", path_targz, 2)
1536
1537     # Create a working directory for all files that are produced during the
1538     # package creation and that will be removed at the end of the command
1539     tmp_working_dir = os.path.join(runner.cfg.VARS.tmp_root, runner.cfg.VARS.datehour)
1540     src.ensure_path_exists(tmp_working_dir)
1541     logger.write("\n", 5)
1542     logger.write(_("The temporary working directory: %s\n" % tmp_working_dir),5)
1543     
1544     logger.write("\n", 3)
1545
1546     msg = _("Preparation of files to add to the archive")
1547     logger.write(src.printcolors.printcLabel(msg), 2)
1548     logger.write("\n", 2)
1549     
1550     d_files_to_add={}  # content of the archive
1551
1552     # a dict to hold paths that will need to be substituted for user recompilations
1553     d_paths_to_substitute={}  
1554
1555     if options.binaries:
1556         d_bin_files_to_add = binary_package(runner.cfg,
1557                                             logger,
1558                                             options,
1559                                             tmp_working_dir)
1560         # for all binaries dirs, store the substitutions that will be required 
1561         # for extra compilations
1562         for key in d_bin_files_to_add:
1563             if key.endswith("(bin)"):
1564                 source_dir = d_bin_files_to_add[key][0]
1565                 path_in_archive = d_bin_files_to_add[key][1].replace(
1566                    runner.cfg.INTERNAL.config.binary_dir + runner.cfg.VARS.dist,
1567                    runner.cfg.INTERNAL.config.install_dir)
1568                 if os.path.basename(source_dir)==os.path.basename(path_in_archive):
1569                     # if basename is the same we will just substitute the dirname 
1570                     d_paths_to_substitute[os.path.dirname(source_dir)]=\
1571                         os.path.dirname(path_in_archive)
1572                 else:
1573                     d_paths_to_substitute[source_dir]=path_in_archive
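        # Illustrative sketch only (not executed): d_paths_to_substitute maps the
        # install directories of the build machine to the corresponding INSTALL
        # paths of the extracted archive, e.g. (purely indicative):
        #
        #   { "/build/machine/path/INSTALL/KERNEL" : "INSTALL/KERNEL" }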
1574
1575         d_files_to_add.update(d_bin_files_to_add)
1576
1577     if options.sources:
1578         d_files_to_add.update(source_package(runner,
1579                                         runner.cfg,
1580                                         logger, 
1581                                         options,
1582                                         tmp_working_dir))
1583         if options.binaries:
1584             # for archives with bin and sources we provide a shell script able to 
1585             # install binaries for compilation
1586             file_install_bin=produce_install_bin_file(runner.cfg,logger,
1587                                                       tmp_working_dir,
1588                                                       d_paths_to_substitute,
1589                                                       "install_bin.sh")
1590             d_files_to_add.update({"install_bin" : (file_install_bin, "install_bin.sh")})
1591             logger.write("substitutions that need to be done later : \n", 5)
1592             logger.write(str(d_paths_to_substitute), 5)
1593             logger.write("\n", 5)
1594     else:
1595         # the --salometools option is not considered when --sources is selected,
1596         # as that option already embeds salomeTools
1597         if options.sat:
1598             d_files_to_add.update(sat_package(runner.cfg, tmp_working_dir, 
1599                                   options, logger))
1600         
1601     if options.project:
1602         DBG.write("config for package %s" % project_name, runner.cfg)
1603         d_files_to_add.update(project_package(runner.cfg, project_name, options.project_file_path, options.ftp, tmp_working_dir, options.sat, logger))
1604
1605     if not d_files_to_add:
1606         msg = _("Error: Empty dictionary to build the archive!\n")
1607         logger.write(src.printcolors.printcError(msg), 1)
1608         logger.write("\n", 1)
1609         return 1
1610
1611     # Add the README file in the package
1612     local_readme_tmp_path = add_readme(runner.cfg, options, tmp_working_dir)
1613     d_files_to_add["README"] = (local_readme_tmp_path, "README")
1614
1615     # Add the additional files of option add_files
1616     if options.add_files:
1617         for file_path in options.add_files:
1618             if not os.path.exists(file_path):
1619                 msg = _("WARNING: the file %s is not accessible.\n" % file_path)
1620                 continue
1621             file_name = os.path.basename(file_path)
1622             d_files_to_add[file_name] = (file_path, file_name)
1623
1624     logger.write("\n", 2)
1625     logger.write(src.printcolors.printcLabel(_("Actually do the package")), 2)
1626     logger.write("\n", 2)
1627     logger.write("\nfiles and directories to add:\n%s\n\n" % PP.pformat(d_files_to_add), 5)
1628
1629     res = 0
1630     try:
1631         # Creating the object tarfile
1632         tar = tarfile.open(path_targz, mode='w:gz')
1633         
1634         # get the filtering function if needed
1635         filter_function = exclude_VCS_and_extensions
1636
1637         # Add the files to the tarfile object
1638         res = add_files(tar, archive_name, d_files_to_add, logger, f_exclude=filter_function)
1639         tar.close()
1640     except KeyboardInterrupt:
1641         logger.write(src.printcolors.printcError("\nERROR: forced interruption\n"), 1)
1642         logger.write(_("Removing the temporary working directory '%s'... ") % tmp_working_dir, 1)
1643         # remove the working directory
1644         shutil.rmtree(tmp_working_dir)
1645         logger.write(_("OK"), 1)
1646         logger.write(_("\n"), 1)
1647         return 1
1648     
1649     # case with no application, e.g. packaging only sat itself ('sat package -t')
1650     try:
1651         app = runner.cfg.APPLICATION
1652     except Exception:
1653         app = None
1654
1655     # unconditionally remove the tmp_local_working_dir
1656     if app is not None:
1657         tmp_local_working_dir = os.path.join(app.workdir, "tmp_package")
1658         if os.path.isdir(tmp_local_working_dir):
1659             shutil.rmtree(tmp_local_working_dir)
1660
1661     # remove the tmp directory, unless the user is registered as a developer
1662     if os.path.isdir(tmp_working_dir) and (not DBG.isDeveloper()):
1663         shutil.rmtree(tmp_working_dir)
1664     
1665     # Print again the path of the package
1666     logger.write("\n", 2)
1667     src.printcolors.print_value(logger, "Package path", path_targz, 2)
1668     
1669     return res