sat #19218: fix bug in out_dir_Path handling as a postfix
[tools/sat.git] / commands / package.py
1 #!/usr/bin/env python
2 #-*- coding:utf-8 -*-
3 #  Copyright (C) 2010-2012  CEA/DEN
4 #
5 #  This library is free software; you can redistribute it and/or
6 #  modify it under the terms of the GNU Lesser General Public
7 #  License as published by the Free Software Foundation; either
8 #  version 2.1 of the License.
9 #
10 #  This library is distributed in the hope that it will be useful,
11 #  but WITHOUT ANY WARRANTY; without even the implied warranty of
12 #  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13 #  Lesser General Public License for more details.
14 #
15 #  You should have received a copy of the GNU Lesser General Public
16 #  License along with this library; if not, write to the Free Software
17 #  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
18
19 import os
20 import stat
21 import shutil
22 import datetime
23 import tarfile
24 import codecs
25 import string
26 import glob
27 import pprint as PP
28
29 import src
30
31 from application import get_SALOME_modules
32 import src.debug as DBG
33
34 BINARY = "binary"
35 SOURCE = "Source"
36 PROJECT = "Project"
37 SAT = "Sat"
38
39 ARCHIVE_DIR = "ARCHIVES"
40 PROJECT_DIR = "PROJECT"
41
42 IGNORED_DIRS = [".git", ".svn"]
43 IGNORED_EXTENSIONS = []
44
45 PACKAGE_EXT=".tar.gz" # the extension we use for the packages
46
47 PROJECT_TEMPLATE = """#!/usr/bin/env python
48 #-*- coding:utf-8 -*-
49
50 # The path to the archive root directory
51 root_path : $PWD + "/../"
52 # path to the PROJECT
53 project_path : $PWD + "/"
54
55 # Where to search for the archives of the products
56 ARCHIVEPATH : $root_path + "ARCHIVES"
57 # Where to search for the pyconf of the applications
58 APPLICATIONPATH : $project_path + "applications/"
59 # Where to search for the pyconf of the products
60 PRODUCTPATH : $project_path + "products/"
61 # Where to search for the pyconf of the jobs of the project
62 JOBPATH : $project_path + "jobs/"
63 # Where to search for the pyconf of the machines of the project
64 MACHINEPATH : $project_path + "machines/"
65 """
66
67 LOCAL_TEMPLATE = ("""#!/usr/bin/env python
68 #-*- coding:utf-8 -*-
69
70   LOCAL :
71   {
72     base : 'default'
73     workdir : 'default'
74     log_dir : 'default'
75     archive_dir : 'default'
76     VCS : 'unknown'
77     tag : 'unknown'
78   }
79
80 PROJECTS :
81 {
82 project_file_paths : [$VARS.salometoolsway + $VARS.sep + \"..\" + $VARS.sep"""
83 """ + \"""" + PROJECT_DIR + """\" + $VARS.sep + "project.pyconf"]
84 }
85 """)
86
87 # Define all possible options for the package command: sat package <options>
88 parser = src.options.Options()
89 parser.add_option('b', 'binaries', 'boolean', 'binaries',
90     _('Optional: Produce a binary package.'), False)
91 parser.add_option('f', 'force_creation', 'boolean', 'force_creation',
92     _('Optional: Only binary package: produce the archive even if '
93       'there are some missing products.'), False)
94 parser.add_option('s', 'sources', 'boolean', 'sources',
95     _('Optional: Produce a compilable archive of the sources of the '
96       'application.'), False)
97 parser.add_option('', 'with_vcs', 'boolean', 'with_vcs',
98     _('Optional: Do not make archives for products in VCS mode (git, cvs, svn). '
99       'Sat prepare will use VCS mode instead to retrieve them.'),
100     False)
101 parser.add_option('', 'ftp', 'boolean', 'ftp',
102     _('Optional: Do not embed archives for products in archive mode. '
103       'Sat prepare will use ftp instead to retrieve them.'),
104     False)
105 parser.add_option('p', 'project', 'string', 'project',
106     _('Optional: Produce an archive that contains a project.'), "")
107 parser.add_option('t', 'salometools', 'boolean', 'sat',
108     _('Optional: Produce an archive that contains salomeTools.'), False)
109 parser.add_option('n', 'name', 'string', 'name',
110     _('Optional: The name or full path of the archive.'), None)
111 parser.add_option('', 'add_files', 'list2', 'add_files',
112     _('Optional: The list of additional files to add to the archive.'), [])
113 parser.add_option('', 'without_properties', 'properties', 'without_properties',
114     _('Optional: Filter the products by their properties.\n\tSyntax: '
115       '--without_properties <property>:<value>'))
116
117
118 def add_files(tar, name_archive, d_content, logger, f_exclude=None):
119     '''Create an archive containing all directories and files that are given in
120        the d_content argument.
121     
122     :param tar tarfile: The tarfile instance used to make the archive.
123     :param name_archive str: The name of the archive to make.
124     :param d_content dict: The dictionary that contains all directories and files
125                            to add in the archive.
126                            d_content[label] = 
127                                         (path_on_local_machine, path_in_archive)
128     :param logger Logger: the logging instance
129     :param f_exclude Function: the function that filters
130     :return: 0 if success, 1 if not.
131     :rtype: int
132     '''
133     # get the max length of the names in order to align the display
134     max_len = len(max(d_content.keys(), key=len))
135     
136     success = 0
137     # loop over each directory or file stored in the d_content dictionary
138     names = sorted(d_content.keys())
139     DBG.write("add tar names", names)
140
141     # used to avoid duplications (for pip install in python, or single_install_dir cases)
142     already_added=set() 
143     for name in names:
144         # display information
145         len_points = max_len - len(name) + 3
146         local_path, archive_path = d_content[name]
147         in_archive = os.path.join(name_archive, archive_path)
148         logger.write(name + " " + len_points * "." + " "+ in_archive + " ", 3)
149         # Get the local path and the path in archive 
150         # of the directory or file to add
151         # Add it in the archive
152         try:
153             key=local_path+"->"+in_archive
154             if key not in already_added:
155                 tar.add(local_path, arcname=in_archive, exclude=f_exclude)
156                 already_added.add(key)
157             logger.write(src.printcolors.printcSuccess(_("OK")), 3)
158         except Exception as e:
159             logger.write(src.printcolors.printcError(_("KO ")), 3)
160             logger.write(str(e), 3)
161             success = 1
162         logger.write("\n", 3)
163     return success
164
165 def exclude_VCS_and_extensions(filename):
166     ''' The function used to exclude from the package the links to the
167         VCS repositories (like .git) and the ignored extensions.
168
169     :param filename Str: The filename to exclude (or not).
170     :return: True if the file has to be excluded
171     :rtype: Boolean
172     '''
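    # Used as the tarfile 'exclude' callback: it is called with each file name
    # and returning True skips that entry (see the tar.add calls in add_files
    # and make_archive).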
173     for dir_name in IGNORED_DIRS:
174         if dir_name in filename:
175             return True
176     for extension in IGNORED_EXTENSIONS:
177         if filename.endswith(extension):
178             return True
179     return False
180
181 def produce_relative_launcher(config,
182                               logger,
183                               file_dir,
184                               file_name,
185                               binaries_dir_name):
186     '''Create a specific SALOME launcher for the binary package. This launcher 
187        uses relative paths.
188     
189     :param config Config: The global configuration.
190     :param logger Logger: the logging instance
191     :param file_dir str: the directory where to put the launcher
192     :param file_name str: The launcher name
193     :param binaries_dir_name str: the name of the directory in the archive
194                                   where the binaries are.
195     :return: the path of the produced launcher
196     :rtype: str
197     '''
198     
199     # get KERNEL installation path 
200     kernel_info = src.product.get_product_config(config, "KERNEL")
201     kernel_base_name=os.path.basename(kernel_info.install_dir)
202     if kernel_base_name.startswith("config"):
203         # case of kernel installed in base. We remove "config-i"
204         kernel_base_name=os.path.basename(os.path.dirname(kernel_info.install_dir))
205     
206     kernel_root_dir = os.path.join(binaries_dir_name, kernel_base_name)
207
208     # set kernel bin dir (considering fhs property)
209     kernel_cfg = src.product.get_product_config(config, "KERNEL")
210     if src.get_property_in_product_cfg(kernel_cfg, "fhs"):
211         bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin") 
212     else:
213         bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin","salome") 
214
215     # check if the application contains an application module
216     # check also if the application has a distene product, 
217     # in this case get its licence file name
218     l_product_info = src.product.get_products_infos(config.APPLICATION.products.keys(), config)
219     salome_application_name="Not defined" 
220     distene_licence_file_name=False
221     for prod_name, prod_info in l_product_info:
222         # look for a "salome application" and a distene product
223         if src.get_property_in_product_cfg(prod_info, "is_distene") == "yes":
224             distene_licence_file_name = src.product.product_has_licence(prod_info, 
225                                             config.PATHS.LICENCEPATH) 
226         if src.get_property_in_product_cfg(prod_info, "is_salome_application") == "yes":
227             salome_application_name=prod_info.name
228
229     # if the application contains an application module, we set ABSOLUTE_APPLI_PATH to it
230     # if not we set it to KERNEL_INSTALL_DIR, which is sufficient, except for salome test
231     if salome_application_name == "Not defined":
232         app_root_dir=kernel_root_dir
233     else:
234         app_root_dir=os.path.join(binaries_dir_name, salome_application_name)
235
236     additional_env={}
237     additional_env['sat_bin_kernel_install_dir'] = "out_dir_Path + " +\
238                                                    config.VARS.sep + bin_kernel_install_dir
239     if "python3" in config.APPLICATION and config.APPLICATION.python3 == "yes":
240         additional_env['sat_python_version'] = 3
241     else:
242         additional_env['sat_python_version'] = 2
243
244     additional_env['ABSOLUTE_APPLI_PATH'] = "out_dir_Path" + config.VARS.sep + app_root_dir
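    # at launch time ABSOLUTE_APPLI_PATH thus points inside the unpacked archive,
    # e.g. <out_dir_Path>/BINARIES-<dist>/KERNEL  (illustrative path)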
245
246     # create an environment file writer
247     writer = src.environment.FileEnvWriter(config,
248                                            logger,
249                                            file_dir,
250                                            src_root=None,
251                                            env_info=None)
252     
253     filepath = os.path.join(file_dir, file_name)
254     # Write
255     writer.write_env_file(filepath,
256                           False,  # for launch
257                           "cfgForPy",
258                           additional_env=additional_env,
259                           no_path_init="False",
260                           for_package = binaries_dir_name)
261     
262     # Little hack to put out_dir_Path outside the strings
263     src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' )
264     src.replace_in_file(filepath, "r'out_dir_Path + ", "out_dir_Path + r'" )
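    # e.g. a generated line  r"out_dir_Path/BINARIES-XX/KERNEL/bin"  becomes
    # out_dir_Path + r"/BINARIES-XX/KERNEL/bin", so that out_dir_Path is
    # evaluated as a variable at launch time (illustrative path)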
265     
266     # A hack to put a call to a file for distene licence.
267     # It does nothing to an application that has no distene product
268     if distene_licence_file_name:
269         logger.write("Application has a distene licence file! We use it in package launcher", 5)
270         hack_for_distene_licence(filepath, distene_licence_file_name)
271        
272     # change the rights in order to make the file executable for everybody
273     os.chmod(filepath,
274              stat.S_IRUSR |
275              stat.S_IRGRP |
276              stat.S_IROTH |
277              stat.S_IWUSR |
278              stat.S_IXUSR |
279              stat.S_IXGRP |
280              stat.S_IXOTH)
281
282     return filepath
283
284 def hack_for_distene_licence(filepath, licence_file):
285     '''Replace the distene licence env variable by a call to a file.
286     
287     :param filepath Str: The path to the launcher to modify.
288     '''  
289     shutil.move(filepath, filepath + "_old")
290     fileout= filepath
291     filein = filepath + "_old"
292     fin = open(filein, "r")
293     fout = open(fileout, "w")
294     text = fin.readlines()
295     # Find the Distene section
296     num_line = -1
297     for i,line in enumerate(text):
298         if "# Set DISTENE License" in line:
299             num_line = i
300             break
301     if num_line == -1:
302         # No distene product, there is nothing to do
303         fin.close()
304         for line in text:
305             fout.write(line)
306         fout.close()
307         return
308     del text[num_line +1]
309     del text[num_line +1]
310     text_to_insert ="""    try:
311         distene_licence_file=r"%s"
312         if sys.version_info[0] >= 3 and sys.version_info[1] >= 5:
313             import importlib.util
314             spec_dist = importlib.util.spec_from_file_location("distene_licence", distene_licence_file)
315             distene=importlib.util.module_from_spec(spec_dist)
316             spec_dist.loader.exec_module(distene)
317         else:
318             import imp
319             distene = imp.load_source('distene_licence', distene_licence_file)
320         distene.set_distene_variables(context)
321     except:
322         pass\n"""  % licence_file
323     text.insert(num_line + 1, text_to_insert)
324     for line in text:
325         fout.write(line)
326     fin.close()    
327     fout.close()
328     return
329     
330 def produce_relative_env_files(config,
331                               logger,
332                               file_dir,
333                               binaries_dir_name):
334     '''Create some specific environment files for the binary package. These 
335        files use relative paths.
336     
337     :param config Config: The global configuration.
338     :param logger Logger: the logging instance
339     :param file_dir str: the directory where to put the files
340     :param binaries_dir_name str: the name of the directory in the archive
341                                   where the binaries are.
342     :return: the list of path of the produced environment files
343     :rtype: List
344     '''  
345     # create an environment file writer
346     writer = src.environment.FileEnvWriter(config,
347                                            logger,
348                                            file_dir,
349                                            src_root=None)
350     
351     if src.architecture.is_windows():
352       shell = "bat"
353       filename  = "env_launch.bat"
354     else:
355       shell = "bash"
356       filename  = "env_launch.sh"
357
358     # Write
359     filepath = writer.write_env_file(filename,
360                           False, # for launch
361                           shell,
362                           for_package = binaries_dir_name)
363
364     # Little hack to put out_dir_Path as environment variable
365     if src.architecture.is_windows() :
366       src.replace_in_file(filepath, '"out_dir_Path', '"%out_dir_Path%' )
367       src.replace_in_file(filepath, '=out_dir_Path', '=%out_dir_Path%' )
368       src.replace_in_file(filepath, ';out_dir_Path', ';%out_dir_Path%' )
369     else:
370       src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )
371       src.replace_in_file(filepath, ':out_dir_Path', ':${out_dir_Path}' )
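      # e.g. a line   PATH="out_dir_Path/BINARIES-XX/KERNEL/bin:$PATH"
      # becomes       PATH="${out_dir_Path}/BINARIES-XX/KERNEL/bin:$PATH"
      # (illustrative path; on Windows %out_dir_Path% is used instead)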
372
373     # change the rights in order to make the file executable for everybody
374     os.chmod(filepath,
375              stat.S_IRUSR |
376              stat.S_IRGRP |
377              stat.S_IROTH |
378              stat.S_IWUSR |
379              stat.S_IXUSR |
380              stat.S_IXGRP |
381              stat.S_IXOTH)
382     
383     return filepath
384
385 def produce_install_bin_file(config,
386                              logger,
387                              file_dir,
388                              d_sub,
389                              file_name):
390     '''Create a bash shell script which does substitutions in the BINARIES dir
391        in order to use it for extra compilations.
392     
393     :param config Config: The global configuration.
394     :param logger Logger: the logging instance
395     :param file_dir str: the directory where to put the files
396     :param d_sub dict: the dictionary that contains the substitutions to be done
397     :param file_name str: the name of the install script file
398     :return: the produced file
399     :rtype: str
400     '''  
401     # Write
402     filepath = os.path.join(file_dir, file_name)
403     # open the file and write into it
404     # use codec utf-8 as sat variables are in unicode
405     with codecs.open(filepath, "w", 'utf-8') as installbin_file:
406         installbin_template_path = os.path.join(config.VARS.internal_dir,
407                                         "INSTALL_BIN.template")
408         
409         # build the name of the directory that will contain the binaries
410         binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
411         # build the substitution loop
412         loop_cmd = "for f in $(grep -RIl"
413         for key in d_sub:
414             loop_cmd += " -e "+ key
415         loop_cmd += ' ' + config.INTERNAL.config.install_dir +\
416                     '); do\n     sed -i "\n'
417         for key in d_sub:
418             loop_cmd += "        s?" + key + "?$(pwd)/" + d_sub[key] + "?g\n"
419         loop_cmd += '            " $f\ndone'
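        # The generated snippet looks roughly like (illustrative keys/values):
        #   for f in $(grep -RIl -e KEY1 -e KEY2 INSTALL); do
        #        sed -i "
        #           s?KEY1?$(pwd)/VALUE1?g
        #           " $f
        #   done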
420
421         d={}
422         d["BINARIES_DIR"] = binaries_dir_name
423         d["SUBSTITUTION_LOOP"]=loop_cmd
424         d["INSTALL_DIR"]=config.INTERNAL.config.install_dir
425         
426         # substitute the template and write it in file
427         content=src.template.substitute(installbin_template_path, d)
428         installbin_file.write(content)
429         # change the rights in order to make the file executable for everybody
430         os.chmod(filepath,
431                  stat.S_IRUSR |
432                  stat.S_IRGRP |
433                  stat.S_IROTH |
434                  stat.S_IWUSR |
435                  stat.S_IXUSR |
436                  stat.S_IXGRP |
437                  stat.S_IXOTH)
438     
439     return filepath
440
441 def product_appli_creation_script(config,
442                                   logger,
443                                   file_dir,
444                                   binaries_dir_name):
445     '''Create a script that can produce an application (EDF style) in the binary
446        package.
447     
448     :param config Config: The global configuration.
449     :param logger Logger: the logging instance
450     :param file_dir str: the directory where to put the file
451     :param binaries_dir_name str: the name of the directory in the archive
452                                   where the binaries are.
453     :return: the path of the produced script file
454     :rtype: Str
455     '''
456     template_name = "create_appli.py.for_bin_packages.template"
457     template_path = os.path.join(config.VARS.internal_dir, template_name)
458     text_to_fill = open(template_path, "r").read()
459     text_to_fill = text_to_fill.replace("TO BE FILLED 1",
460                                         '"' + binaries_dir_name + '"')
461     
462     text_to_add = ""
463     for product_name in get_SALOME_modules(config):
464         product_info = src.product.get_product_config(config, product_name)
465        
466         if src.product.product_is_smesh_plugin(product_info):
467             continue
468
469         if 'install_dir' in product_info and bool(product_info.install_dir):
470             if src.product.product_is_cpp(product_info):
471                 # cpp module
472                 for cpp_name in src.product.get_product_components(product_info):
473                     line_to_add = ("<module name=\"" + 
474                                    cpp_name + 
475                                    "\" gui=\"yes\" path=\"''' + "
476                                    "os.path.join(dir_bin_name, \"" + 
477                                    cpp_name + "\") + '''\"/>")
478             else:
479                 # regular module
480                 line_to_add = ("<module name=\"" + 
481                                product_name + 
482                                "\" gui=\"yes\" path=\"''' + "
483                                "os.path.join(dir_bin_name, \"" + 
484                                product_name + "\") + '''\"/>")
485             text_to_add += line_to_add + "\n"
486     
487     filled_text = text_to_fill.replace("TO BE FILLED 2", text_to_add)
488     
489     tmp_file_path = os.path.join(file_dir, "create_appli.py")
490     ff = open(tmp_file_path, "w")
491     ff.write(filled_text)
492     ff.close()
493     
494     # change the rights in order to make the file executable for everybody
495     os.chmod(tmp_file_path,
496              stat.S_IRUSR |
497              stat.S_IRGRP |
498              stat.S_IROTH |
499              stat.S_IWUSR |
500              stat.S_IXUSR |
501              stat.S_IXGRP |
502              stat.S_IXOTH)
503     
504     return tmp_file_path
505
506 def binary_package(config, logger, options, tmp_working_dir):
507     '''Prepare a dictionary that stores all the needed directories and files to
508        add in a binary package.
509     
510     :param config Config: The global configuration.
511     :param logger Logger: the logging instance
512     :param options OptResult: the options of the launched command
513     :param tmp_working_dir str: The temporary local directory containing some 
514                                 specific directories or files needed in the 
515                                 binary package
516     :return: the dictionary that stores all the needed directories and files to
517              add in a binary package.
518              {label : (path_on_local_machine, path_in_archive)}
519     :rtype: dict
520     '''
521
522     # Get the list of product installation to add to the archive
523     l_products_name = sorted(config.APPLICATION.products.keys())
524     l_product_info = src.product.get_products_infos(l_products_name,
525                                                     config)
526     l_install_dir = []
527     l_source_dir = []
528     l_not_installed = []
529     l_sources_not_present = []
530     generate_mesa_launcher = False  # a flag to know if we generate a mesa launcher
531     if ("APPLICATION" in config  and
532         "properties"  in config.APPLICATION  and
533         "mesa_launcher_in_package"    in config.APPLICATION.properties  and
534         config.APPLICATION.properties.mesa_launcher_in_package == "yes") :
535             generate_mesa_launcher=True
536
537     for prod_name, prod_info in l_product_info:
538         # skip product with property not_in_package set to yes
539         if src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes":
540             continue  
541
542         # Add the sources of the products that have the property 
543         # sources_in_package : "yes"
544         if src.get_property_in_product_cfg(prod_info,
545                                            "sources_in_package") == "yes":
546             if os.path.exists(prod_info.source_dir):
547                 l_source_dir.append((prod_name, prod_info.source_dir))
548             else:
549                 l_sources_not_present.append(prod_name)
550
551         # ignore the native and fixed products for install directories
552         if (src.product.product_is_native(prod_info) 
553                 or src.product.product_is_fixed(prod_info)
554                 or not src.product.product_compiles(prod_info)):
555             continue
556         if src.product.check_installation(config, prod_info):
557             l_install_dir.append((prod_name, prod_info.install_dir))
558         else:
559             l_not_installed.append(prod_name)
560         
561         # Add also the cpp generated modules (if any)
562         if src.product.product_is_cpp(prod_info):
563             # cpp module
564             for name_cpp in src.product.get_product_components(prod_info):
565                 install_dir = os.path.join(config.APPLICATION.workdir,
566                                            config.INTERNAL.config.install_dir,
567                                            name_cpp) 
568                 if os.path.exists(install_dir):
569                     l_install_dir.append((name_cpp, install_dir))
570                 else:
571                     l_not_installed.append(name_cpp)
572         
573     # check the name of the directory that could contain the binaries
574     # from a previous detar
575     binaries_from_detar = os.path.join(
576                               config.APPLICATION.workdir,
577                               config.INTERNAL.config.binary_dir + config.VARS.dist)
578     if os.path.exists(binaries_from_detar):
579          logger.write("""
580 WARNING: existing binaries directory from previous detar installation:
581          %s
582          To make a new package from this, you have to:
583          1) install binaries in INSTALL directory with the script "install_bin.sh" 
584             see README file for more details
585          2) or recompile everything in INSTALL with "sat compile" command 
586             this step is long, and requires some linux packages to be installed 
587             on your system\n
588 """ % binaries_from_detar)
589     
590     # Print warning or error if there are some missing products
591     if len(l_not_installed) > 0:
592         text_missing_prods = ""
593         for p_name in l_not_installed:
594             text_missing_prods += " - " + p_name + "\n"
595         if not options.force_creation:
596             msg = _("ERROR: there are missing product installations:")
597             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
598                                      text_missing_prods),
599                          1)
600             raise src.SatException(msg)
601         else:
602             msg = _("WARNING: there are missing product installations:")
603             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
604                                      text_missing_prods),
605                          1)
606
607     # Do the same for sources
608     if len(l_sources_not_present) > 0:
609         text_missing_prods = ""
610         for p_name in l_sources_not_present:
611             text_missing_prods += " - " + p_name + "\n"
612         if not options.force_creation:
613             msg = _("ERROR: there are missing product sources:")
614             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
615                                      text_missing_prods),
616                          1)
617             raise src.SatException(msg)
618         else:
619             msg = _("WARNING: there are missing product sources:")
620             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
621                                      text_missing_prods),
622                          1)
623  
624     # construct the name of the directory that will contain the binaries
625     if src.architecture.is_windows():
626         binaries_dir_name = config.INTERNAL.config.binary_dir
627     else:
628         binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
629     # construct the correlation table between the product names, their
630     # actual install directories and their install directories in the archive
631     d_products = {}
632     for prod_name, install_dir in l_install_dir:
633         prod_base_name=os.path.basename(install_dir)
634         if prod_base_name.startswith("config"):
635             # case of a product installed in base. We remove "config-i"
636             prod_base_name=os.path.basename(os.path.dirname(install_dir))
637         path_in_archive = os.path.join(binaries_dir_name, prod_base_name)
638         d_products[prod_name + " (bin)"] = (install_dir, path_in_archive)
639         
640     for prod_name, source_dir in l_source_dir:
641         path_in_archive = os.path.join("SOURCES", prod_name)
642         d_products[prod_name + " (sources)"] = (source_dir, path_in_archive)
643
644     # for packages of SALOME applications including KERNEL, 
645     # we produce a salome launcher or a virtual application (depending on salome version)
646     if 'KERNEL' in config.APPLICATION.products:
647         VersionSalome = src.get_salome_version(config)
648         # Case where SALOME has the launcher that uses the SalomeContext API
649         if VersionSalome >= 730:
650             # create the relative launcher and add it to the files to add
651             launcher_name = src.get_launcher_name(config)
652             launcher_package = produce_relative_launcher(config,
653                                                  logger,
654                                                  tmp_working_dir,
655                                                  launcher_name,
656                                                  binaries_dir_name)
657             d_products["launcher"] = (launcher_package, launcher_name)
658
659             # if the application contains mesa products, we generate in addition to the 
660             # classical salome launcher a launcher using mesa and called mesa_salome 
661             # (the mesa launcher will be used for remote usage through ssh).
662             if generate_mesa_launcher:
663                 # if there is one, store the use_mesa property
664                 restore_use_mesa_option=None
665                 if ('properties' in config.APPLICATION and 
666                     'use_mesa' in config.APPLICATION.properties):
667                     restore_use_mesa_option = config.APPLICATION.properties.use_mesa
668
669                 # activate mesa property, and generate a mesa launcher
670                 src.activate_mesa_property(config)  #activate use_mesa property
671                 launcher_mesa_name="mesa_"+launcher_name
672                 launcher_package_mesa = produce_relative_launcher(config,
673                                                      logger,
674                                                      tmp_working_dir,
675                                                      launcher_mesa_name,
676                                                      binaries_dir_name)
677                 d_products["launcher (mesa)"] = (launcher_package_mesa, launcher_mesa_name)
678
679                 # if there was a use_mesa value, we restore it
680                 # else we set it to the default value "no"
681                 if restore_use_mesa_option != None:
682                     config.APPLICATION.properties.use_mesa=restore_use_mesa_option
683                 else:
684                     config.APPLICATION.properties.use_mesa="no"
685
686             if options.sources:
687                 # if we mix binaries and sources, we add a copy of the launcher,
688                 # prefixed with "bin", in order to avoid clashes
689                 d_products["launcher (copy)"] = (launcher_package, "bin"+launcher_name)
690         else:
691             # Provide a script for the creation of an application EDF style
692             appli_script = product_appli_creation_script(config,
693                                                         logger,
694                                                         tmp_working_dir,
695                                                         binaries_dir_name)
696             
697             d_products["appli script"] = (appli_script, "create_appli.py")
698
699     # Put also the environment file
700     env_file = produce_relative_env_files(config,
701                                            logger,
702                                            tmp_working_dir,
703                                            binaries_dir_name)
704
705     if src.architecture.is_windows():
706       filename  = "env_launch.bat"
707     else:
708       filename  = "env_launch.sh"
709     d_products["environment file"] = (env_file, filename)      
710     return d_products
711
712 def source_package(sat, config, logger, options, tmp_working_dir):
713     '''Prepare a dictionary that stores all the needed directories and files to
714        add in a source package.
715     
716     :param config Config: The global configuration.
717     :param logger Logger: the logging instance
718     :param options OptResult: the options of the launched command
719     :param tmp_working_dir str: The temporary local directory containing some 
720                                 specific directories or files needed in the 
721                                 source package
722     :return: the dictionary that stores all the needed directories and files to
723              add in a source package.
724              {label : (path_on_local_machine, path_in_archive)}
725     :rtype: dict
726     '''
727     
728     d_archives, l_pinfo_vcs = {}, []  # l_pinfo_vcs must exist even if ftp mode skips get_archives
729     # Get all the products that are prepared using an archive
730     # unless ftp mode is specified (in this case the user of the
731     # archive will get the sources through the ftp mode of sat prepare)
732     if not options.ftp:
733         logger.write("Find archive products ... ")
734         d_archives, l_pinfo_vcs = get_archives(config, logger)
735         logger.write("Done\n")
736
737     d_archives_vcs = {}
738     if not options.with_vcs and len(l_pinfo_vcs) > 0:
739         # Make archives with the products that are not prepared using an archive
740         # (git, cvs, svn, etc)
741         logger.write("Construct archives for vcs products ... ")
742         d_archives_vcs = get_archives_vcs(l_pinfo_vcs,
743                                           sat,
744                                           config,
745                                           logger,
746                                           tmp_working_dir)
747         logger.write("Done\n")
748
749     # Create a project
750     logger.write("Create the project ... ")
751     d_project = create_project_for_src_package(config,
752                                                tmp_working_dir,
753                                                options.with_vcs,
754                                                options.ftp)
755     logger.write("Done\n")
756     
757     # Add salomeTools
758     tmp_sat = add_salomeTools(config, tmp_working_dir)
759     d_sat = {"salomeTools" : (tmp_sat, "salomeTools")}
760     
761     # Add a sat symbolic link if not win
762     if not src.architecture.is_windows():
763         tmp_satlink_path = os.path.join(tmp_working_dir, 'sat')
764         try:
765             t = os.getcwd()
766         except:
767             # In the jobs, os.getcwd() can fail
768             t = config.LOCAL.workdir
769         os.chdir(tmp_working_dir)
770         if os.path.lexists(tmp_satlink_path):
771             os.remove(tmp_satlink_path)
772         os.symlink(os.path.join('salomeTools', 'sat'), 'sat')
773         os.chdir(t)
774         
775         d_sat["sat link"] = (tmp_satlink_path, "sat")
776     
777     d_source = src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
778     return d_source
779
780 def get_archives(config, logger):
781     '''Find all the products that are retrieved using an archive and all the
782        products that are retrieved using a vcs (git, cvs, svn) repository.
783     
784     :param config Config: The global configuration.
785     :param logger Logger: the logging instance
786     :return: the dictionary {name_product : 
787              (local path of its archive, path in the package of its archive )}
788              and the list of specific configuration corresponding to the vcs 
789              products
790     :rtype: (Dict, List)
791     '''
792     # Get the list of product information
793     l_products_name = config.APPLICATION.products.keys()
794     l_product_info = src.product.get_products_infos(l_products_name,
795                                                     config)
796     d_archives = {}
797     l_pinfo_vcs = []
798     for p_name, p_info in l_product_info:
799         # skip product with property not_in_package set to yes
800         if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
801             continue  
802         # ignore the native and fixed products
803         if (src.product.product_is_native(p_info) 
804                 or src.product.product_is_fixed(p_info)):
805             continue
806         if p_info.get_source == "archive":
807             archive_path = p_info.archive_info.archive_name
808             archive_name = os.path.basename(archive_path)
809             d_archives[p_name] = (archive_path,
810                                   os.path.join(ARCHIVE_DIR, archive_name))
811             if (src.appli_test_property(config,"pip", "yes") and 
812                 src.product.product_test_property(p_info,"pip", "yes")):
813                 # if pip mode is activated, and product is managed by pip
814                 pip_wheels_dir=os.path.join(config.LOCAL.archive_dir,"wheels")
815                 pip_wheel_pattern=os.path.join(pip_wheels_dir, 
816                     "%s-%s*" % (p_info.name, p_info.version))
817                 pip_wheel_path=glob.glob(pip_wheel_pattern)
818                 msg_pip_not_found="Error in get_archive, pip wheel for "\
819                                   "product %s-%s was not found in %s directory"
820                 msg_pip_two_or_more="Error in get_archive, several pip wheels for "\
821                                   "product %s-%s were found in %s directory"
822                 if len(pip_wheel_path)==0:
823                     raise src.SatException(msg_pip_not_found %\
824                         (p_info.name, p_info.version, pip_wheels_dir))
825                 if len(pip_wheel_path)>1:
826                     raise src.SatException(msg_pip_two_or_more %\
827                         (p_info.name, p_info.version, pip_wheels_dir))
828
829                 pip_wheel_name=os.path.basename(pip_wheel_path[0])
830                 d_archives[p_name+" (pip wheel)"]=(pip_wheel_path[0], 
831                     os.path.join(ARCHIVE_DIR, "wheels", pip_wheel_name))
832         else:
833             # this product is not managed by archive, 
834             # an archive of the vcs directory will be created by get_archive_vcs
835             l_pinfo_vcs.append((p_name, p_info)) 
836             
837     return d_archives, l_pinfo_vcs
838
839 def add_salomeTools(config, tmp_working_dir):
840     '''Prepare a version of salomeTools that has a specific local.pyconf file 
841        configured for a source package.
842
843     :param config Config: The global configuration.
844     :param tmp_working_dir str: The temporary local directory containing some 
845                                 specific directories or files needed in the 
846                                 source package
847     :return: The path to the local salomeTools directory to add in the package
848     :rtype: str
849     '''
850     # Copy sat in the temporary working directory
851     sat_tmp_path = src.Path(os.path.join(tmp_working_dir, "salomeTools"))
852     sat_running_path = src.Path(config.VARS.salometoolsway)
853     sat_running_path.copy(sat_tmp_path)
854     
855     # Update the local.pyconf file that contains the path to the project
856     local_pyconf_name = "local.pyconf"
857     local_pyconf_dir = os.path.join(tmp_working_dir, "salomeTools", "data")
858     local_pyconf_file = os.path.join(local_pyconf_dir, local_pyconf_name)
859     # Remove the .pyconf file in the root directory of salomeTools if there is
860     # any. (For example when launching jobs, a pyconf file describing the jobs 
861     # can be here and is not useful) 
862     files_or_dir_SAT = os.listdir(os.path.join(tmp_working_dir, "salomeTools"))
863     for file_or_dir in files_or_dir_SAT:
864         if file_or_dir.endswith(".pyconf") or file_or_dir.endswith(".txt"):
865             file_path = os.path.join(tmp_working_dir,
866                                      "salomeTools",
867                                      file_or_dir)
868             os.remove(file_path)
869     
870     ff = open(local_pyconf_file, "w")
871     ff.write(LOCAL_TEMPLATE)
872     ff.close()
873     
874     return sat_tmp_path.path
875
876 def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir):
877     '''For source packages that require all products to be retrieved as
878        archives, one has to create archives for the vcs products.
879        So this method calls the clean and source commands of sat and then
880        creates the archives.
881
882     :param l_pinfo_vcs List: The list of specific configuration corresponding to
883                              each vcs product
884     :param sat Sat: The Sat instance that can be called to clean and source the
885                     products
886     :param config Config: The global configuration.
887     :param logger Logger: the logging instance
888     :param tmp_working_dir str: The temporary local directory containing some 
889                                 specific directories or files needed in the 
890                                 source package
891     :return: the dictionary that stores all the archives to add in the source 
892              package. {label : (path_on_local_machine, path_in_archive)}
893     :rtype: dict
894     '''
895     # clean the source directory of all the vcs products, then use the source 
896     # command and thus construct an archive that will not contain the patches
897     l_prod_names = [pn for pn, __ in l_pinfo_vcs]
898     if False: # clean is dangerous in user/SOURCES, fixed in tmp_local_working_dir
899       logger.write(_("\nclean sources\n"))
900       args_clean = config.VARS.application
901       args_clean += " --sources --products "
902       args_clean += ",".join(l_prod_names)
903       logger.write("WARNING: get_archives_vcs clean\n         '%s'\n" % args_clean, 1)
904       sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)
905     if True:
906       # source
907       logger.write(_("get sources\n"))
908       args_source = config.VARS.application
909       args_source += " --products "
910       args_source += ",".join(l_prod_names)
911       svgDir = sat.cfg.APPLICATION.workdir
912       tmp_local_working_dir = os.path.join(sat.cfg.APPLICATION.workdir, "tmp_package")  # to avoid too many big files in /tmp
913       sat.cfg.APPLICATION.workdir = tmp_local_working_dir
914       # DBG.write("SSS sat config.APPLICATION.workdir", sat.cfg.APPLICATION, True)
915       # DBG.write("sat config id", id(sat.cfg), True)
916       # note: config is not the same id() as for sat.source()
917       # sat.source(args_source, batch=True, verbose=5, logger_add_link = logger)
918       import source
919       source.run(args_source, sat, logger) #use this mode as runner.cfg reference
920       
921       # make the new archives
922       d_archives_vcs = {}
923       for pn, pinfo in l_pinfo_vcs:
924           path_archive = make_archive(pn, pinfo, tmp_local_working_dir)
925           logger.write("make archive vcs '%s'\n" % path_archive)
926           d_archives_vcs[pn] = (path_archive,
927                                 os.path.join(ARCHIVE_DIR, pn + ".tgz"))
928       sat.cfg.APPLICATION.workdir = svgDir
929       # DBG.write("END sat config", sat.cfg.APPLICATION, True)
930     return d_archives_vcs
931
932 def make_archive(prod_name, prod_info, where):
933     '''Create an archive of a product from its source directory.
934
935     :param prod_name str: The name of the product.
936     :param prod_info Config: The specific configuration corresponding to the 
937                              product
938     :param where str: The path of the directory where to put the resulting
939                       archive
940     :return: The path of the resulting archive
941     :rtype: str
942     '''
943     path_targz_prod = os.path.join(where, prod_name + PACKAGE_EXT)
944     tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
945     local_path = prod_info.source_dir
946     tar_prod.add(local_path,
947                  arcname=prod_name,
948                  exclude=exclude_VCS_and_extensions)
949     tar_prod.close()
950     return path_targz_prod       
951
952 def create_project_for_src_package(config, tmp_working_dir, with_vcs, with_ftp):
953     '''Create a specific project for a source package.
954
955     :param config Config: The global configuration.
956     :param tmp_working_dir str: The temporary local directory containing some 
957                                 specific directories or files needed in the 
958                                 source package
959     :param with_vcs boolean: True if the package is with vcs products (not 
960                              transformed into archive products)
961     :param with_ftp boolean: True if the package uses ftp servers to get archives
962     :return: The dictionary 
963              {"project" : (produced project, project path in the archive)}
964     :rtype: Dict
965     '''
966
967     # Create in the working temporary directory the full project tree
968     project_tmp_dir = os.path.join(tmp_working_dir, PROJECT_DIR)
969     products_pyconf_tmp_dir = os.path.join(project_tmp_dir,
970                                          "products")
971     compil_scripts_tmp_dir = os.path.join(project_tmp_dir,
972                                          "products",
973                                          "compil_scripts")
974     env_scripts_tmp_dir = os.path.join(project_tmp_dir,
975                                          "products",
976                                          "env_scripts")
977     patches_tmp_dir = os.path.join(project_tmp_dir,
978                                          "products",
979                                          "patches")
980     application_tmp_dir = os.path.join(project_tmp_dir,
981                                          "applications")
982     for directory in [project_tmp_dir,
983                       compil_scripts_tmp_dir,
984                       env_scripts_tmp_dir,
985                       patches_tmp_dir,
986                       application_tmp_dir]:
987         src.ensure_path_exists(directory)
988
989     # Create the pyconf that contains the information of the project
990     project_pyconf_name = "project.pyconf"        
991     project_pyconf_file = os.path.join(project_tmp_dir, project_pyconf_name)
992     ff = open(project_pyconf_file, "w")
993     ff.write(PROJECT_TEMPLATE)
994     if with_ftp and len(config.PATHS.ARCHIVEFTP) > 0:
995         ftp_path='ARCHIVEFTP : "'+config.PATHS.ARCHIVEFTP[0]
996         for ftpserver in config.PATHS.ARCHIVEFTP[1:]:
997             ftp_path=ftp_path+":"+ftpserver
998         ftp_path+='"'
999         ff.write("# ftp servers where to search for prerequisite archives\n")
1000         ff.write(ftp_path)
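        # this produces a line such as:  ARCHIVEFTP : "server1:server2"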
1001     # add licence paths if any
1002     if len(config.PATHS.LICENCEPATH) > 0:  
1003         licence_path='LICENCEPATH : "'+config.PATHS.LICENCEPATH[0]
1004         for path in config.PATHS.LICENCEPATH[1:]:
1005             licence_path=licence_path+":"+path
1006         licence_path+='"'
1007         ff.write("\n# Where to search for licences\n")
1008         ff.write(licence_path)
1009         
1010
1011     ff.close()
1012     
1013     # Loop over the products to get their pyconf and all the scripts
1014     # (compilation, environment, patches)
1015     # and create the pyconf file to add to the project
1016     lproducts_name = config.APPLICATION.products.keys()
1017     l_products = src.product.get_products_infos(lproducts_name, config)
1018     for p_name, p_info in l_products:
1019         # skip product with property not_in_package set to yes
1020         if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
1021             continue  
1022         find_product_scripts_and_pyconf(p_name,
1023                                         p_info,
1024                                         config,
1025                                         with_vcs,
1026                                         compil_scripts_tmp_dir,
1027                                         env_scripts_tmp_dir,
1028                                         patches_tmp_dir,
1029                                         products_pyconf_tmp_dir)
1030     
1031     # for the application pyconf, we write the config directly
1032     # (don't search for the original pyconf file)
1033     # to avoid problems with overwritten sections and the rm_products key
1034     write_application_pyconf(config, application_tmp_dir)
1035     
1036     d_project = {"project" : (project_tmp_dir, PROJECT_DIR )}
1037     return d_project
1038
1039 def find_product_scripts_and_pyconf(p_name,
1040                                     p_info,
1041                                     config,
1042                                     with_vcs,
1043                                     compil_scripts_tmp_dir,
1044                                     env_scripts_tmp_dir,
1045                                     patches_tmp_dir,
1046                                     products_pyconf_tmp_dir):
1047     '''Create a specific pyconf file for a given product. Get its environment 
1048        script, its compilation script and patches and put them in the temporary
1049        working directory. This method is used in the source package in order to
1050        construct the specific project.
1051
1052     :param p_name str: The name of the product.
1053     :param p_info Config: The specific configuration corresponding to the 
1054                              product
1055     :param config Config: The global configuration.
1056     :param with_vcs boolean: True if the package is with vcs products (not 
1057                              transformed into archive products)
1058     :param compil_scripts_tmp_dir str: The path to the temporary compilation 
1059                                        scripts directory of the project.
1060     :param env_scripts_tmp_dir str: The path to the temporary environment script 
1061                                     directory of the project.
1062     :param patches_tmp_dir str: The path to the temporary patch scripts 
1063                                 directory of the project.
1064     :param products_pyconf_tmp_dir str: The path to the temporary product 
1065                                         scripts directory of the project.
1066     '''
1067     
1068     # read the pyconf of the product
1069     product_pyconf_cfg = src.pyconf.Config(p_info.from_file)
1070
1071     # find the compilation script if any
1072     if src.product.product_has_script(p_info):
1073         compil_script_path = src.Path(p_info.compil_script)
1074         compil_script_path.copy(compil_scripts_tmp_dir)
1075
1076     # find the environment script if any
1077     if src.product.product_has_env_script(p_info):
1078         env_script_path = src.Path(p_info.environ.env_script)
1079         env_script_path.copy(env_scripts_tmp_dir)
1080
1081     # find the patches if any
1082     if src.product.product_has_patches(p_info):
1083         patches = src.pyconf.Sequence()
1084         for patch_path in p_info.patches:
1085             p_path = src.Path(patch_path)
1086             p_path.copy(patches_tmp_dir)
1087             patches.append(os.path.basename(patch_path), "")
1088
1089     if (not with_vcs) and src.product.product_is_vcs(p_info):
1090         # in non-vcs mode, if the product is not an archive product, make it become one.
1091
1092         # depending upon the incremental mode, select impacted sections
1093         if "properties" in p_info and "incremental" in p_info.properties and\
1094             p_info.properties.incremental == "yes":
1095             sections = ["default", "default_win", p_info.section, p_info.section+"_win"]
1096         else:
1097             sections = [p_info.section]
1098         for section in sections:
1099             if section in product_pyconf_cfg and "get_source" in product_pyconf_cfg[section]:
1100                 DBG.write("sat package: set get_source to archive for product %s and section %s" %\
1101                           (p_name,section))
1102                 product_pyconf_cfg[section].get_source = "archive"
1103                 if not "archive_info" in product_pyconf_cfg[section]:
1104                     product_pyconf_cfg[section].addMapping("archive_info",
1105                                         src.pyconf.Mapping(product_pyconf_cfg),
1106                                         "")
1107                     product_pyconf_cfg[section].archive_info.archive_name =\
1108                         p_info.name + ".tgz"
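                    # this name is meant to match the vcs archive that
                    # get_archives_vcs stores under ARCHIVES/<product>.tgz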
1109     
1110     # write the pyconf file to the temporary project location
1111     product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir,
1112                                            p_name + ".pyconf")
1113     ff = open(product_tmp_pyconf_path, 'w')
1114     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1115     product_pyconf_cfg.__save__(ff, 1)
1116     ff.close()
1117
1118
1119 def write_application_pyconf(config, application_tmp_dir):
1120     '''Write the application pyconf file in the specific temporary 
1121        directory containing the specific project of a source package.
1122
1123     :param config Config: The global configuration.
1124     :param application_tmp_dir str: The path to the temporary application 
1125                                     scripts directory of the project.
1126     '''
1127     application_name = config.VARS.application
1128     # write the pyconf file to the temporary application location
1129     application_tmp_pyconf_path = os.path.join(application_tmp_dir,
1130                                                application_name + ".pyconf")
1131     with open(application_tmp_pyconf_path, 'w') as f:
1132         f.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1133         res = src.pyconf.Config()
1134         app = src.pyconf.deepCopyMapping(config.APPLICATION)
1135         # no base in packages
1136         if "base" in app:
1137             app.base = "no" 
1138         # Change the workdir
1139         app.workdir = src.pyconf.Reference(
1140                                  app,
1141                                  src.pyconf.DOLLAR,
1142                                  'VARS.salometoolsway + $VARS.sep + ".."')
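        # once unpacked, the application workdir therefore resolves to the
        # directory just above the embedded salomeTools, i.e. the archive root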
1143         res.addMapping("APPLICATION", app, "")
1144         res.__save__(f, evaluated=False)
1145     
1146
1147 def sat_package(config, tmp_working_dir, options, logger):
1148     '''Prepare a dictionary that stores all the needed directories and files to
1149        add in a salomeTools package.
1150     
1151     :param tmp_working_dir str: The temporary local working directory 
1152     :param options OptResult: the options of the launched command
1153     :return: the dictionary that stores all the needed directories and files to
1154              add in a salomeTools package.
1155              {label : (path_on_local_machine, path_in_archive)}
1156     :rtype: dict
1157     '''
1158     d_project = {}
1159
1160     # we include sat itself
1161     d_project["all_sat"]=(config.VARS.salometoolsway, "")
1162
1163     # and we overwrite local.pyconf with a clean version.
1164     local_pyconf_tmp_path = os.path.join(tmp_working_dir, "local.pyconf")
1165     local_file_path = os.path.join(config.VARS.datadir, "local.pyconf")
1166     local_cfg = src.pyconf.Config(local_file_path)
1167     local_cfg.PROJECTS.project_file_paths=src.pyconf.Sequence(local_cfg.PROJECTS)
1168     local_cfg.LOCAL["base"] = "default"
1169     local_cfg.LOCAL["workdir"] = "default"
1170     local_cfg.LOCAL["log_dir"] = "default"
1171     local_cfg.LOCAL["archive_dir"] = "default"
1172     local_cfg.LOCAL["VCS"] = "None"
1173     local_cfg.LOCAL["tag"] = src.get_salometool_version(config)
1174
1175     # if the archive contains a project, we write its relative path in local.pyconf
1176     if options.project:
1177         project_arch_path = os.path.join("projects", options.project, 
1178                                          os.path.basename(options.project_file_path))
1179         local_cfg.PROJECTS.project_file_paths.append(project_arch_path, "")
1180
1181     ff = open(local_pyconf_tmp_path, 'w')
1182     local_cfg.__save__(ff, 1)
1183     ff.close()
1184     d_project["local.pyconf"]=(local_pyconf_tmp_path, "data/local.pyconf")
1185     return d_project
1186     
1187
1188 def project_package(config, name_project, project_file_path, ftp_mode, tmp_working_dir, embedded_in_sat, logger):
1189     '''Prepare a dictionary that stores all the needed directories and files to
1190        add in a project package.
1191     
1192     :param project_file_path str: The path to the local project.
1193     :param ftp_mode boolean: Do not embed archives, the archive will rely on ftp mode to retrieve them.
1194     :param tmp_working_dir str: The temporary local directory containing some 
1195                                 specific directories or files needed in the 
1196                                 project package
1197     :param embedded_in_sat boolean : the project package is embedded in a sat package
1198     :return: the dictionary that stores all the needed directories and files to
1199              add in a project package.
1200              {label : (path_on_local_machine, path_in_archive)}
1201     :rtype: dict
1202     '''
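         # Illustrative shape of the returned dictionary for a hypothetical
         # project "myproject" embedded in a sat package:
         #   {"APPLICATIONPATH"  : ("/path/to/project/applications",
         #                          "projects/myproject/applications"),
         #    "Project hat file" : ("/tmp/.../myproject.pyconf",
         #                          "projects/myproject/myproject.pyconf")}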
1203     d_project = {}
1204     # Read the project file and get the directories to add to the package
1205     
1206     try: 
1207       project_pyconf_cfg = config.PROJECTS.projects.__getattr__(name_project)
1208     except Exception:
1209       logger.write("""
1210 WARNING: missing config.PROJECTS.projects.%s, trying to read it from:\n%s\n""" % (name_project, project_file_path))
1211       project_pyconf_cfg = src.pyconf.Config(project_file_path)
1212       project_pyconf_cfg.PWD = os.path.dirname(project_file_path)
1213     
1214     paths = {"APPLICATIONPATH" : "applications",
1215              "PRODUCTPATH" : "products",
1216              "JOBPATH" : "jobs",
1217              "MACHINEPATH" : "machines"}
1218     if not ftp_mode:
1219         paths["ARCHIVEPATH"] = "archives"
1220
1221     # compute the destination of the project file in the archive (done once,
     # before the loop, so it is defined even if no project path exists)
1222     project_file_name = os.path.basename(project_file_path)
     if embedded_in_sat:
         project_file_dest = os.path.join("projects", name_project, project_file_name)
     else:
         project_file_dest = project_file_name

     # Loop over the project paths and add them
1223     for path in paths:
1224         if path not in project_pyconf_cfg:
1225             continue
1226         if embedded_in_sat:
1227             dest_path = os.path.join("projects", name_project, paths[path])
1229         else:
1230             dest_path = paths[path]
1232
1233         # Add the directory to the files to add in the package
1234         d_project[path] = (project_pyconf_cfg[path], dest_path)
1235
1236         # Modify the value of the path in the package
1237         project_pyconf_cfg[path] = src.pyconf.Reference(
1238                                     project_pyconf_cfg,
1239                                     src.pyconf.DOLLAR,
1240                                     'project_path + "/' + paths[path] + '"')
1241     
1242     # Modify some values
1243     if "project_path" not in project_pyconf_cfg:
1244         project_pyconf_cfg.addMapping("project_path",
1245                                       src.pyconf.Mapping(project_pyconf_cfg),
1246                                       "")
1247     project_pyconf_cfg.project_path = src.pyconf.Reference(project_pyconf_cfg,
1248                                                            src.pyconf.DOLLAR,
1249                                                            'PWD')
1250     # we don't want to export these two fields
1251     project_pyconf_cfg.__delitem__("file_path")
1252     project_pyconf_cfg.__delitem__("PWD")
1253     if ftp_mode:
1254         project_pyconf_cfg.__delitem__("ARCHIVEPATH")
1255     
1256     # Write the project pyconf file
1257     project_pyconf_tmp_path = os.path.join(tmp_working_dir, project_file_name)
1258     ff = open(project_pyconf_tmp_path, 'w')
1259     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1260     project_pyconf_cfg.__save__(ff, 1)
1261     ff.close()
1262     d_project["Project hat file"] = (project_pyconf_tmp_path, project_file_dest)
1263     
1264     return d_project
1265
1266 def add_readme(config, options, where):
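         '''Generate the README file of the package in the given directory.

         :param config Config: The global configuration.
         :param options OptResult: The options of the launched command.
         :param where str: The directory in which the README file is created.
         :return: The path to the generated README file.
         :rtype: str
         '''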
1267     readme_path = os.path.join(where, "README")
1268     with codecs.open(readme_path, "w", 'utf-8') as f:
1269
1270         # templates for building the header
1271         readme_header="""
1272 # This package was generated with sat $version
1273 # Date: $date
1274 # User: $user
1275 # Distribution : $dist
1276
1277 In the following, $$ROOT represents the directory where you have installed 
1278 SALOME (the directory where this file is located).
1279
1280 """
1281         if src.architecture.is_windows():
1282             readme_header = readme_header.replace('$$ROOT','%ROOT%')
1283         readme_compilation_with_binaries="""
1284
1285 compilation based on the binaries used as prerequisites
1286 =======================================================
1287
1288 If you fail to compile the complete application (for example because
1289 you are not root on your system and cannot install missing packages), you
1290 may try a partial compilation based on the binaries.
1291 For that it is necessary to copy the binaries from BINARIES to INSTALL,
1292 and do some substitutions on cmake and .la files (replace the build directories
1293 with local paths).
1294 The procedure to do it is:
1295  1) Remove or rename INSTALL directory if it exists
1296  2) Execute the shell script install_bin.sh:
1297  > cd $ROOT
1298  > ./install_bin.sh
1299  3) Use SalomeTool (as explained in Sources section) and compile only the 
1300     modules you need to (with -p option)
1301
1302 """
1303         readme_header_tpl=string.Template(readme_header)
1304         readme_template_path_bin = os.path.join(config.VARS.internal_dir,
1305                 "README_BIN.template")
1306         readme_template_path_bin_launcher = os.path.join(config.VARS.internal_dir,
1307                 "README_LAUNCHER.template")
1308         readme_template_path_bin_virtapp = os.path.join(config.VARS.internal_dir,
1309                 "README_BIN_VIRTUAL_APP.template")
1310         readme_template_path_src = os.path.join(config.VARS.internal_dir,
1311                 "README_SRC.template")
1312         readme_template_path_pro = os.path.join(config.VARS.internal_dir,
1313                 "README_PROJECT.template")
1314         readme_template_path_sat = os.path.join(config.VARS.internal_dir,
1315                 "README_SAT.template")
1316
1317         # prepare substitution dictionary
1318         d = dict()
1319         d['user'] = config.VARS.user
1320         d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
1321         d['version'] = src.get_salometool_version(config)
1322         d['dist'] = config.VARS.dist
1323         f.write(readme_header_tpl.substitute(d)) # write the general header (common)
1324
1325         if options.binaries or options.sources:
1326             d['application'] = config.VARS.application
1327             d['BINARIES']    = config.INTERNAL.config.install_dir
1328             d['SEPARATOR'] = config.VARS.sep
1329             if src.architecture.is_windows():
1330                 d['operatingSystem'] = 'Windows'
1331                 d['PYTHON3'] = 'python3'
1332                 d['ROOT']    = '%ROOT%'
1333             else:
1334                 d['operatingSystem'] = 'Linux'
1335                 d['PYTHON3'] = ''
1336                 d['ROOT']    = '$ROOT'
1337             f.write("# Application: " + d['application'] + "\n")
1338             if 'KERNEL' in config.APPLICATION.products:
1339                 VersionSalome = src.get_salome_version(config)
1340                 # Case where SALOME has the launcher that uses the SalomeContext API
1341                 if VersionSalome >= 730:
1342                     d['launcher'] = config.APPLICATION.profile.launcher_name
1343                 else:
1344                     d['virtual_app'] = 'runAppli' # this info is not used for now
1345
1346         # write the specific sections
1347         if options.binaries:
1348             f.write(src.template.substitute(readme_template_path_bin, d))
1349             if "virtual_app" in d:
1350                 f.write(src.template.substitute(readme_template_path_bin_virtapp, d))
1351             if "launcher" in d:
1352                 f.write(src.template.substitute(readme_template_path_bin_launcher, d))
1353
1354         if options.sources:
1355             f.write(src.template.substitute(readme_template_path_src, d))
1356
1357         if options.binaries and options.sources and not src.architecture.is_windows():
1358             f.write(readme_compilation_with_binaries)
1359
1360         if options.project:
1361             f.write(src.template.substitute(readme_template_path_pro, d))
1362
1363         if options.sat:
1364             f.write(src.template.substitute(readme_template_path_sat, d))
1365     
1366     return readme_path
1367
1368 def update_config(config, logger,  prop, value):
1369     '''Remove from config.APPLICATION.products the products that have the property given as input.
1370     
1371     :param config Config: The global config.
1372     :param prop str: The property to filter
1373     :param value str: The value of the property to filter
1374     '''
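         # Typical call (see run() below): update_config(config, logger,
         # "not_in_package", "yes") removes every product whose configuration
         # sets that property to "yes".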
1375     # if there is no APPLICATION (ex sat package -t) : nothing to do
1376     if "APPLICATION" in config:
1377         l_product_to_remove = []
1378         for product_name in config.APPLICATION.products.keys():
1379             prod_cfg = src.product.get_product_config(config, product_name)
1380             if src.get_property_in_product_cfg(prod_cfg, prop) == value:
1381                 l_product_to_remove.append(product_name)
1382         for product_name in l_product_to_remove:
1383             config.APPLICATION.products.__delitem__(product_name)
1384             logger.write("Remove product %s with property %s\n" % (product_name, prop), 5)
1385
1386 def description():
1387     '''method that is called when salomeTools is called with --help option.
1388     
1389     :return: The text to display for the package command description.
1390     :rtype: str
1391     '''
1392     return _("""
1393 The package command creates a tar file archive of a product.
1394 There are four kinds of archive, which can be mixed:
1395
1396  1 - The binary archive. 
1397      It contains the product installation directories plus a launcher.
1398  2 - The sources archive. 
1399      It contains the product source archives and a project (the application plus salomeTools).
1400  3 - The project archive. 
1401      It contains a project (give the project file path as argument).
1402  4 - The salomeTools archive. 
1403      It contains the salomeTools utility code.
1404
1405 example:
1406  >> sat package SALOME-master --binaries --sources""")
1407   
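     # Illustrative invocations (application and project names are hypothetical;
     # the option flags are those declared by the parser above):
     #   sat package SALOME-master --binaries --sources
     #   sat package --salometools
     #   sat package --project my_project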
1408 def run(args, runner, logger):
1409     '''method that is called when salomeTools is called with package parameter.
1410     '''
1411     
1412     # Parse the options
1413     (options, args) = parser.parse_args(args)
1414
1415     # Check that a type of package is called, and only one
1416     all_option_types = (options.binaries,
1417                         options.sources,
1418                         options.project not in ["", None],
1419                         options.sat)
1420
1421     # Check if no option for package type
1422     if all_option_types.count(True) == 0:
1423         msg = _("Error: Specify a type of package\nUse one of the "
1424                 "following options: --binaries, --sources, --project or"
1425                 " --salometools")
1426         logger.write(src.printcolors.printcError(msg), 1)
1427         logger.write("\n", 1)
1428         return 1
1429     
1430     # The repository where to put the package if not Binary or Source
1431     package_default_path = runner.cfg.LOCAL.workdir
1432     
1433     # if the package contains binaries or sources:
1434     if options.binaries or options.sources:
1435         # Check that the command has been called with an application
1436         src.check_config_has_application(runner.cfg)
1437
1438         # Display information
1439         logger.write(_("Packaging application %s\n") % src.printcolors.printcLabel(
1440                                                     runner.cfg.VARS.application), 1)
1441         
1442         # Get the default directory where to put the packages
1443         package_default_path = os.path.join(runner.cfg.APPLICATION.workdir, "PACKAGE")
1444         src.ensure_path_exists(package_default_path)
1445         
1446     # if the package contains a project:
1447     if options.project:
1448         # check that the project is visible by SAT
1449         projectNameFile = options.project + ".pyconf"
1450         foundProject = None
1451         for i in runner.cfg.PROJECTS.project_file_paths:
1452             baseName = os.path.basename(i)
1453             if baseName == projectNameFile:
1454                 foundProject = i
1455                 break
1456
1457         if foundProject is None:
1458             local_path = os.path.join(runner.cfg.VARS.salometoolsway, "data", "local.pyconf")
1459             msg = _("""ERROR: the project %(1)s is not visible by salomeTools.
1460 known projects are:
1461 %(2)s
1462
1463 Please add it in file:
1464 %(3)s""" % \
1465                     {"1": options.project, "2": "\n  ".join(runner.cfg.PROJECTS.project_file_paths), "3": local_path})
1466             logger.write(src.printcolors.printcError(msg), 1)
1467             logger.write("\n", 1)
1468             return 1
1469         else:
1470             options.project_file_path = foundProject
1471             src.printcolors.print_value(logger, "Project path", options.project_file_path, 2)
1472     
1473     # Remove the products that are filtered by the --without_properties option
1474     if options.without_properties:
1475         prop, value = options.without_properties
1476         update_config(runner.cfg, logger, prop, value)
1477
1478     # Remove from config the products that have the not_in_package property
1479     update_config(runner.cfg, logger, "not_in_package", "yes")
1480
1481     # for binary packages without sources, remove compile time products
1482     if options.binaries and (not options.sources):
1483         update_config(runner.cfg, logger, "compile_time", "yes")
1484     
1485     # get the name of the archive or build it
1486     if options.name:
1487         if os.path.basename(options.name) == options.name:
1488             # only a name (not a path)
1489             archive_name = options.name           
1490             dir_name = package_default_path
1491         else:
1492             archive_name = os.path.basename(options.name)
1493             dir_name = os.path.dirname(options.name)
1494         
1495         # suppress extension
1496         if archive_name[-len(".tgz"):] == ".tgz":
1497             archive_name = archive_name[:-len(".tgz")]
1498         if archive_name[-len(".tar.gz"):] == ".tar.gz":
1499             archive_name = archive_name[:-len(".tar.gz")]
1500         
1501     else:
1502         archive_name=""
1503         dir_name = package_default_path
1504         if options.binaries or options.sources:
1505             archive_name = runner.cfg.APPLICATION.name
1506
1507         if options.binaries:
1508             archive_name += "-"+runner.cfg.VARS.dist
1509             
1510         if options.sources:
1511             archive_name += "-SRC"
1512             if options.with_vcs:
1513                 archive_name += "-VCS"
1514
1515         if options.sat:
1516             archive_name += ("salomeTools_" + src.get_salometool_version(runner.cfg))
1517
1518         if options.project:
1519             if options.sat:
1520                 archive_name += "_" 
1521             archive_name += ("satproject_" + options.project)
1522  
1523         if len(archive_name)==0: # no option worked 
1524             msg = _("Error: Cannot name the archive\n"
1525                     "Check that at least one of the following options was "
1526                     "selected: --binaries, --sources, --project or"
1527                     " --salometools")
1528             logger.write(src.printcolors.printcError(msg), 1)
1529             logger.write("\n", 1)
1530             return 1
1531  
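         # Examples of the resulting archive name (illustrative; assuming an
         # application named "SALOME-master" on a distribution tagged "CO7"):
         #   --binaries            -> SALOME-master-CO7.tar.gz
         #   --sources --with_vcs  -> SALOME-master-SRC-VCS.tar.gz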
1532     path_targz = os.path.join(dir_name, archive_name + PACKAGE_EXT)
1533     
1534     src.printcolors.print_value(logger, "Package path", path_targz, 2)
1535
1536     # Create a working directory for all files that are produced during the
1537     # package creation and that will be removed at the end of the command
1538     tmp_working_dir = os.path.join(runner.cfg.VARS.tmp_root, runner.cfg.VARS.datehour)
1539     src.ensure_path_exists(tmp_working_dir)
1540     logger.write("\n", 5)
1541     logger.write(_("The temporary working directory: %s\n") % tmp_working_dir, 5)
1542     
1543     logger.write("\n", 3)
1544
1545     msg = _("Preparation of files to add to the archive")
1546     logger.write(src.printcolors.printcLabel(msg), 2)
1547     logger.write("\n", 2)
1548     
1549     d_files_to_add={}  # content of the archive
1550
1551     # a dict to hold paths that will need to be substituted for user recompilations
1552     d_paths_to_substitute={}  
1553
1554     if options.binaries:
1555         d_bin_files_to_add = binary_package(runner.cfg,
1556                                             logger,
1557                                             options,
1558                                             tmp_working_dir)
1559         # for all binary dirs, store the substitutions that will be required 
1560         # for extra compilations
1561         for key in d_bin_files_to_add:
1562             if key.endswith("(bin)"):
1563                 source_dir = d_bin_files_to_add[key][0]
1564                 path_in_archive = d_bin_files_to_add[key][1].replace(
1565                    runner.cfg.INTERNAL.config.binary_dir + runner.cfg.VARS.dist,
1566                    runner.cfg.INTERNAL.config.install_dir)
1567                 if os.path.basename(source_dir)==os.path.basename(path_in_archive):
1568                     # if basename is the same we will just substitute the dirname 
1569                     d_paths_to_substitute[os.path.dirname(source_dir)]=\
1570                         os.path.dirname(path_in_archive)
1571                 else:
1572                     d_paths_to_substitute[source_dir]=path_in_archive
1573
1574         d_files_to_add.update(d_bin_files_to_add)
1575     if options.sources:
1576         d_files_to_add.update(source_package(runner,
1577                                         runner.cfg,
1578                                         logger, 
1579                                         options,
1580                                         tmp_working_dir))
1581         if options.binaries:
1582             # for archives with bin and sources we provide a shell script able to 
1583             # install binaries for compilation
1584             file_install_bin=produce_install_bin_file(runner.cfg,logger,
1585                                                       tmp_working_dir,
1586                                                       d_paths_to_substitute,
1587                                                       "install_bin.sh")
1588             d_files_to_add.update({"install_bin" : (file_install_bin, "install_bin.sh")})
1589             logger.write("substitutions that need to be done later : \n", 5)
1590             logger.write(str(d_paths_to_substitute), 5)
1591             logger.write("\n", 5)
1592     else:
1593         # the --salometools option is not considered when --sources is selected,
1594         # as the sources archive already embeds salomeTools
1595         if options.sat:
1596             d_files_to_add.update(sat_package(runner.cfg, tmp_working_dir, 
1597                                   options, logger))
1598         
1599     if options.project:
1600         DBG.write("config for package %s" % options.project, runner.cfg)
1601         d_files_to_add.update(project_package(runner.cfg, options.project, options.project_file_path, options.ftp, tmp_working_dir, options.sat, logger))
1602
1603     if not(d_files_to_add):
1604         msg = _("Error: Empty dictionary to build the archive!\n")
1605         logger.write(src.printcolors.printcError(msg), 1)
1606         logger.write("\n", 1)
1607         return 1
1608
1609     # Add the README file in the package
1610     local_readme_tmp_path = add_readme(runner.cfg, options, tmp_working_dir)
1611     d_files_to_add["README"] = (local_readme_tmp_path, "README")
1612
1613     # Add the additional files of option add_files
1614     if options.add_files:
1615         for file_path in options.add_files:
1616             if not os.path.exists(file_path):
1617                 msg = _("WARNING: the file %s is not accessible.\n") % file_path
                     logger.write(msg, 1)
1618                 continue
1619             file_name = os.path.basename(file_path)
1620             d_files_to_add[file_name] = (file_path, file_name)
1621
1622     logger.write("\n", 2)
1623     logger.write(src.printcolors.printcLabel(_("Actually do the package")), 2)
1624     logger.write("\n", 2)
1625     logger.write("\nfiles and directories to add:\n%s\n\n" % PP.pformat(d_files_to_add), 5)
1626
1627     res = 0
1628     try:
1629         # Creating the object tarfile
1630         tar = tarfile.open(path_targz, mode='w:gz')
1631         
1632         # get the filtering function if needed
1633         filter_function = exclude_VCS_and_extensions
1634
1635         # Add the files to the tarfile object
1636         res = add_files(tar, archive_name, d_files_to_add, logger, f_exclude=filter_function)
1637         tar.close()
1638     except KeyboardInterrupt:
1639         logger.write(src.printcolors.printcError("\nERROR: forced interruption\n"), 1)
1640         logger.write(_("Removing the temporary working directory '%s'... ") % tmp_working_dir, 1)
1641         # remove the working directory
1642         shutil.rmtree(tmp_working_dir)
1643         logger.write(_("OK"), 1)
1644         logger.write(_("\n"), 1)
1645         return 1
1646     
1647     # case where there is no application, e.g. packaging only sat with 'sat package -t'
1648     try:
1649         app = runner.cfg.APPLICATION
1650     except Exception:
1651         app = None
1652
1653     # always remove the tmp_local_working_dir of the application, if any
1654     if app is not None:
1655         tmp_local_working_dir = os.path.join(app.workdir, "tmp_package")
1656         if os.path.isdir(tmp_local_working_dir):
1657             shutil.rmtree(tmp_local_working_dir)
1658
1659     # remove the tmp directory, unless user has registered as developer
1660     if os.path.isdir(tmp_working_dir) and (not DBG.isDeveloper()):
1661         shutil.rmtree(tmp_working_dir)
1662     
1663     # Print again the path of the package
1664     logger.write("\n", 2)
1665     src.printcolors.print_value(logger, "Package path", path_targz, 2)
1666     
1667     return res