[tools/sat.git] / commands / package.py
1 #!/usr/bin/env python
2 #-*- coding:utf-8 -*-
3 #  Copyright (C) 2010-2012  CEA/DEN
4 #
5 #  This library is free software; you can redistribute it and/or
6 #  modify it under the terms of the GNU Lesser General Public
7 #  License as published by the Free Software Foundation; either
8 #  version 2.1 of the License.
9 #
10 #  This library is distributed in the hope that it will be useful,
11 #  but WITHOUT ANY WARRANTY; without even the implied warranty of
12 #  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13 #  Lesser General Public License for more details.
14 #
15 #  You should have received a copy of the GNU Lesser General Public
16 #  License along with this library; if not, write to the Free Software
17 #  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
18
19 import os
20 import stat
21 import shutil
22 import datetime
23 import tarfile
24 import codecs
25 import string
26 import glob
27 import pprint as PP
28
29 import src
30
31 from application import get_SALOME_modules
32 import src.debug as DBG
33
34 BINARY = "binary"
35 SOURCE = "Source"
36 PROJECT = "Project"
37 SAT = "Sat"
38
39 ARCHIVE_DIR = "ARCHIVES"
40 PROJECT_DIR = "PROJECT"
41
42 IGNORED_DIRS = [".git", ".svn"]
43 IGNORED_EXTENSIONS = []
44
45 PROJECT_TEMPLATE = """#!/usr/bin/env python
46 #-*- coding:utf-8 -*-
47
48 # The path to the archive root directory
49 root_path : $PWD + "/../"
50 # path to the PROJECT
51 project_path : $PWD + "/"
52
53 # Where to search the archives of the products
54 ARCHIVEPATH : $root_path + "ARCHIVES"
55 # Where to search the pyconf of the applications
56 APPLICATIONPATH : $project_path + "applications/"
57 # Where to search the pyconf of the products
58 PRODUCTPATH : $project_path + "products/"
59 # Where to search the pyconf of the jobs of the project
60 JOBPATH : $project_path + "jobs/"
61 # Where to search the pyconf of the machines of the project
62 MACHINEPATH : $project_path + "machines/"
63 """
64
65 LOCAL_TEMPLATE = ("""#!/usr/bin/env python
66 #-*- coding:utf-8 -*-
67
68   LOCAL :
69   {
70     base : 'default'
71     workdir : 'default'
72     log_dir : 'default'
73     archive_dir : 'default'
74     VCS : None
75     tag : None
76   }
77
78 PROJECTS :
79 {
80 project_file_paths : [$VARS.salometoolsway + $VARS.sep + \"..\" + $VARS.sep"""
81 """ + \"""" + PROJECT_DIR + """\" + $VARS.sep + "project.pyconf"]
82 }
83 """)
84
85 # Define all possible options for the package command:  sat package <options>
86 parser = src.options.Options()
87 parser.add_option('b', 'binaries', 'boolean', 'binaries',
88     _('Optional: Produce a binary package.'), False)
89 parser.add_option('f', 'force_creation', 'boolean', 'force_creation',
90     _('Optional: Only binary package: produce the archive even if '
91       'there are some missing products.'), False)
92 parser.add_option('s', 'sources', 'boolean', 'sources',
93     _('Optional: Produce a compilable archive of the sources of the '
94       'application.'), False)
95 parser.add_option('', 'with_vcs', 'boolean', 'with_vcs',
96     _('Optional: Do not make archives for products in VCS mode (git, cvs, svn). '
97       'sat prepare will use VCS mode instead to retrieve them.'),
98     False)
99 parser.add_option('', 'ftp', 'boolean', 'ftp',
100     _('Optional: Do not embed archives for products in archive mode. '
101       'sat prepare will use ftp instead to retrieve them.'),
102     False)
103 parser.add_option('p', 'project', 'string', 'project',
104     _('Optional: Produce an archive that contains a project.'), "")
105 parser.add_option('t', 'salometools', 'boolean', 'sat',
106     _('Optional: Produce an archive that contains salomeTools.'), False)
107 parser.add_option('n', 'name', 'string', 'name',
108     _('Optional: The name or full path of the archive.'), None)
109 parser.add_option('', 'add_files', 'list2', 'add_files',
110     _('Optional: The list of additional files to add to the archive.'), [])
111 parser.add_option('', 'without_properties', 'properties', 'without_properties',
112     _('Optional: Filter the products by their properties.\n\tSyntax: '
113       '--without_properties <property>:<value>'))
114
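# Illustrative invocations of this command (a sketch only; the application and
# archive names below are hypothetical, not taken from a real project):
#   sat package <application> --binaries --name /tmp/my-binary-package
#   sat package <application> --sources --with_vcs
#   sat package <application> --binaries --sources --ftp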
115
116 def add_files(tar, name_archive, d_content, logger, f_exclude=None):
117     '''Create an archive containing all directories and files that are given in
118        the d_content argument.
119     
120     :param tar tarfile: The tarfile instance used to make the archive.
121     :param name_archive str: The name of the archive to make.
122     :param d_content dict: The dictionary that contains all directories and files
123                            to add in the archive.
124                            d_content[label] = 
125                                         (path_on_local_machine, path_in_archive)
126     :param logger Logger: the logging instance
127     :param f_exclude Function: the function that filters out the files to exclude
128     :return: 0 if success, 1 if not.
129     :rtype: int
130     '''
131     # get the max length of the messages in order to align the display
132     max_len = len(max(d_content.keys(), key=len))
133     
134     success = 0
135     # loop over each directory or file stored in the d_content dictionary
136     names = sorted(d_content.keys())
137     DBG.write("add tar names", names)
138
139     # used to avoid duplications (for pip install in python, or single_install_dir cases)
140     already_added=set() 
141     for name in names:
142         # display information
143         len_points = max_len - len(name) + 3
144         local_path, archive_path = d_content[name]
145         in_archive = os.path.join(name_archive, archive_path)
146         logger.write(name + " " + len_points * "." + " "+ in_archive + " ", 3)
147         # Get the local path and the path in archive 
148         # of the directory or file to add
149         # Add it in the archive
150         try:
151             key=local_path+"->"+in_archive
152             if key not in already_added:
153                 tar.add(local_path, arcname=in_archive, exclude=f_exclude)
154                 already_added.add(key)
155             logger.write(src.printcolors.printcSuccess(_("OK")), 3)
156         except Exception as e:
157             logger.write(src.printcolors.printcError(_("KO ")), 3)
158             logger.write(str(e), 3)
159             success = 1
160         logger.write("\n", 3)
161     return success
162
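# A minimal sketch of how add_files() is typically driven (the paths and label
# below are hypothetical, given only for illustration):
#
#   d_content = {"KERNEL (bin)": ("/tmp/INSTALL/KERNEL", "BINARIES-XX/KERNEL")}
#   tar = tarfile.open("/tmp/my_package.tgz", mode='w:gz')
#   res = add_files(tar, "my_package", d_content, logger,
#                   f_exclude=exclude_VCS_and_extensions)
#   tar.close()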
163 def exclude_VCS_and_extensions(filename):
164     ''' The function used to exclude from the package the links to the
165         VCS repositories (like .git)
166
167     :param filename Str: The filename to exclude (or not).
168     :return: True if the file has to be excluded
169     :rtype: Boolean
170     '''
171     for dir_name in IGNORED_DIRS:
172         if dir_name in filename:
173             return True
174     for extension in IGNORED_EXTENSIONS:
175         if filename.endswith(extension):
176             return True
177     return False
178
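# For instance, with the default IGNORED_DIRS above:
#   exclude_VCS_and_extensions("SOURCES/KERNEL/.git/config")   -> True  (excluded)
#   exclude_VCS_and_extensions("SOURCES/KERNEL/src/main.cxx")  -> False (kept)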
179 def produce_relative_launcher(config,
180                               logger,
181                               file_dir,
182                               file_name,
183                               binaries_dir_name):
184     '''Create a specific SALOME launcher for the binary package. This launcher 
185        uses relative paths.
186     
187     :param config Config: The global configuration.
188     :param logger Logger: the logging instance
189     :param file_dir str: the directory where to put the launcher
190     :param file_name str: The launcher name
191     :param binaries_dir_name str: the name of the directory where the binaries
192                                   are, in the archive.
193     :return: the path of the produced launcher
194     :rtype: str
195     '''
196     
197     # get KERNEL installation path 
198     kernel_root_dir = os.path.join(binaries_dir_name, "KERNEL")
199
200     # set kernel bin dir (considering fhs property)
201     kernel_cfg = src.product.get_product_config(config, "KERNEL")
202     if src.get_property_in_product_cfg(kernel_cfg, "fhs"):
203         bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin") 
204     else:
205         bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin","salome") 
206
207     # check if the application contains an application module
208     # check also if the application has a distene product, 
209     # in this case get its licence file name
210     l_product_info = src.product.get_products_infos(config.APPLICATION.products.keys(), config)
211     salome_application_name="Not defined" 
212     distene_licence_file_name=False
213     for prod_name, prod_info in l_product_info:
214         # look for a "salome application" and a distene product
215         if src.get_property_in_product_cfg(prod_info, "is_distene") == "yes":
216             distene_licence_file_name = src.product.product_has_licence(prod_info, 
217                                             config.PATHS.LICENCEPATH) 
218         if src.get_property_in_product_cfg(prod_info, "is_salome_application") == "yes":
219             salome_application_name=prod_info.name
220
221     # if the application contains an application module, we set ABSOLUTE_APPLI_PATH to it
222     # if not we set it to KERNEL_INSTALL_DIR, which is sufficient, except for salome test
223     if salome_application_name == "Not defined":
224         app_root_dir=kernel_root_dir
225     else:
226         app_root_dir=os.path.join(binaries_dir_name, salome_application_name)
227
228     additional_env={}
229     additional_env['sat_bin_kernel_install_dir'] = "out_dir_Path + " +\
230                                                    config.VARS.sep + bin_kernel_install_dir
231     if "python3" in config.APPLICATION and config.APPLICATION.python3 == "yes":
232         additional_env['sat_python_version'] = 3
233     else:
234         additional_env['sat_python_version'] = 2
235
236     additional_env['ABSOLUTE_APPLI_PATH'] = "out_dir_Path" + config.VARS.sep + app_root_dir
237
238     # create an environment file writer
239     writer = src.environment.FileEnvWriter(config,
240                                            logger,
241                                            file_dir,
242                                            src_root=None,
243                                            env_info=None)
244     
245     filepath = os.path.join(file_dir, file_name)
246     # Write
247     writer.write_env_file(filepath,
248                           False,  # for launch
249                           "cfgForPy",
250                           additional_env=additional_env,
251                           no_path_init="False",
252                           for_package = binaries_dir_name)
253     
254     # Little hack to put out_dir_Path outside the strings
255     src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' )
256     src.replace_in_file(filepath, "'out_dir_Path + ", "out_dir_Path + '" )
257     
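    # The two substitutions above turn generated literals such as
    #     r"out_dir_Path/BINARIES-XX/KERNEL/bin"
    # into expressions like
    #     out_dir_Path + r"/BINARIES-XX/KERNEL/bin"
    # so that out_dir_Path is resolved at launch time rather than kept as a plain
    # string (illustrative shape only; the exact lines depend on the generated file).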
258     # A hack to put a call to a file for distene licence.
259     # It does nothing to an application that has no distene product
260     if distene_licence_file_name:
261         logger.write("Application has a distene licence file! We use it in package launcher", 5)
262         hack_for_distene_licence(filepath, distene_licence_file_name)
263        
264     # change the rights in order to make the file executable for everybody
265     os.chmod(filepath,
266              stat.S_IRUSR |
267              stat.S_IRGRP |
268              stat.S_IROTH |
269              stat.S_IWUSR |
270              stat.S_IXUSR |
271              stat.S_IXGRP |
272              stat.S_IXOTH)
273
274     return filepath
275
276 def hack_for_distene_licence(filepath, licence_file):
277     '''Replace the distene licence env variable by a call to a file.
278     
279     :param filepath Str: The path to the launcher to modify.
280     '''  
281     shutil.move(filepath, filepath + "_old")
282     fileout= filepath
283     filein = filepath + "_old"
284     fin = open(filein, "r")
285     fout = open(fileout, "w")
286     text = fin.readlines()
287     # Find the Distene section
288     num_line = -1
289     for i,line in enumerate(text):
290         if "# Set DISTENE License" in line:
291             num_line = i
292             break
293     if num_line == -1:
294         # No distene product, there is nothing to do
295         fin.close()
296         for line in text:
297             fout.write(line)
298         fout.close()
299         return
300     del text[num_line +1]
301     del text[num_line +1]
302     text_to_insert ="""    try:
303         distene_licence_file="%s"
304         if sys.version_info[0] >= 3 and sys.version_info[1] >= 5:
305             import importlib.util
306             spec_dist = importlib.util.spec_from_file_location("distene_licence", distene_licence_file)
307             distene=importlib.util.module_from_spec(spec_dist)
308             spec_dist.loader.exec_module(distene)
309         else:
310             import imp
311             distene = imp.load_source('distene_licence', distene_licence_file)
312         distene.set_distene_variables(context)
313     except:
314         pass\n"""  % licence_file
315     text.insert(num_line + 1, text_to_insert)
316     for line in text:
317         fout.write(line)
318     fin.close()    
319     fout.close()
320     return
321     
322 def produce_relative_env_files(config,
323                               logger,
324                               file_dir,
325                               binaries_dir_name):
326     '''Create some specific environment files for the binary package. These 
327        files use relative paths.
328     
329     :param config Config: The global configuration.
330     :param logger Logger: the logging instance
331     :param file_dir str: the directory where to put the files
332     :param binaries_dir_name str: the name of the directory where the binaries
333                                   are, in the archive.
334     :return: the path of the produced environment file
335     :rtype: str
336     '''  
337     # create an environment file writer
338     writer = src.environment.FileEnvWriter(config,
339                                            logger,
340                                            file_dir,
341                                            src_root=None)
342     
343     if src.architecture.is_windows():
344       shell = "bat"
345       filename  = "env_launch.bat"
346     else:
347       shell = "bash"
348       filename  = "env_launch.sh"
349
350     # Write
351     filepath = writer.write_env_file(filename,
352                           False, # for launch
353                           shell,
354                           for_package = binaries_dir_name)
355
356     # Little hack to put out_dir_Path as environment variable
357     src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )
358
359     # change the rights in order to make the file executable for everybody
360     os.chmod(filepath,
361              stat.S_IRUSR |
362              stat.S_IRGRP |
363              stat.S_IROTH |
364              stat.S_IWUSR |
365              stat.S_IXUSR |
366              stat.S_IXGRP |
367              stat.S_IXOTH)
368     
369     return filepath
370
371 def produce_install_bin_file(config,
372                              logger,
373                              file_dir,
374                              d_sub,
375                              file_name):
376     '''Create a bash shell script which does substitutions in the BINARIES
377        directory in order to use it for extra compilations.
378     
379     :param config Config: The global configuration.
380     :param logger Logger: the logging instance
381     :param file_dir str: the directory where to put the files
382     :param d_sub dict: the dictionary that contains the substitutions to be done
383     :param file_name str: the name of the install script file
384     :return: the produced file
385     :rtype: str
386     '''  
387     # Write
388     filepath = os.path.join(file_dir, file_name)
389     # open the file and write into it
390     # use codec utf-8 as sat variables are in unicode
391     with codecs.open(filepath, "w", 'utf-8') as installbin_file:
392         installbin_template_path = os.path.join(config.VARS.internal_dir,
393                                         "INSTALL_BIN.template")
394         
395         # build the name of the directory that will contain the binaries
396         binaries_dir_name = "BINARIES-" + config.VARS.dist
397         # build the substitution loop
398         loop_cmd = "for f in $(grep -RIl"
399         for key in d_sub:
400             loop_cmd += " -e "+ key
401         loop_cmd += ' ' + config.INTERNAL.config.install_dir +\
402                     '); do\n     sed -i "\n'
403         for key in d_sub:
404             loop_cmd += "        s?" + key + "?$(pwd)/" + d_sub[key] + "?g\n"
405         loop_cmd += '            " $f\ndone'
406
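        # With, say, d_sub = {"/path/on/build/host": "BINARIES-XX"} the loop built
        # above expands to roughly the following shell fragment (hypothetical key,
        # whitespace simplified):
        #   for f in $(grep -RIl -e /path/on/build/host INSTALL); do
        #       sed -i "s?/path/on/build/host?$(pwd)/BINARIES-XX?g" $f
        #   done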
407         d={}
408         d["BINARIES_DIR"] = binaries_dir_name
409         d["SUBSTITUTION_LOOP"]=loop_cmd
410         d["INSTALL_DIR"]=config.INTERNAL.config.install_dir
411         
412         # substitute the template and write it in file
413         content=src.template.substitute(installbin_template_path, d)
414         installbin_file.write(content)
415         # change the rights in order to make the file executable for everybody
416         os.chmod(filepath,
417                  stat.S_IRUSR |
418                  stat.S_IRGRP |
419                  stat.S_IROTH |
420                  stat.S_IWUSR |
421                  stat.S_IXUSR |
422                  stat.S_IXGRP |
423                  stat.S_IXOTH)
424     
425     return filepath
426
427 def product_appli_creation_script(config,
428                                   logger,
429                                   file_dir,
430                                   binaries_dir_name):
431     '''Create a script that can produce an application (EDF style) in the binary
432        package.
433     
434     :param config Config: The global configuration.
435     :param logger Logger: the logging instance
436     :param file_dir str: the directory where to put the file
437     :param binaries_dir_name str: the name of the directory where the binaries
438                                   are, in the archive.
439     :return: the path of the produced script file
440     :rtype: Str
441     '''
442     template_name = "create_appli.py.for_bin_packages.template"
443     template_path = os.path.join(config.VARS.internal_dir, template_name)
444     text_to_fill = open(template_path, "r").read()
445     text_to_fill = text_to_fill.replace("TO BE FILLED 1",
446                                         '"' + binaries_dir_name + '"')
447     
448     text_to_add = ""
449     for product_name in get_SALOME_modules(config):
450         product_info = src.product.get_product_config(config, product_name)
451        
452         if src.product.product_is_smesh_plugin(product_info):
453             continue
454
455         if 'install_dir' in product_info and bool(product_info.install_dir):
456             if src.product.product_is_cpp(product_info):
457                 # cpp module
458                 for cpp_name in src.product.get_product_components(product_info):
459                     line_to_add = ("<module name=\"" + 
460                                    cpp_name + 
461                                    "\" gui=\"yes\" path=\"''' + "
462                                    "os.path.join(dir_bin_name, \"" + 
463                                    cpp_name + "\") + '''\"/>")
464             else:
465                 # regular module
466                 line_to_add = ("<module name=\"" + 
467                                product_name + 
468                                "\" gui=\"yes\" path=\"''' + "
469                                "os.path.join(dir_bin_name, \"" + 
470                                product_name + "\") + '''\"/>")
471             text_to_add += line_to_add + "\n"
472     
473     filled_text = text_to_fill.replace("TO BE FILLED 2", text_to_add)
474     
475     tmp_file_path = os.path.join(file_dir, "create_appli.py")
476     ff = open(tmp_file_path, "w")
477     ff.write(filled_text)
478     ff.close()
479     
480     # change the rights in order to make the file executable for everybody
481     os.chmod(tmp_file_path,
482              stat.S_IRUSR |
483              stat.S_IRGRP |
484              stat.S_IROTH |
485              stat.S_IWUSR |
486              stat.S_IXUSR |
487              stat.S_IXGRP |
488              stat.S_IXOTH)
489     
490     return tmp_file_path
491
492 def binary_package(config, logger, options, tmp_working_dir):
493     '''Prepare a dictionary that stores all the needed directories and files to
494        add in a binary package.
495     
496     :param config Config: The global configuration.
497     :param logger Logger: the logging instance
498     :param options OptResult: the options of the launched command
499     :param tmp_working_dir str: The temporary local directory containing some 
500                                 specific directories or files needed in the 
501                                 binary package
502     :return: the dictionary that stores all the needed directories and files to
503              add in a binary package.
504              {label : (path_on_local_machine, path_in_archive)}
505     :rtype: dict
506     '''
507
508     # Get the list of product installation to add to the archive
509     l_products_name = sorted(config.APPLICATION.products.keys())
510     l_product_info = src.product.get_products_infos(l_products_name,
511                                                     config)
512     l_install_dir = []
513     l_source_dir = []
514     l_not_installed = []
515     l_sources_not_present = []
516     generate_mesa_launcher = False  # a flag to know if we generate a mesa launcher
517     if ("APPLICATION" in config  and
518         "properties"  in config.APPLICATION  and
519         "mesa_launcher_in_package"    in config.APPLICATION.properties  and
520         config.APPLICATION.properties.mesa_launcher_in_package == "yes") :
521             generate_mesa_launcher=True
522
523     for prod_name, prod_info in l_product_info:
524         # skip product with property not_in_package set to yes
525         if src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes":
526             continue  
527
528         # Add the sources of the products that have the property 
529         # sources_in_package : "yes"
530         if src.get_property_in_product_cfg(prod_info,
531                                            "sources_in_package") == "yes":
532             if os.path.exists(prod_info.source_dir):
533                 l_source_dir.append((prod_name, prod_info.source_dir))
534             else:
535                 l_sources_not_present.append(prod_name)
536
537         # ignore the native and fixed products for install directories
538         if (src.product.product_is_native(prod_info) 
539                 or src.product.product_is_fixed(prod_info)
540                 or not src.product.product_compiles(prod_info)):
541             continue
542         if src.product.check_installation(config, prod_info):
543             l_install_dir.append((prod_name, prod_info.install_dir))
544         else:
545             l_not_installed.append(prod_name)
546         
547         # Add also the cpp generated modules (if any)
548         if src.product.product_is_cpp(prod_info):
549             # cpp module
550             for name_cpp in src.product.get_product_components(prod_info):
551                 install_dir = os.path.join(config.APPLICATION.workdir,
552                                            config.INTERNAL.config.install_dir,
553                                            name_cpp) 
554                 if os.path.exists(install_dir):
555                     l_install_dir.append((name_cpp, install_dir))
556                 else:
557                     l_not_installed.append(name_cpp)
558         
559     # check the name of the directory that could contain binaries 
560     # from a previous detar
561     binaries_from_detar = os.path.join(config.APPLICATION.workdir, "BINARIES-" + config.VARS.dist)
562     if os.path.exists(binaries_from_detar):
563          logger.write("""
564 WARNING: existing binaries directory from a previous detar installation:
565          %s
566          To make a new package from this, you have to either: 
567          1) install the binaries in the INSTALL directory with the script "install_bin.sh" 
568             (see the README file for more details)
569          2) or recompile everything in INSTALL with the "sat compile" command 
570             (this step is long, and requires some Linux packages to be installed 
571             on your system)\n
572 """ % binaries_from_detar)
573     
574     # Print warning or error if there are some missing products
575     if len(l_not_installed) > 0:
576         text_missing_prods = ""
577         for p_name in l_not_installed:
578             text_missing_prods += "-" + p_name + "\n"
579         if not options.force_creation:
580             msg = _("ERROR: there are missing products installations:")
581             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
582                                      text_missing_prods),
583                          1)
584             return None
585         else:
586             msg = _("WARNING: there are missing products installations:")
587             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
588                                      text_missing_prods),
589                          1)
590
591     # Do the same for sources
592     if len(l_sources_not_present) > 0:
593         text_missing_prods = ""
594         for p_name in l_sources_not_present:
595             text_missing_prods += "-" + p_name + "\n"
596         if not options.force_creation:
597             msg = _("ERROR: there are missing products sources:")
598             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
599                                      text_missing_prods),
600                          1)
601             return None
602         else:
603             msg = _("WARNING: there are missing products sources:")
604             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
605                                      text_missing_prods),
606                          1)
607  
608     # construct the name of the directory that will contain the binaries
609     binaries_dir_name = "BINARIES-" + config.VARS.dist
610     
611     # construct the correlation table between the product names, their 
612     # actual install directories and their install directories in the archive
613     d_products = {}
614     for prod_name, install_dir in l_install_dir:
615         path_in_archive = os.path.join(binaries_dir_name, os.path.basename(install_dir))
616         d_products[prod_name + " (bin)"] = (install_dir, path_in_archive)
617         
618     for prod_name, source_dir in l_source_dir:
619         path_in_archive = os.path.join("SOURCES", prod_name)
620         d_products[prod_name + " (sources)"] = (source_dir, path_in_archive)
621
622     # for packages of SALOME applications including KERNEL, 
623     # we produce a salome launcher or a virtual application (depending on salome version)
624     if 'KERNEL' in config.APPLICATION.products:
625         VersionSalome = src.get_salome_version(config)
626         # Case where SALOME has the launcher that uses the SalomeContext API
627         if VersionSalome >= 730:
628             # create the relative launcher and add it to the files to add
629             launcher_name = src.get_launcher_name(config)
630             launcher_package = produce_relative_launcher(config,
631                                                  logger,
632                                                  tmp_working_dir,
633                                                  launcher_name,
634                                                  binaries_dir_name)
635             d_products["launcher"] = (launcher_package, launcher_name)
636
637             # if the application contains mesa products, we generate in addition to the 
638             # classical salome launcher a launcher using mesa and called mesa_salome 
639             # (the mesa launcher will be used for remote usage through ssh).
640             if generate_mesa_launcher:
641                 # if there is one, store the use_mesa property
642                 restore_use_mesa_option=None
643                 if ('properties' in config.APPLICATION and 
644                     'use_mesa' in config.APPLICATION.properties):
645                     restore_use_mesa_option = config.APPLICATION.properties.use_mesa
646
647                 # activate mesa property, and generate a mesa launcher
648                 src.activate_mesa_property(config)  #activate use_mesa property
649                 launcher_mesa_name="mesa_"+launcher_name
650                 launcher_package_mesa = produce_relative_launcher(config,
651                                                      logger,
652                                                      tmp_working_dir,
653                                                      launcher_mesa_name,
654                                                      binaries_dir_name)
655                 d_products["launcher (mesa)"] = (launcher_package_mesa, launcher_mesa_name)
656
657                 # if there was a use_mesa value, we restore it
658                 # else we set it to the default value "no"
659                 if restore_use_mesa_option != None:
660                     config.APPLICATION.properties.use_mesa=restore_use_mesa_option
661                 else:
662                     config.APPLICATION.properties.use_mesa="no"
663
664             if options.sources:
665                 # if we mix binaries and sources, we add a copy of the launcher, 
666                 # prefixed with "bin", in order to avoid clashes
667                 d_products["launcher (copy)"] = (launcher_package, "bin"+launcher_name)
668         else:
669             # Provide a script for the creation of an application EDF style
670             appli_script = product_appli_creation_script(config,
671                                                         logger,
672                                                         tmp_working_dir,
673                                                         binaries_dir_name)
674             
675             d_products["appli script"] = (appli_script, "create_appli.py")
676
677     # Put also the environment file
678     env_file = produce_relative_env_files(config,
679                                            logger,
680                                            tmp_working_dir,
681                                            binaries_dir_name)
682
683     if src.architecture.is_windows():
684       filename  = "env_launch.bat"
685     else:
686       filename  = "env_launch.sh"
687     d_products["environment file"] = (env_file, filename)      
688
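    # At this point d_products typically looks like (labels and paths are purely
    # illustrative):
    #   {"KERNEL (bin)":     ("<workdir>/INSTALL/KERNEL",       "BINARIES-XX/KERNEL"),
    #    "launcher":         ("<tmp_working_dir>/salome",        "salome"),
    #    "environment file": ("<tmp_working_dir>/env_launch.sh", "env_launch.sh")}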
689     return d_products
690
691 def source_package(sat, config, logger, options, tmp_working_dir):
692     '''Prepare a dictionary that stores all the needed directories and files to
693        add in a source package.
694     
695     :param config Config: The global configuration.
696     :param logger Logger: the logging instance
697     :param options OptResult: the options of the launched command
698     :param tmp_working_dir str: The temporary local directory containing some 
699                                 specific directories or files needed in the 
700                                 source package
701     :return: the dictionary that stores all the needed directories and files to
702              add in a source package.
703              {label : (path_on_local_machine, path_in_archive)}
704     :rtype: dict
705     '''
706     
707     d_archives, l_pinfo_vcs = {}, []  # l_pinfo_vcs must exist even in ftp mode
708     # Get all the products that are prepared using an archive
709     # unless ftp mode is specified (in this case the user of the
710     # archive will get the sources through the ftp mode of sat prepare)
711     if not options.ftp:
712         logger.write("Find archive products ... ")
713         d_archives, l_pinfo_vcs = get_archives(config, logger)
714         logger.write("Done\n")
715
716     d_archives_vcs = {}
717     if not options.with_vcs and len(l_pinfo_vcs) > 0:
718         # Make archives with the products that are not prepared using an archive
719         # (git, cvs, svn, etc)
720         logger.write("Construct archives for vcs products ... ")
721         d_archives_vcs = get_archives_vcs(l_pinfo_vcs,
722                                           sat,
723                                           config,
724                                           logger,
725                                           tmp_working_dir)
726         logger.write("Done\n")
727
728     # Create a project
729     logger.write("Create the project ... ")
730     d_project = create_project_for_src_package(config,
731                                                tmp_working_dir,
732                                                options.with_vcs,
733                                                options.ftp)
734     logger.write("Done\n")
735     
736     # Add salomeTools
737     tmp_sat = add_salomeTools(config, tmp_working_dir)
738     d_sat = {"salomeTools" : (tmp_sat, "salomeTools")}
739     
740     # Add a sat symbolic link if not win
741     if not src.architecture.is_windows():
742         tmp_satlink_path = os.path.join(tmp_working_dir, 'sat')
743         try:
744             t = os.getcwd()
745         except:
746             # In the jobs, os.getcwd() can fail
747             t = config.LOCAL.workdir
748         os.chdir(tmp_working_dir)
749         if os.path.lexists(tmp_satlink_path):
750             os.remove(tmp_satlink_path)
751         os.symlink(os.path.join('salomeTools', 'sat'), 'sat')
752         os.chdir(t)
753         
754         d_sat["sat link"] = (tmp_satlink_path, "sat")
755     
756     d_source = src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
757     return d_source
758
759 def get_archives(config, logger):
760     '''Find all the products that are retrieved using an archive and all the
761        products that are retrieved using a vcs (git, cvs, svn) repository.
762     
763     :param config Config: The global configuration.
764     :param logger Logger: the logging instance
765     :return: the dictionary {name_product : 
766              (local path of its archive, path in the package of its archive )}
767              and the list of specific configurations corresponding to the vcs 
768              products
769     :rtype: (Dict, List)
770     '''
771     # Get the list of product informations
772     l_products_name = config.APPLICATION.products.keys()
773     l_product_info = src.product.get_products_infos(l_products_name,
774                                                     config)
775     d_archives = {}
776     l_pinfo_vcs = []
777     for p_name, p_info in l_product_info:
778         # skip product with property not_in_package set to yes
779         if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
780             continue  
781         # ignore the native and fixed products
782         if (src.product.product_is_native(p_info) 
783                 or src.product.product_is_fixed(p_info)):
784             continue
785         if p_info.get_source == "archive":
786             archive_path = p_info.archive_info.archive_name
787             archive_name = os.path.basename(archive_path)
788             d_archives[p_name] = (archive_path,
789                                   os.path.join(ARCHIVE_DIR, archive_name))
790             if (src.appli_test_property(config,"pip", "yes") and 
791                 src.product.product_test_property(p_info,"pip", "yes")):
792                 # if pip mode is activated, and product is managed by pip
793                 pip_wheels_dir=os.path.join(config.LOCAL.archive_dir,"wheels")
794                 pip_wheel_pattern=os.path.join(pip_wheels_dir, 
795                     "%s-%s*" % (p_info.name, p_info.version))
796                 pip_wheel_path=glob.glob(pip_wheel_pattern)
797                 msg_pip_not_found="Error in get_archive, pip wheel for "\
798                                   "product %s-%s was not found in %s directory"
799                 msg_pip_two_or_more="Error in get_archive, several pip wheels for "\
800                                   "product %s-%s were found in %s directory"
801                 if len(pip_wheel_path)==0:
802                     raise src.SatException(msg_pip_not_found %\
803                         (p_info.name, p_info.version, pip_wheels_dir))
804                 if len(pip_wheel_path)>1:
805                     raise src.SatException(msg_pip_two_or_more %\
806                         (p_info.name, p_info.version, pip_wheels_dir))
807
808                 pip_wheel_name=os.path.basename(pip_wheel_path[0])
809                 d_archives[p_name+" (pip wheel)"]=(pip_wheel_path[0], 
810                     os.path.join(ARCHIVE_DIR, "wheels", pip_wheel_name))
811         else:
812             # this product is not managed by archive, 
813             # an archive of the vcs directory will be created by get_archives_vcs
814             l_pinfo_vcs.append((p_name, p_info)) 
815             
816     return d_archives, l_pinfo_vcs
817
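# The first mapping returned above typically looks like (hypothetical product):
#   {"boost": ("/data/ARCHIVES/boost-1.71.0.tar.gz",
#              "ARCHIVES/boost-1.71.0.tar.gz")}
# while l_pinfo_vcs simply collects the (name, info) pairs of git/cvs/svn products.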
818 def add_salomeTools(config, tmp_working_dir):
819     '''Prepare a version of salomeTools that has a specific local.pyconf file 
820        configured for a source package.
821
822     :param config Config: The global configuration.
823     :param tmp_working_dir str: The temporary local directory containing some 
824                                 specific directories or files needed in the 
825                                 source package
826     :return: The path to the local salomeTools directory to add in the package
827     :rtype: str
828     '''
829     # Copy sat in the temporary working directory
830     sat_tmp_path = src.Path(os.path.join(tmp_working_dir, "salomeTools"))
831     sat_running_path = src.Path(config.VARS.salometoolsway)
832     sat_running_path.copy(sat_tmp_path)
833     
834     # Update the local.pyconf file that contains the path to the project
835     local_pyconf_name = "local.pyconf"
836     local_pyconf_dir = os.path.join(tmp_working_dir, "salomeTools", "data")
837     local_pyconf_file = os.path.join(local_pyconf_dir, local_pyconf_name)
838     # Remove the .pyconf file in the root directory of salomeTools if there is
839     # any. (For example when launching jobs, a pyconf file describing the jobs 
840     # can be here and is not useful) 
841     files_or_dir_SAT = os.listdir(os.path.join(tmp_working_dir, "salomeTools"))
842     for file_or_dir in files_or_dir_SAT:
843         if file_or_dir.endswith(".pyconf") or file_or_dir.endswith(".txt"):
844             file_path = os.path.join(tmp_working_dir,
845                                      "salomeTools",
846                                      file_or_dir)
847             os.remove(file_path)
848     
849     ff = open(local_pyconf_file, "w")
850     ff.write(LOCAL_TEMPLATE)
851     ff.close()
852     
853     return sat_tmp_path.path
854
855 def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir):
856     '''For source packages that require all products to be retrieved using an 
857        archive, one has to create an archive for each vcs product.
858        So this method calls the clean and source commands of sat and then creates
859        the archives.
860
861     :param l_pinfo_vcs List: The list of specific configuration corresponding to
862                              each vcs product
863     :param sat Sat: The Sat instance that can be called to clean and source the
864                     products
865     :param config Config: The global configuration.
866     :param logger Logger: the logging instance
867     :param tmp_working_dir str: The temporary local directory containing some 
868                                 specific directories or files needed in the 
869                                 source package
870     :return: the dictionary that stores all the archives to add in the source 
871              package. {label : (path_on_local_machine, path_in_archive)}
872     :rtype: dict
873     '''
874     # clean the source directory of all the vcs products, then use the source 
875     # command and thus construct an archive that will not contain the patches
876     l_prod_names = [pn for pn, __ in l_pinfo_vcs]
877     if False: # clean is dangerous in user/SOURCES, fixed in tmp_local_working_dir
878       logger.write(_("\nclean sources\n"))
879       args_clean = config.VARS.application
880       args_clean += " --sources --products "
881       args_clean += ",".join(l_prod_names)
882       logger.write("WARNING: get_archives_vcs clean\n         '%s'\n" % args_clean, 1)
883       sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)
884     if True:
885       # source
886       logger.write(_("get sources\n"))
887       args_source = config.VARS.application
888       args_source += " --products "
889       args_source += ",".join(l_prod_names)
890       svgDir = sat.cfg.APPLICATION.workdir
891       tmp_local_working_dir = os.path.join(sat.cfg.APPLICATION.workdir, "tmp_package")  # to avoid too many big files in /tmp
892       sat.cfg.APPLICATION.workdir = tmp_local_working_dir
893       # DBG.write("SSS sat config.APPLICATION.workdir", sat.cfg.APPLICATION, True)
894       # DBG.write("sat config id", id(sat.cfg), True)
895       # note: config is not the same id() as for sat.source()
896       # sat.source(args_source, batch=True, verbose=5, logger_add_link = logger)
897       import source
898       source.run(args_source, sat, logger) #use this mode as runner.cfg reference
899       
900       # make the new archives
901       d_archives_vcs = {}
902       for pn, pinfo in l_pinfo_vcs:
903           path_archive = make_archive(pn, pinfo, tmp_local_working_dir)
904           logger.write("make archive vcs '%s'\n" % path_archive)
905           d_archives_vcs[pn] = (path_archive,
906                                 os.path.join(ARCHIVE_DIR, pn + ".tgz"))
907       sat.cfg.APPLICATION.workdir = svgDir
908       # DBG.write("END sat config", sat.cfg.APPLICATION, True)
909     return d_archives_vcs
910
911 def make_archive(prod_name, prod_info, where):
912     '''Create an archive of a product by searching its source directory.
913
914     :param prod_name str: The name of the product.
915     :param prod_info Config: The specific configuration corresponding to the 
916                              product
917     :param where str: The path of the directory where to put the resulting 
918                       archive
919     :return: The path of the resulting archive
920     :rtype: str
921     '''
922     path_targz_prod = os.path.join(where, prod_name + ".tgz")
923     tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
924     local_path = prod_info.source_dir
925     tar_prod.add(local_path,
926                  arcname=prod_name,
927                  exclude=exclude_VCS_and_extensions)
928     tar_prod.close()
929     return path_targz_prod       
930
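# Example (hypothetical product and paths): make_archive("KERNEL", kernel_info,
# "/tmp/tmp_package") would produce /tmp/tmp_package/KERNEL.tgz whose top-level
# directory inside the archive is "KERNEL", with .git/.svn entries filtered out.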
931 def create_project_for_src_package(config, tmp_working_dir, with_vcs, with_ftp):
932     '''Create a specific project for a source package.
933
934     :param config Config: The global configuration.
935     :param tmp_working_dir str: The temporary local directory containing some 
936                                 specific directories or files needed in the 
937                                 source package
938     :param with_vcs boolean: True if the package is with vcs products (not 
939                              transformed into archive products)
940     :param with_ftp boolean: True if the package uses ftp servers to get archives
941     :return: The dictionary 
942              {"project" : (produced project, project path in the archive)}
943     :rtype: Dict
944     '''
945
946     # Create in the working temporary directory the full project tree
947     project_tmp_dir = os.path.join(tmp_working_dir, PROJECT_DIR)
948     products_pyconf_tmp_dir = os.path.join(project_tmp_dir,
949                                          "products")
950     compil_scripts_tmp_dir = os.path.join(project_tmp_dir,
951                                          "products",
952                                          "compil_scripts")
953     env_scripts_tmp_dir = os.path.join(project_tmp_dir,
954                                          "products",
955                                          "env_scripts")
956     patches_tmp_dir = os.path.join(project_tmp_dir,
957                                          "products",
958                                          "patches")
959     application_tmp_dir = os.path.join(project_tmp_dir,
960                                          "applications")
961     for directory in [project_tmp_dir,
962                       compil_scripts_tmp_dir,
963                       env_scripts_tmp_dir,
964                       patches_tmp_dir,
965                       application_tmp_dir]:
966         src.ensure_path_exists(directory)
967
968     # Create the pyconf that contains the information of the project
969     project_pyconf_name = "project.pyconf"        
970     project_pyconf_file = os.path.join(project_tmp_dir, project_pyconf_name)
971     ff = open(project_pyconf_file, "w")
972     ff.write(PROJECT_TEMPLATE)
973     if with_ftp and len(config.PATHS.ARCHIVEFTP) > 0:
974         ftp_path='ARCHIVEFTP : "'+config.PATHS.ARCHIVEFTP[0]
975         for ftpserver in config.PATHS.ARCHIVEFTP[1:]:
976             ftp_path=ftp_path+":"+ftpserver
977         ftp_path+='"'
978         ff.write("# ftp servers where to search for prerequisite archives\n")
979         ff.write(ftp_path)
980     # add licence paths if any
981     if len(config.PATHS.LICENCEPATH) > 0:  
982         licence_path='LICENCEPATH : "'+config.PATHS.LICENCEPATH[0]
983         for path in config.PATHS.LICENCEPATH[1:]:
984             licence_path=licence_path+":"+path
985         licence_path+='"'
986         ff.write("\n# Where to search for licences\n")
987         ff.write(licence_path)
988         
989
990     ff.close()
991     
992     # Loop over the products to get their pyconf and all the scripts 
993     # (compilation, environment, patches)
994     # and create the pyconf file to add to the project
995     lproducts_name = config.APPLICATION.products.keys()
996     l_products = src.product.get_products_infos(lproducts_name, config)
997     for p_name, p_info in l_products:
998         # skip product with property not_in_package set to yes
999         if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
1000             continue  
1001         find_product_scripts_and_pyconf(p_name,
1002                                         p_info,
1003                                         config,
1004                                         with_vcs,
1005                                         compil_scripts_tmp_dir,
1006                                         env_scripts_tmp_dir,
1007                                         patches_tmp_dir,
1008                                         products_pyconf_tmp_dir)
1009     
1010     find_application_pyconf(config, application_tmp_dir)
1011     
1012     d_project = {"project" : (project_tmp_dir, PROJECT_DIR )}
1013     return d_project
1014
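# The temporary project tree created above looks like this (directory names are
# the ones used in this function):
#   PROJECT/
#       project.pyconf
#       applications/
#       products/
#           compil_scripts/
#           env_scripts/
#           patches/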
1015 def find_product_scripts_and_pyconf(p_name,
1016                                     p_info,
1017                                     config,
1018                                     with_vcs,
1019                                     compil_scripts_tmp_dir,
1020                                     env_scripts_tmp_dir,
1021                                     patches_tmp_dir,
1022                                     products_pyconf_tmp_dir):
1023     '''Create a specific pyconf file for a given product. Get its environment 
1024        script, its compilation script and patches and put it in the temporary
1025        working directory. This method is used in the source package in order to
1026        construct the specific project.
1027
1028     :param p_name str: The name of the product.
1029     :param p_info Config: The specific configuration corresponding to the 
1030                              product
1031     :param config Config: The global configuration.
1032     :param with_vcs boolean: True if the package is with vcs products (not 
1033                              transformed into archive products)
1034     :param compil_scripts_tmp_dir str: The path to the temporary compilation 
1035                                        scripts directory of the project.
1036     :param env_scripts_tmp_dir str: The path to the temporary environment script 
1037                                     directory of the project.
1038     :param patches_tmp_dir str: The path to the temporary patch scripts 
1039                                 directory of the project.
1040     :param products_pyconf_tmp_dir str: The path to the temporary product 
1041                                         scripts directory of the project.
1042     '''
1043     
1044     # read the pyconf of the product
1045     product_pyconf_path = src.find_file_in_lpath(p_name + ".pyconf",
1046                                            config.PATHS.PRODUCTPATH)
1047     product_pyconf_cfg = src.pyconf.Config(product_pyconf_path)
1048
1049     # find the compilation script if any
1050     if src.product.product_has_script(p_info):
1051         compil_script_path = src.Path(p_info.compil_script)
1052         compil_script_path.copy(compil_scripts_tmp_dir)
1053         product_pyconf_cfg[p_info.section].compil_script = os.path.basename(
1054                                                     p_info.compil_script)
1055     # find the environment script if any
1056     if src.product.product_has_env_script(p_info):
1057         env_script_path = src.Path(p_info.environ.env_script)
1058         env_script_path.copy(env_scripts_tmp_dir)
1059         product_pyconf_cfg[p_info.section].environ.env_script = os.path.basename(
1060                                                 p_info.environ.env_script)
1061     # find the patches if any
1062     if src.product.product_has_patches(p_info):
1063         patches = src.pyconf.Sequence()
1064         for patch_path in p_info.patches:
1065             p_path = src.Path(patch_path)
1066             p_path.copy(patches_tmp_dir)
1067             patches.append(os.path.basename(patch_path), "")
1068
1069         product_pyconf_cfg[p_info.section].patches = patches
1070     
1071     if with_vcs:
1072         # put in the pyconf file the resolved values
1073         for info in ["git_info", "cvs_info", "svn_info"]:
1074             if info in p_info:
1075                 for key in p_info[info]:
1076                     product_pyconf_cfg[p_info.section][info][key] = p_info[
1077                                                                       info][key]
1078     else:
1079         # if the product is not an archive product, then make it become one.
1080         if src.product.product_is_vcs(p_info):
1081             product_pyconf_cfg[p_info.section].get_source = "archive"
1082             if not "archive_info" in product_pyconf_cfg[p_info.section]:
1083                 product_pyconf_cfg[p_info.section].addMapping("archive_info",
1084                                         src.pyconf.Mapping(product_pyconf_cfg),
1085                                         "")
1086             product_pyconf_cfg[p_info.section
1087                               ].archive_info.archive_name = p_info.name + ".tgz"
1088     
1089     # write the pyconf file to the temporary project location
1090     product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir,
1091                                            p_name + ".pyconf")
1092     ff = open(product_tmp_pyconf_path, 'w')
1093     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1094     product_pyconf_cfg.__save__(ff, 1)
1095     ff.close()
1096
1097 def find_application_pyconf(config, application_tmp_dir):
1098     '''Find the application pyconf file and put it in the specific temporary 
1099        directory containing the specific project of a source package.
1100
1101     :param config Config: The global configuration.
1102     :param application_tmp_dir str: The path to the temporary application 
1103                                        scripts directory of the project.
1104     '''
1105     # read the pyconf of the application
1106     application_name = config.VARS.application
1107     application_pyconf_path = src.find_file_in_lpath(
1108                                             application_name + ".pyconf",
1109                                             config.PATHS.APPLICATIONPATH)
1110     application_pyconf_cfg = src.pyconf.Config(application_pyconf_path)
1111     
1112     # Change the workdir
1113     application_pyconf_cfg.APPLICATION.workdir = src.pyconf.Reference(
1114                                     application_pyconf_cfg,
1115                                     src.pyconf.DOLLAR,
1116                                     'VARS.salometoolsway + $VARS.sep + ".."')
1117
1118     # Prevent compilation in base
1119     application_pyconf_cfg.APPLICATION.no_base = "yes"
1120     
1121     #remove products that are not in config (which were filtered by --without_properties)
1122     for product_name in application_pyconf_cfg.APPLICATION.products.keys():
1123         if product_name not in config.APPLICATION.products.keys():
1124             application_pyconf_cfg.APPLICATION.products.__delitem__(product_name)
1125
1126     # write the pyconf file to the temporary application location
1127     application_tmp_pyconf_path = os.path.join(application_tmp_dir,
1128                                                application_name + ".pyconf")
1129
1130     ff = open(application_tmp_pyconf_path, 'w')
1131     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1132     application_pyconf_cfg.__save__(ff, 1)
1133     ff.close()
1134
1135 def sat_package(config, tmp_working_dir, options, logger):
1136     '''Prepare a dictionary that stores all the needed directories and files to
1137        add in a salomeTool package.
1138     
1139     :param tmp_working_dir str: The temporary local working directory 
1140     :param options OptResult: the options of the launched command
1141     :return: the dictionary that stores all the needed directories and files to
1142              add in a salomeTool package.
1143              {label : (path_on_local_machine, path_in_archive)}
1144     :rtype: dict
1145     '''
1146     d_project = {}
1147
1148     # we include sat itself
1149     d_project["all_sat"]=(config.VARS.salometoolsway, "")
1150
1151     # and we overwrite local.pyconf with a clean version.
1152     local_pyconf_tmp_path = os.path.join(tmp_working_dir, "local.pyconf")
1153     local_file_path = os.path.join(config.VARS.datadir, "local.pyconf")
1154     local_cfg = src.pyconf.Config(local_file_path)
1155     local_cfg.PROJECTS.project_file_paths=src.pyconf.Sequence(local_cfg.PROJECTS)
1156     local_cfg.LOCAL["base"] = "default"
1157     local_cfg.LOCAL["workdir"] = "default"
1158     local_cfg.LOCAL["log_dir"] = "default"
1159     local_cfg.LOCAL["archive_dir"] = "default"
1160     local_cfg.LOCAL["VCS"] = "None"
1161     local_cfg.LOCAL["tag"] = src.get_salometool_version(config)
1162
1163     # if the archive contains a project, we write its relative path in local.pyconf
1164     if options.project:
1165         project_arch_path = os.path.join("projects", options.project, 
1166                                          os.path.basename(options.project_file_path))
1167         local_cfg.PROJECTS.project_file_paths.append(project_arch_path, "")
1168
1169     ff = open(local_pyconf_tmp_path, 'w')
1170     local_cfg.__save__(ff, 1)
1171     ff.close()
1172     d_project["local.pyconf"]=(local_pyconf_tmp_path, "data/local.pyconf")
1173     return d_project
1174     
1175
1176 def project_package(config, name_project, project_file_path, ftp_mode, tmp_working_dir, embedded_in_sat, logger):
1177     '''Prepare a dictionary that stores all the needed directories and files to
1178        add to a project package.
1179     
1180     :param project_file_path str: The path to the local project.
1181     :param ftp_mode boolean: If True, do not embed product archives; the package will rely on ftp mode to retrieve them.
1182     :param tmp_working_dir str: The temporary local directory containing some 
1183                                 specific directories or files needed in the 
1184                                 project package
1185     :param embedded_in_sat boolean: True if the project package is embedded in a sat package
1186     :return: the dictionary that stores all the needed directories and files to
1187              add to a project package.
1188              {label : (path_on_local_machine, path_in_archive)}
1189     :rtype: dict
1190     '''
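    # Illustrative sketch (project name and local paths are assumed): for a project
    # "myproject" embedded in a sat package, the result looks like
    #   { "APPLICATIONPATH"  : ("<local applications dir>", "projects/myproject/applications"),
    #     ...
    #     "Project hat file" : ("<tmp_working_dir>/myproject.pyconf", "projects/myproject/myproject.pyconf") }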
1191     d_project = {}
1192     # Read the project file and get the directories to add to the package
1193     
1194     try: 
1195       project_pyconf_cfg = config.PROJECTS.projects.__getattr__(name_project)
1196     except:
1197       logger.write("""
1198 WARNING: config.PROJECTS.projects.%s does not exist, trying to read it now from:\n%s\n""" % (name_project, project_file_path))
1199       project_pyconf_cfg = src.pyconf.Config(project_file_path)
1200       project_pyconf_cfg.PWD = os.path.dirname(project_file_path)
1201     
1202     paths = {"APPLICATIONPATH" : "applications",
1203              "PRODUCTPATH" : "products",
1204              "JOBPATH" : "jobs",
1205              "MACHINEPATH" : "machines"}
1206     if not ftp_mode:
1207         paths["ARCHIVEPATH"] = "archives"
1208
1209     # Loop over the project paths and add them to the package
1210     project_file_name = os.path.basename(project_file_path)
1211     for path in paths:
1212         if path not in project_pyconf_cfg:
1213             continue
1214         if embedded_in_sat:
1215             dest_path = os.path.join("projects", name_project, paths[path])
1216             project_file_dest = os.path.join("projects", name_project, project_file_name)
1217         else:
1218             dest_path = paths[path]
1219             project_file_dest = project_file_name
1220
1221         # Add the directory to the files to add in the package
1222         d_project[path] = (project_pyconf_cfg[path], dest_path)
1223
1224         # Modify the value of the path in the package
1225         project_pyconf_cfg[path] = src.pyconf.Reference(
1226                                     project_pyconf_cfg,
1227                                     src.pyconf.DOLLAR,
1228                                     'project_path + "/' + paths[path] + '"')
1229     
1230     # Modify some values
1231     if "project_path" not in project_pyconf_cfg:
1232         project_pyconf_cfg.addMapping("project_path",
1233                                       src.pyconf.Mapping(project_pyconf_cfg),
1234                                       "")
1235     project_pyconf_cfg.project_path = src.pyconf.Reference(project_pyconf_cfg,
1236                                                            src.pyconf.DOLLAR,
1237                                                            'PWD')
1238     # we don't want to export these two fields
1239     project_pyconf_cfg.__delitem__("file_path")
1240     project_pyconf_cfg.__delitem__("PWD")
1241     if ftp_mode:
1242         project_pyconf_cfg.__delitem__("ARCHIVEPATH")
1243     
1244     # Write the project pyconf file
1245     project_pyconf_tmp_path = os.path.join(tmp_working_dir, project_file_name)
1246     ff = open(project_pyconf_tmp_path, 'w')
1247     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1248     project_pyconf_cfg.__save__(ff, 1)
1249     ff.close()
1250     d_project["Project hat file"] = (project_pyconf_tmp_path, project_file_dest)
1251     
1252     return d_project
1253
1254 def add_readme(config, options, where):
1255     readme_path = os.path.join(where, "README")
1256     with codecs.open(readme_path, "w", 'utf-8') as f:
1257
1258         # templates for building the header
1259         readme_header="""
1260 # This package was generated with sat $version
1261 # Date: $date
1262 # User: $user
1263 # Distribution : $dist
1264
1265 In the following, $$ROOT represents the directory where you have installed 
1266 SALOME (the directory where this file is located).
1267
1268 """
1269         readme_compilation_with_binaries="""
1270
1271 compilation based on the binaries used as prerequisites
1272 =======================================================
1273
1274 If you fail to compile the complete application (for example because
1275 you are not root on your system and cannot install missing packages), you
1276 may try a partial compilation based on the binaries.
1277 For that it is necessary to copy the binaries from BINARIES to INSTALL,
1278 and do some substitutions on cmake and .la files (replace the build directories
1279 with local paths).
1280 The procedure to do it is:
1281  1) Remove or rename INSTALL directory if it exists
1282  2) Execute the shell script install_bin.sh:
1283  > cd $ROOT
1284  > ./install_bin.sh
1285  3) Use salomeTools (as explained in the Sources section) and compile only the
1286     modules you need (with the -p option)
1287
1288 """
1289         readme_header_tpl=string.Template(readme_header)
1290         readme_template_path_bin = os.path.join(config.VARS.internal_dir,
1291                 "README_BIN.template")
1292         readme_template_path_bin_launcher = os.path.join(config.VARS.internal_dir,
1293                 "README_LAUNCHER.template")
1294         readme_template_path_bin_virtapp = os.path.join(config.VARS.internal_dir,
1295                 "README_BIN_VIRTUAL_APP.template")
1296         readme_template_path_src = os.path.join(config.VARS.internal_dir,
1297                 "README_SRC.template")
1298         readme_template_path_pro = os.path.join(config.VARS.internal_dir,
1299                 "README_PROJECT.template")
1300         readme_template_path_sat = os.path.join(config.VARS.internal_dir,
1301                 "README_SAT.template")
1302
1303         # prepare substitution dictionary
1304         d = dict()
1305         d['user'] = config.VARS.user
1306         d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
1307         d['version'] = src.get_salometool_version(config)
1308         d['dist'] = config.VARS.dist
1309         f.write(readme_header_tpl.substitute(d)) # write the general header (common)
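        # Illustrative rendering of the header (all values below are hypothetical):
        #   # This package was generated with sat 5.5.0
        #   # Date: 2020-01-01 12:00
        #   # User: jsmith
        #   # Distribution : FD32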
1310
1311         if options.binaries or options.sources:
1312             d['application'] = config.VARS.application
1313             f.write("# Application: " + d['application'] + "\n")
1314             if 'KERNEL' in config.APPLICATION.products:
1315                 VersionSalome = src.get_salome_version(config)
1316                 # Case where SALOME has the launcher that uses the SalomeContext API
1317                 if VersionSalome >= 730:
1318                     d['launcher'] = config.APPLICATION.profile.launcher_name
1319                 else:
1320                     d['virtual_app'] = 'runAppli' # this info is not used for now
1321
1322         # write the specific sections
1323         if options.binaries:
1324             f.write(src.template.substitute(readme_template_path_bin, d))
1325             if "virtual_app" in d:
1326                 f.write(src.template.substitute(readme_template_path_bin_virtapp, d))
1327             if "launcher" in d:
1328                 f.write(src.template.substitute(readme_template_path_bin_launcher, d))
1329
1330         if options.sources:
1331             f.write(src.template.substitute(readme_template_path_src, d))
1332
1333         if options.binaries and options.sources:
1334             f.write(readme_compilation_with_binaries)
1335
1336         if options.project:
1337             f.write(src.template.substitute(readme_template_path_pro, d))
1338
1339         if options.sat:
1340             f.write(src.template.substitute(readme_template_path_sat, d))
1341     
1342     return readme_path
1343
1344 def update_config(config, prop, value):
1345     '''Remove from config.APPLICATION.products the products that have the property given as input.
1346     
1347     :param config Config: The global config.
1348     :param prop str: The property to filter
1349     :param value str: The value of the property to filter
1350     '''
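    # Illustrative usage (this mirrors the call made later in run()):
    #   update_config(config, "not_in_package", "yes")
    # removes every product whose configuration sets the property not_in_package to "yes".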
1351     # if there is no APPLICATION (e.g. sat package -t): nothing to do
1352     if "APPLICATION" in config:
1353         l_product_to_remove = []
1354         for product_name in config.APPLICATION.products.keys():
1355             prod_cfg = src.product.get_product_config(config, product_name)
1356             if src.get_property_in_product_cfg(prod_cfg, prop) == value:
1357                 l_product_to_remove.append(product_name)
1358         for product_name in l_product_to_remove:
1359             config.APPLICATION.products.__delitem__(product_name)
1360
1361 def description():
1362     '''method that is called when salomeTools is called with --help option.
1363     
1364     :return: The text to display for the package command description.
1365     :rtype: str
1366     '''
1367     return _("""
1368 The package command creates a tar file archive of an application, a project, or salomeTools.
1369 There are four kinds of archive, which can be mixed:
1370
1371  1 - The binary archive. 
1372      It contains the product installation directories plus a launcher.
1373  2 - The sources archive. 
1374      It contains the product archives and a project (the application plus salomeTools).
1375  3 - The project archive. 
1376      It contains a project (give the project file path as argument).
1377  4 - The salomeTools archive. 
1378      It contains the salomeTools utility itself.
1379
1380 example:
1381  >> sat package SALOME-master --binaries --sources""")
1382   
1383 def run(args, runner, logger):
1384     '''method that is called when salomeTools is called with package parameter.
1385     '''
1386     
1387     # Parse the options
1388     (options, args) = parser.parse_args(args)
1389
1390     # Check that a type of package is called, and only one
1391     all_option_types = (options.binaries,
1392                         options.sources,
1393                         options.project not in ["", None],
1394                         options.sat)
1395
1396     # Check if no option for package type
1397     if all_option_types.count(True) == 0:
1398         msg = _("Error: Precise a type for the package\nUse one of the "
1399                 "following options: --binaries, --sources, --project or"
1400                 " --salometools")
1401         logger.write(src.printcolors.printcError(msg), 1)
1402         logger.write("\n", 1)
1403         return 1
1404     
1405     # The default directory where to put the package if it is neither binary nor source
1406     package_default_path = runner.cfg.LOCAL.workdir
1407     
1408     # if the package contains binaries or sources:
1409     if options.binaries or options.sources:
1410         # Check that the command has been called with an application
1411         src.check_config_has_application(runner.cfg)
1412
1413         # Display information
1414         logger.write(_("Packaging application %s\n") % src.printcolors.printcLabel(
1415                                                     runner.cfg.VARS.application), 1)
1416         
1417         # Get the default directory where to put the packages
1418         package_default_path = os.path.join(runner.cfg.APPLICATION.workdir, "PACKAGE")
1419         src.ensure_path_exists(package_default_path)
1420         
1421     # if the package contains a project:
1422     if options.project:
1423         # check that the project is visible by SAT
1424         projectNameFile = options.project + ".pyconf"
1425         foundProject = None
1426         for i in runner.cfg.PROJECTS.project_file_paths:
1427             baseName = os.path.basename(i)
1428             if baseName == projectNameFile:
1429                 foundProject = i
1430                 break
1431
1432         if foundProject is None:
1433             local_path = os.path.join(runner.cfg.VARS.salometoolsway, "data", "local.pyconf")
1434             msg = _("""ERROR: the project %(1)s is not visible by salomeTools.
1435 known projects are:
1436 %(2)s
1437
1438 Please add it in file:
1439 %(3)s""" % \
1440                     {"1": options.project, "2": "\n  ".join(runner.cfg.PROJECTS.project_file_paths), "3": local_path})
1441             logger.write(src.printcolors.printcError(msg), 1)
1442             logger.write("\n", 1)
1443             return 1
1444         else:
1445             options.project_file_path = foundProject
1446             src.printcolors.print_value(logger, "Project path", options.project_file_path, 2)
1447     
1448     # Remove the products that are filtered by the --without_properties option
1449     if options.without_properties:
1450         app = runner.cfg.APPLICATION
1451         logger.trace("without_properties all products:\n %s\n" % PP.pformat(sorted(app.products.keys())))
1452         prop, value = options.without_properties
1453         update_config(runner.cfg, prop, value)
1454         logger.warning("without_properties selected products:\n %s\n" % PP.pformat(sorted(app.products.keys())))
1455
1456     # Remove from config the products that have the not_in_package property
1457     update_config(runner.cfg, "not_in_package", "yes")
1458     
1459     # get the name of the archive or build it
1460     if options.name:
1461         if os.path.basename(options.name) == options.name:
1462             # only a name (not a path)
1463             archive_name = options.name           
1464             dir_name = package_default_path
1465         else:
1466             archive_name = os.path.basename(options.name)
1467             dir_name = os.path.dirname(options.name)
1468         
1469         # suppress extension
1470         if archive_name[-len(".tgz"):] == ".tgz":
1471             archive_name = archive_name[:-len(".tgz")]
1472         if archive_name[-len(".tar.gz"):] == ".tar.gz":
1473             archive_name = archive_name[:-len(".tar.gz")]
1474         
1475     else:
1476         archive_name=""
1477         dir_name = package_default_path
1478         if options.binaries or options.sources:
1479             archive_name = runner.cfg.APPLICATION.name
1480
1481         if options.binaries:
1482             archive_name += "-"+runner.cfg.VARS.dist
1483             
1484         if options.sources:
1485             archive_name += "-SRC"
1486             if options.with_vcs:
1487                 archive_name += "-VCS"
1488
1489         if options.sat:
1490             archive_name += ("salomeTools_" + src.get_salometool_version(runner.cfg))
1491
1492         if options.project:
1493             if options.sat:
1494                 archive_name += "_" 
1495             project_name = options.project
1496             archive_name += ("satproject_" + project_name)
1497  
1498         if len(archive_name)==0: # no option worked 
1499             msg = _("Error: Cannot name the archive\n"
1500                     " check if at least one of the following options was "
1501                     "selected : --binaries, --sources, --project or"
1502                     " --salometools")
1503             logger.write(src.printcolors.printcError(msg), 1)
1504             logger.write("\n", 1)
1505             return 1
1506  
1507     path_targz = os.path.join(dir_name, archive_name + ".tgz")
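    # Illustrative example (the dist value is hypothetical): packaging SALOME-master with
    # --binaries and no --name on a "FD32" distribution gives
    #   <application workdir>/PACKAGE/SALOME-master-FD32.tgz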
1508     
1509     src.printcolors.print_value(logger, "Package path", path_targz, 2)
1510
1511     # Create a working directory for all files that are produced during the
1512     # package creation and that will be removed at the end of the command
1513     tmp_working_dir = os.path.join(runner.cfg.VARS.tmp_root, runner.cfg.VARS.datehour)
1514     src.ensure_path_exists(tmp_working_dir)
1515     logger.write("\n", 5)
1516     logger.write(_("The temporary working directory: %s\n" % tmp_working_dir),5)
1517     
1518     logger.write("\n", 3)
1519
1520     msg = _("Preparation of files to add to the archive")
1521     logger.write(src.printcolors.printcLabel(msg), 2)
1522     logger.write("\n", 2)
1523     
1524     d_files_to_add={}  # content of the archive
1525
1526     # a dict to hold paths that will need to be substituted for user recompilations
1527     d_paths_to_substitute={}  
1528
1529     if options.binaries:
1530         d_bin_files_to_add = binary_package(runner.cfg,
1531                                             logger,
1532                                             options,
1533                                             tmp_working_dir)
1534         # for each binaries directory, store the substitution that will be required 
1535         # for extra compilations
1536         for key in d_bin_files_to_add:
1537             if key.endswith("(bin)"):
1538                 source_dir = d_bin_files_to_add[key][0]
1539                 path_in_archive = d_bin_files_to_add[key][1].replace("BINARIES-" +\
1540                    runner.cfg.VARS.dist,runner.cfg.INTERNAL.config.install_dir)
1541                 if os.path.basename(source_dir)==os.path.basename(path_in_archive):
1542                     # if basename is the same we will just substitute the dirname 
1543                     d_paths_to_substitute[os.path.dirname(source_dir)]=\
1544                         os.path.dirname(path_in_archive)
1545                 else:
1546                     d_paths_to_substitute[source_dir]=path_in_archive
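        # Illustrative sketch (concrete paths are assumed): each entry maps a directory on
        # the build machine to its relative location inside the unpacked archive, e.g.
        #   "/build/machine/APPLI/INSTALL"  ->  "INSTALL"
        # so that install_bin.sh can later fix the paths in the cmake and .la files.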
1547
1548         d_files_to_add.update(d_bin_files_to_add)
1549
1550     if options.sources:
1551         d_files_to_add.update(source_package(runner,
1552                                         runner.cfg,
1553                                         logger, 
1554                                         options,
1555                                         tmp_working_dir))
1556         if options.binaries:
1557             # for archives with bin and sources we provide a shell script able to 
1558             # install binaries for compilation
1559             file_install_bin=produce_install_bin_file(runner.cfg,logger,
1560                                                       tmp_working_dir,
1561                                                       d_paths_to_substitute,
1562                                                       "install_bin.sh")
1563             d_files_to_add.update({"install_bin" : (file_install_bin, "install_bin.sh")})
1564             logger.write("substitutions that need to be done later : \n", 5)
1565             logger.write(str(d_paths_to_substitute), 5)
1566             logger.write("\n", 5)
1567     else:
1568         # the --salometools option is not considered when --sources is selected, as the
1569         # sources archive already includes salomeTools!
1570         if options.sat:
1571             d_files_to_add.update(sat_package(runner.cfg, tmp_working_dir, 
1572                                   options, logger))
1573         
1574     if options.project:
1575         DBG.write("config for package %s" % project_name, runner.cfg)
1576         d_files_to_add.update(project_package(runner.cfg, project_name, options.project_file_path, options.ftp, tmp_working_dir, options.sat, logger))
1577
1578     if not(d_files_to_add):
1579         msg = _("Error: Empty dictionnary to build the archive!\n")
1580         logger.write(src.printcolors.printcError(msg), 1)
1581         logger.write("\n", 1)
1582         return 1
1583
1584     # Add the README file in the package
1585     local_readme_tmp_path = add_readme(runner.cfg, options, tmp_working_dir)
1586     d_files_to_add["README"] = (local_readme_tmp_path, "README")
1587
1588     # Add the additional files of option add_files
1589     if options.add_files:
1590         for file_path in options.add_files:
1591             if not os.path.exists(file_path):
1592                 msg = _("WARNING: the file %s is not accessible.\n" % file_path)
1593                 continue
1594             file_name = os.path.basename(file_path)
1595             d_files_to_add[file_name] = (file_path, file_name)
1596
1597     logger.write("\n", 2)
1598     logger.write(src.printcolors.printcLabel(_("Actually do the package")), 2)
1599     logger.write("\n", 2)
1600     logger.write("\nfiles and directories to add:\n%s\n\n" % PP.pformat(d_files_to_add), 5)
1601
1602     res = 0
1603     try:
1604         # Creating the object tarfile
1605         tar = tarfile.open(path_targz, mode='w:gz')
1606         
1607         # get the filtering function if needed
1608         filter_function = exclude_VCS_and_extensions
1609
1610         # Add the files to the tarfile object
1611         res = add_files(tar, archive_name, d_files_to_add, logger, f_exclude=filter_function)
1612         tar.close()
1613     except KeyboardInterrupt:
1614         logger.write(src.printcolors.printcError("\nERROR: forced interruption\n"), 1)
1615         logger.write(_("Removing the temporary working directory '%s'... ") % tmp_working_dir, 1)
1616         # remove the working directory
1617         shutil.rmtree(tmp_working_dir)
1618         logger.write(_("OK"), 1)
1619         logger.write(_("\n"), 1)
1620         return 1
1621     
1622     # case if no application, only package sat as 'sat package -t'
1623     try:
1624         app = runner.cfg.APPLICATION
1625     except:
1626         app = None
1627
1628     # unconditionally remove the tmp_local_working_dir
1629     if app is not None:
1630         tmp_local_working_dir = os.path.join(app.workdir, "tmp_package")
1631         if os.path.isdir(tmp_local_working_dir):
1632             shutil.rmtree(tmp_local_working_dir)
1633
1634     # remove the tmp directory, unless user has registered as developer
1635     if os.path.isdir(tmp_working_dir) and (not DBG.isDeveloper()):
1636         shutil.rmtree(tmp_working_dir)
1637     
1638     # Print again the path of the package
1639     logger.write("\n", 2)
1640     src.printcolors.print_value(logger, "Package path", path_targz, 2)
1641     
1642     return res