1 #!/usr/bin/env python
2 #-*- coding:utf-8 -*-
3 #  Copyright (C) 2010-2012  CEA/DEN
4 #
5 #  This library is free software; you can redistribute it and/or
6 #  modify it under the terms of the GNU Lesser General Public
7 #  License as published by the Free Software Foundation; either
8 #  version 2.1 of the License.
9 #
10 #  This library is distributed in the hope that it will be useful,
11 #  but WITHOUT ANY WARRANTY; without even the implied warranty of
12 #  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13 #  Lesser General Public License for more details.
14 #
15 #  You should have received a copy of the GNU Lesser General Public
16 #  License along with this library; if not, write to the Free Software
17 #  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
18
19 import os
20 import stat
21 import shutil
22 import datetime
23 import tarfile
24 import codecs
25 import string
26 import glob
27 import pprint as PP
28
29 import src
30
31 from application import get_SALOME_modules
32 import src.debug as DBG
33
34 BINARY = "binary"
35 SOURCE = "Source"
36 PROJECT = "Project"
37 SAT = "Sat"
38
39 ARCHIVE_DIR = "ARCHIVES"
40 PROJECT_DIR = "PROJECT"
41
42 IGNORED_DIRS = [".git", ".svn"]
43 IGNORED_EXTENSIONS = []
44
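# Template of the project.pyconf file written at the root of the PROJECT directory
# that is embedded in source packages (see create_project_for_src_package below).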
45 PROJECT_TEMPLATE = """#!/usr/bin/env python
46 #-*- coding:utf-8 -*-
47
48 # The path to the archive root directory
49 root_path : $PWD + "/../"
50 # path to the PROJECT
51 project_path : $PWD + "/"
52
53 # Where to search for the archives of the products
54 ARCHIVEPATH : $root_path + "ARCHIVES"
55 # Where to search for the pyconf files of the applications
56 APPLICATIONPATH : $project_path + "applications/"
57 # Where to search for the pyconf files of the products
58 PRODUCTPATH : $project_path + "products/"
59 # Where to search for the pyconf files of the jobs of the project
60 JOBPATH : $project_path + "jobs/"
61 # Where to search for the pyconf files of the machines of the project
62 MACHINEPATH : $project_path + "machines/"
63 """
64
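# Template of the data/local.pyconf file shipped with the salomeTools copy embedded
# in source packages: it resets the local settings and registers the packaged
# PROJECT directory in PROJECTS.project_file_paths (see add_salomeTools below).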
65 LOCAL_TEMPLATE = ("""#!/usr/bin/env python
66 #-*- coding:utf-8 -*-
67
68   LOCAL :
69   {
70     base : 'default'
71     workdir : 'default'
72     log_dir : 'default'
73     archive_dir : 'default'
74     VCS : None
75     tag : None
76   }
77
78 PROJECTS :
79 {
80 project_file_paths : [$VARS.salometoolsway + $VARS.sep + \"..\" + $VARS.sep"""
81 """ + \"""" + PROJECT_DIR + """\" + $VARS.sep + "project.pyconf"]
82 }
83 """)
84
85 # Define all possible options for the package command: sat package <options>
86 parser = src.options.Options()
87 parser.add_option('b', 'binaries', 'boolean', 'binaries',
88     _('Optional: Produce a binary package.'), False)
89 parser.add_option('f', 'force_creation', 'boolean', 'force_creation',
90     _('Optional: Binary package only: produce the archive even if '
91       'some products are missing.'), False)
92 parser.add_option('s', 'sources', 'boolean', 'sources',
93     _('Optional: Produce a compilable archive of the sources of the '
94       'application.'), False)
95 parser.add_option('', 'with_vcs', 'boolean', 'with_vcs',
96     _('Optional: Do not make archives for products in VCS mode (git, cvs, svn); '
97       'sat prepare will use the VCS mode instead to retrieve them.'),
98     False)
99 parser.add_option('', 'ftp', 'boolean', 'ftp',
100     _('Optional: Do not embed archives for products in archive mode; '
101       'sat prepare will use ftp instead to retrieve them.'),
102     False)
103 parser.add_option('p', 'project', 'string', 'project',
104     _('Optional: Produce an archive that contains a project.'), "")
105 parser.add_option('t', 'salometools', 'boolean', 'sat',
106     _('Optional: Produce an archive that contains salomeTools.'), False)
107 parser.add_option('n', 'name', 'string', 'name',
108     _('Optional: The name or full path of the archive.'), None)
109 parser.add_option('', 'add_files', 'list2', 'add_files',
110     _('Optional: The list of additional files to add to the archive.'), [])
111 parser.add_option('', 'without_commercial', 'boolean', 'without_commercial',
112     _('Optional: Do not add the commercial licence.'), False)
113 parser.add_option('', 'without_properties', 'properties', 'without_properties',
114     _('Optional: Filter the products by their properties.\n\tSyntax: '
115       '--without_properties <property>:<value>'))
116
117
118 def add_files(tar, name_archive, d_content, logger, f_exclude=None):
119     '''Create an archive containing all directories and files that are given in
120        the d_content argument.
121     
122     :param tar tarfile: The tarfile instance used to make the archive.
123     :param name_archive str: The name of the archive to make.
124     :param d_content dict: The dictionary that contains all directories and files
125                            to add in the archive.
126                            d_content[label] = 
127                                         (path_on_local_machine, path_in_archive)
128     :param logger Logger: the logging instance
129     :param f_exclude Function: the function used to exclude some files from the archive
130     :return: 0 if success, 1 if not.
131     :rtype: int
132     '''
133     # get the max length of the messages in order to make the display
134     max_len = len(max(d_content.keys(), key=len))
135     
136     success = 0
137     # loop over each directory or file stored in the d_content dictionary
138     names = sorted(d_content.keys())
139     DBG.write("add tar names", names)
140
141     for name in names:
142         # display information
143         len_points = max_len - len(name) + 3
144         local_path, archive_path = d_content[name]
145         in_archive = os.path.join(name_archive, archive_path)
146         logger.write(name + " " + len_points * "." + " "+ in_archive + " ", 3)
147         # Get the local path and the path in archive 
148         # of the directory or file to add
149         # Add it in the archive
150         try:
151             tar.add(local_path, arcname=in_archive, exclude=f_exclude)
152             logger.write(src.printcolors.printcSuccess(_("OK")), 3)
153         except Exception as e:
154             logger.write(src.printcolors.printcError(_("KO ")), 3)
155             logger.write(str(e), 3)
156             success = 1
157         logger.write("\n", 3)
158     return success
159
160 def exclude_VCS_and_extensions(filename):
161     ''' The function used to exclude the VCS directories (like .git) and some
162         file extensions from the package.
163
164     :param filename Str: The filename to exclude (or not).
165     :return: True if the file has to be excluded
166     :rtype: Boolean
167     '''
168     for dir_name in IGNORED_DIRS:
169         if dir_name in filename:
170             return True
171     for extension in IGNORED_EXTENSIONS:
172         if filename.endswith(extension):
173             return True
174     return False
175
176 def produce_relative_launcher(config,
177                               logger,
178                               file_dir,
179                               file_name,
180                               binaries_dir_name,
181                               with_commercial=True):
182     '''Create a specific SALOME launcher for the binary package. This launcher 
183        uses relative paths.
184     
185     :param config Config: The global configuration.
186     :param logger Logger: the logging instance
187     :param file_dir str: the directory where to put the launcher
188     :param file_name str: The launcher name
189     :param binaries_dir_name str: the name of the directory where the binaries
190                                   are, in the archive.
191     :return: the path of the produced launcher
192     :rtype: str
193     '''
194     
195     # get KERNEL installation path 
196     kernel_root_dir = os.path.join(binaries_dir_name, "KERNEL")
197
198     # set kernel bin dir (considering fhs property)
199     kernel_cfg = src.product.get_product_config(config, "KERNEL")
200     if src.get_property_in_product_cfg(kernel_cfg, "fhs"):
201         bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin") 
202     else:
203         bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin","salome") 
204
205     # check if the application contains an application module
206     # check also if the application has a distene product, 
207     # in this case get its licence file name
208     l_product_info = src.product.get_products_infos(config.APPLICATION.products.keys(), config)
209     salome_application_name="Not defined" 
210     distene_licence_file_name=False
211     for prod_name, prod_info in l_product_info:
212         # look for a "salome application" and a distene product
213         if src.get_property_in_product_cfg(prod_info, "is_distene") == "yes":
214             distene_licence_file_name = src.product.product_has_licence(prod_info, 
215                                             config.PATHS.LICENCEPATH) 
216         if src.get_property_in_product_cfg(prod_info, "is_salome_application") == "yes":
217             salome_application_name=prod_info.name
218
219     # if the application contains an application module, we set ABSOLUTE_APPLI_PATH to it
220     # if not we set it to KERNEL_INSTALL_DIR, which is sufficient, except for salome test
221     if salome_application_name == "Not defined":
222         app_root_dir=kernel_root_dir
223     else:
224         app_root_dir=os.path.join(binaries_dir_name, salome_application_name)
225
226     # Get the launcher template and do substitutions
227     if "python3" in config.APPLICATION and config.APPLICATION.python3 == "yes":
228         withProfile = src.fileEnviron.withProfile3
229     else:
230         withProfile = src.fileEnviron.withProfile
231
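    # rewrite the placeholders of the template: ABSOLUTE_APPLI_PATH and
    # BIN_KERNEL_INSTALL_DIR become paths relative to out_dir_Path, i.e. relative
    # to the root of the unpacked archive at run time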
232     withProfile = withProfile.replace(
233         "ABSOLUTE_APPLI_PATH'] = 'KERNEL_INSTALL_DIR'",
234         "ABSOLUTE_APPLI_PATH'] = out_dir_Path + '" + config.VARS.sep + app_root_dir + "'")
235     withProfile = withProfile.replace(
236         " 'BIN_KERNEL_INSTALL_DIR'",
237         " out_dir_Path + '" + config.VARS.sep + bin_kernel_install_dir + "'")
238
239     before, after = withProfile.split("# here your local standalone environment\n")
240
241     # create an environment file writer
242     writer = src.environment.FileEnvWriter(config,
243                                            logger,
244                                            file_dir,
245                                            src_root=None)
246     
247     filepath = os.path.join(file_dir, file_name)
248     # open the file and write into it
249     launch_file = open(filepath, "w")
250     launch_file.write(before)
251     # Write
252     writer.write_cfgForPy_file(launch_file,
253                                for_package = binaries_dir_name,
254                                with_commercial=with_commercial)
255     launch_file.write(after)
256     launch_file.close()
257     
258     # Little hack to put out_dir_Path outside the strings
259     src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' )
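    # (a literal such as r"out_dir_Path/..." becomes the expression out_dir_Path + r"/...",
    #  so the path is computed at run time from the location of the unpacked archive)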
260     
261     # A hack to put a call to a file for distene licence.
262     # It does nothing to an application that has no distene product
263     if distene_licence_file_name:
264         logger.write("Application has a distene licence file! We use it in package launcher", 5)
265         hack_for_distene_licence(filepath, distene_licence_file_name)
266        
267     # change the rights in order to make the file executable for everybody
268     os.chmod(filepath,
269              stat.S_IRUSR |
270              stat.S_IRGRP |
271              stat.S_IROTH |
272              stat.S_IWUSR |
273              stat.S_IXUSR |
274              stat.S_IXGRP |
275              stat.S_IXOTH)
276
277     return filepath
278
279 def hack_for_distene_licence(filepath, licence_file):
280     '''Replace the distene licence env variable by a call to a file.
281     
282     :param filepath Str: The path to the launcher to modify.
283     '''  
284     shutil.move(filepath, filepath + "_old")
285     fileout= filepath
286     filein = filepath + "_old"
287     fin = open(filein, "r")
288     fout = open(fileout, "w")
289     text = fin.readlines()
290     # Find the Distene section
291     num_line = -1
292     for i,line in enumerate(text):
293         if "# Set DISTENE License" in line:
294             num_line = i
295             break
296     if num_line == -1:
297         # No distene product, there is nothing to do
298         fin.close()
299         for line in text:
300             fout.write(line)
301         fout.close()
302         return
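    # the two lines following the "# Set DISTENE License" marker originally set the
    # licence environment variables; they are removed here and replaced by the block
    # inserted below, which loads the licence file and calls set_distene_variables(context)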
303     del text[num_line +1]
304     del text[num_line +1]
305     text_to_insert ="""    try:
306         distene_licence_file="%s"
307         if sys.version_info[0] >= 3 and sys.version_info[1] >= 5:
308             import importlib.util
309             spec_dist = importlib.util.spec_from_file_location("distene_licence", distene_licence_file)
310             distene=importlib.util.module_from_spec(spec_dist)
311             spec_dist.loader.exec_module(distene)
312         else:
313             import imp
314             distene = imp.load_source('distene_licence', distene_licence_file)
315         distene.set_distene_variables(context)
316     except:
317         pass\n"""  % licence_file
318     text.insert(num_line + 1, text_to_insert)
319     for line in text:
320         fout.write(line)
321     fin.close()    
322     fout.close()
323     return
324     
325 def produce_relative_env_files(config,
326                               logger,
327                               file_dir,
328                               binaries_dir_name):
329     '''Create some specific environment files for the binary package. These 
330        files use relative paths.
331     
332     :param config Config: The global configuration.
333     :param logger Logger: the logging instance
334     :param file_dir str: the directory where to put the files
335     :param binaries_dir_name str: the name of the directory where the binaries
336                                   are, in the archive.
337     :return: the list of path of the produced environment files
338     :rtype: List
339     '''  
340     # create an environment file writer
341     writer = src.environment.FileEnvWriter(config,
342                                            logger,
343                                            file_dir,
344                                            src_root=None)
345     
346     if src.architecture.is_windows():
347       shell = "bat"
348       filename  = "env_launch.bat"
349     else:
350       shell = "bash"
351       filename  = "env_launch.sh"
352
353     # Write
354     filepath = writer.write_env_file(filename,
355                           False, # for launch
356                           shell,
357                           for_package = binaries_dir_name)
358
359     # Little hack to put out_dir_Path as environment variable
360     src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )
361
362     # change the rights in order to make the file executable for everybody
363     os.chmod(filepath,
364              stat.S_IRUSR |
365              stat.S_IRGRP |
366              stat.S_IROTH |
367              stat.S_IWUSR |
368              stat.S_IXUSR |
369              stat.S_IXGRP |
370              stat.S_IXOTH)
371     
372     return filepath
373
374 def produce_install_bin_file(config,
375                              logger,
376                              file_dir,
377                              d_sub,
378                              file_name):
379     '''Create a bash shell script which does substitutions in the BINARIES dir 
380        in order to use it for extra compilations.
381     
382     :param config Config: The global configuration.
383     :param logger Logger: the logging instance
384     :param file_dir str: the directory where to put the files
385     :param d_sub dict: the dictionary that contains the substitutions to be done
386     :param file_name str: the name of the install script file
387     :return: the produced file
388     :rtype: str
389     '''  
390     # Write
391     filepath = os.path.join(file_dir, file_name)
392     # open the file and write into it
393     # use codec utf-8 as sat variables are in unicode
394     with codecs.open(filepath, "w", 'utf-8') as installbin_file:
395         installbin_template_path = os.path.join(config.VARS.internal_dir,
396                                         "INSTALL_BIN.template")
397         
398         # build the name of the directory that will contain the binaries
399         binaries_dir_name = "BINARIES-" + config.VARS.dist
400         # build the substitution loop
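        # For illustration, with a hypothetical substitution {"@OLD_ROOT@": "BINARIES-XXX"}
        # and an install dir named "INSTALL", the generated snippet looks like:
        #   for f in $(grep -RIl -e @OLD_ROOT@ INSTALL); do
        #        sed -i "
        #           s?@OLD_ROOT@?$(pwd)/BINARIES-XXX?g
        #               " $f
        #   done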
401         loop_cmd = "for f in $(grep -RIl"
402         for key in d_sub:
403             loop_cmd += " -e "+ key
404         loop_cmd += ' ' + config.INTERNAL.config.install_dir +\
405                     '); do\n     sed -i "\n'
406         for key in d_sub:
407             loop_cmd += "        s?" + key + "?$(pwd)/" + d_sub[key] + "?g\n"
408         loop_cmd += '            " $f\ndone'
409
410         d={}
411         d["BINARIES_DIR"] = binaries_dir_name
412         d["SUBSTITUTION_LOOP"]=loop_cmd
413         d["INSTALL_DIR"]=config.INTERNAL.config.install_dir
414         
415         # substitute the template and write it in file
416         content=src.template.substitute(installbin_template_path, d)
417         installbin_file.write(content)
418         # change the rights in order to make the file executable for everybody
419         os.chmod(filepath,
420                  stat.S_IRUSR |
421                  stat.S_IRGRP |
422                  stat.S_IROTH |
423                  stat.S_IWUSR |
424                  stat.S_IXUSR |
425                  stat.S_IXGRP |
426                  stat.S_IXOTH)
427     
428     return filepath
429
430 def product_appli_creation_script(config,
431                                   logger,
432                                   file_dir,
433                                   binaries_dir_name):
434     '''Create a script that can produce an application (EDF style) in the binary
435        package.
436     
437     :param config Config: The global configuration.
438     :param logger Logger: the logging instance
439     :param file_dir str: the directory where to put the file
440     :param binaries_dir_name str: the name of the directory where the binaries
441                                   are, in the archive.
442     :return: the path of the produced script file
443     :rtype: Str
444     '''
445     template_name = "create_appli.py.for_bin_packages.template"
446     template_path = os.path.join(config.VARS.internal_dir, template_name)
447     text_to_fill = open(template_path, "r").read()
448     text_to_fill = text_to_fill.replace("TO BE FILLED 1",
449                                         '"' + binaries_dir_name + '"')
450     
451     text_to_add = ""
452     for product_name in get_SALOME_modules(config):
453         product_info = src.product.get_product_config(config, product_name)
454        
455         if src.product.product_is_smesh_plugin(product_info):
456             continue
457
458         if 'install_dir' in product_info and bool(product_info.install_dir):
459             if src.product.product_is_cpp(product_info):
460                 # cpp module
461                 for cpp_name in src.product.get_product_components(product_info):
462                     line_to_add = ("<module name=\"" + 
463                                    cpp_name + 
464                                    "\" gui=\"yes\" path=\"''' + "
465                                    "os.path.join(dir_bin_name, \"" + 
466                                    cpp_name + "\") + '''\"/>")
467             else:
468                 # regular module
469                 line_to_add = ("<module name=\"" + 
470                                product_name + 
471                                "\" gui=\"yes\" path=\"''' + "
472                                "os.path.join(dir_bin_name, \"" + 
473                                product_name + "\") + '''\"/>")
474             text_to_add += line_to_add + "\n"
475     
476     filled_text = text_to_fill.replace("TO BE FILLED 2", text_to_add)
477     
478     tmp_file_path = os.path.join(file_dir, "create_appli.py")
479     ff = open(tmp_file_path, "w")
480     ff.write(filled_text)
481     ff.close()
482     
483     # change the rights in order to make the file executable for everybody
484     os.chmod(tmp_file_path,
485              stat.S_IRUSR |
486              stat.S_IRGRP |
487              stat.S_IROTH |
488              stat.S_IWUSR |
489              stat.S_IXUSR |
490              stat.S_IXGRP |
491              stat.S_IXOTH)
492     
493     return tmp_file_path
494
495 def binary_package(config, logger, options, tmp_working_dir):
496     '''Prepare a dictionary that stores all the needed directories and files to
497        add in a binary package.
498     
499     :param config Config: The global configuration.
500     :param logger Logger: the logging instance
501     :param options OptResult: the options of the launched command
502     :param tmp_working_dir str: The temporary local directory containing some 
503                                 specific directories or files needed in the 
504                                 binary package
505     :return: the dictionary that stores all the needed directories and files to
506              add in a binary package.
507              {label : (path_on_local_machine, path_in_archive)}
508     :rtype: dict
509     '''
510
511     # Get the list of product installation to add to the archive
512     l_products_name = sorted(config.APPLICATION.products.keys())
513     l_product_info = src.product.get_products_infos(l_products_name,
514                                                     config)
515     l_install_dir = []
516     l_source_dir = []
517     l_not_installed = []
518     l_sources_not_present = []
519     generate_mesa_launcher = False  # a flag to know if we generate a mesa launcher
520     if ("APPLICATION" in config  and
521         "properties"  in config.APPLICATION  and
522         "mesa_launcher_in_package"    in config.APPLICATION.properties  and
523         config.APPLICATION.properties.mesa_launcher_in_package == "yes") :
524             generate_mesa_launcher=True
525
526     for prod_name, prod_info in l_product_info:
527         # skip product with property not_in_package set to yes
528         if src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes":
529             continue  
530
531         # Add the sources of the products that have the property 
532         # sources_in_package : "yes"
533         if src.get_property_in_product_cfg(prod_info,
534                                            "sources_in_package") == "yes":
535             if os.path.exists(prod_info.source_dir):
536                 l_source_dir.append((prod_name, prod_info.source_dir))
537             else:
538                 l_sources_not_present.append(prod_name)
539
540         # ignore the native and fixed products for install directories
541         if (src.product.product_is_native(prod_info) 
542                 or src.product.product_is_fixed(prod_info)
543                 or not src.product.product_compiles(prod_info)):
544             continue
545         if src.product.check_installation(prod_info):
546             l_install_dir.append((prod_name, prod_info.install_dir))
547         else:
548             l_not_installed.append(prod_name)
549         
550         # Add also the cpp generated modules (if any)
551         if src.product.product_is_cpp(prod_info):
552             # cpp module
553             for name_cpp in src.product.get_product_components(prod_info):
554                 install_dir = os.path.join(config.APPLICATION.workdir,
555                                            config.INTERNAL.config.install_dir,
556                                            name_cpp) 
557                 if os.path.exists(install_dir):
558                     l_install_dir.append((name_cpp, install_dir))
559                 else:
560                     l_not_installed.append(name_cpp)
561         
562     # check the name of the directory that could contain the binaries 
563     # from previous detar
564     binaries_from_detar = os.path.join(config.APPLICATION.workdir, "BINARIES-" + config.VARS.dist)
565     if os.path.exists(binaries_from_detar):
566          logger.write("""
567 WARNING: existing binaries directory from previous detar installation:
568          %s
569          To make a new package from this, you have to: 
570          1) install binaries in INSTALL directory with the script "install_bin.sh" 
571             see README file for more details
572          2) or recompile everything in INSTALL with "sat compile" command 
573             this step is long, and requires some linux packages to be installed 
574             on your system\n
575 """ % binaries_from_detar)
576     
577     # Print warning or error if there are some missing products
578     if len(l_not_installed) > 0:
579         text_missing_prods = ""
580         for p_name in l_not_installed:
581             text_missing_prods += "-" + p_name + "\n"
582         if not options.force_creation:
583             msg = _("ERROR: there are missing products installations:")
584             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
585                                      text_missing_prods),
586                          1)
587             return None
588         else:
589             msg = _("WARNING: there are missing products installations:")
590             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
591                                      text_missing_prods),
592                          1)
593
594     # Do the same for sources
595     if len(l_sources_not_present) > 0:
596         text_missing_prods = ""
597         for p_name in l_sources_not_present:
598             text_missing_prods += "-" + p_name + "\n"
599         if not options.force_creation:
600             msg = _("ERROR: there are missing products sources:")
601             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
602                                      text_missing_prods),
603                          1)
604             return None
605         else:
606             msg = _("WARNING: there are missing products sources:")
607             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
608                                      text_missing_prods),
609                          1)
610  
611     # construct the name of the directory that will contain the binaries
612     binaries_dir_name = "BINARIES-" + config.VARS.dist
613     
614     # construct the correlation table between the product names, their 
615     # actual install directories and their install directories in the archive
616     d_products = {}
617     for prod_name, install_dir in l_install_dir:
618         path_in_archive = os.path.join(binaries_dir_name, prod_name)
619         d_products[prod_name + " (bin)"] = (install_dir, path_in_archive)
620         
621     for prod_name, source_dir in l_source_dir:
622         path_in_archive = os.path.join("SOURCES", prod_name)
623         d_products[prod_name + " (sources)"] = (source_dir, path_in_archive)
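    # at this point d_products maps display labels to (local path, path in archive) pairs,
    # e.g. (hypothetical entry): "KERNEL (bin)" -> ("/path/to/INSTALL/KERNEL", "BINARIES-XXX/KERNEL")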
624
625     # for packages of SALOME applications including KERNEL, 
626     # we produce a salome launcher or a virtual application (depending on salome version)
627     if 'KERNEL' in config.APPLICATION.products:
628         VersionSalome = src.get_salome_version(config)
629         # Case where SALOME has the launcher that uses the SalomeContext API
630         if VersionSalome >= 730:
631             # create the relative launcher and add it to the files to add
632             launcher_name = src.get_launcher_name(config)
633             launcher_package = produce_relative_launcher(config,
634                                                  logger,
635                                                  tmp_working_dir,
636                                                  launcher_name,
637                                                  binaries_dir_name,
638                                                  not(options.without_commercial))
639             d_products["launcher"] = (launcher_package, launcher_name)
640
641             # if the application contains mesa products, we generate in addition to the 
642             # classical salome launcher a launcher using mesa and called mesa_salome 
643             # (the mesa launcher will be used for remote usage through ssh).
644             if generate_mesa_launcher:
645                 #if there is one : store the use_mesa property
646                 restore_use_mesa_option=None
647                 if ('properties' in config.APPLICATION and 
648                     'use_mesa' in config.APPLICATION.properties):
649                     restore_use_mesa_option = config.APPLICATION.properties.use_mesa
650
651                 # activate mesa property, and generate a mesa launcher
652                 src.activate_mesa_property(config)  #activate use_mesa property
653                 launcher_mesa_name="mesa_"+launcher_name
654                 launcher_package_mesa = produce_relative_launcher(config,
655                                                      logger,
656                                                      tmp_working_dir,
657                                                      launcher_mesa_name,
658                                                      binaries_dir_name,
659                                                      not(options.without_commercial))
660                 d_products["launcher (mesa)"] = (launcher_package_mesa, launcher_mesa_name)
661
662                 # if there was a use_mesa value, we restore it
663                 # else we set it to the default value "no"
664                 if restore_use_mesa_option != None:
665                     config.APPLICATION.properties.use_mesa=restore_use_mesa_option
666                 else:
667                     config.APPLICATION.properties.use_mesa="no"
668
669             if options.sources:
670                 # if we mix binaries and sources, we add a copy of the launcher, 
671                 # prefixed with "bin", in order to avoid clashes
672                 d_products["launcher (copy)"] = (launcher_package, "bin"+launcher_name)
673         else:
674             # Provide a script for the creation of an application EDF style
675             appli_script = product_appli_creation_script(config,
676                                                         logger,
677                                                         tmp_working_dir,
678                                                         binaries_dir_name)
679             
680             d_products["appli script"] = (appli_script, "create_appli.py")
681
682     # Put also the environment file
683     env_file = produce_relative_env_files(config,
684                                            logger,
685                                            tmp_working_dir,
686                                            binaries_dir_name)
687
688     if src.architecture.is_windows():
689       filename  = "env_launch.bat"
690     else:
691       filename  = "env_launch.sh"
692     d_products["environment file"] = (env_file, filename)      
693
694     return d_products
695
696 def source_package(sat, config, logger, options, tmp_working_dir):
697     '''Prepare a dictionary that stores all the needed directories and files to
698        add in a source package.
699     
700     :param config Config: The global configuration.
701     :param logger Logger: the logging instance
702     :param options OptResult: the options of the launched command
703     :param tmp_working_dir str: The temporary local directory containing some 
704                                 specific directories or files needed in the 
705                                 source package
706     :return: the dictionary that stores all the needed directories and files to
707              add in a source package.
708              {label : (path_on_local_machine, path_in_archive)}
709     :rtype: dict
710     '''
711     
712     d_archives={}
713     # Get all the products that are prepared using an archive
714     # unless ftp mode is specified (in this case the user of the
715     # archive will get the sources through the ftp mode of sat prepare)
716     if not options.ftp:
717         logger.write("Find archive products ... ")
718         d_archives, l_pinfo_vcs = get_archives(config, logger)
719         logger.write("Done\n")
720
721     d_archives_vcs = {}
722     if not options.ftp and not options.with_vcs and len(l_pinfo_vcs) > 0:
723         # Make archives with the products that are not prepared using an archive
724         # (git, cvs, svn, etc)
725         logger.write("Construct archives for vcs products ... ")
726         d_archives_vcs = get_archives_vcs(l_pinfo_vcs,
727                                           sat,
728                                           config,
729                                           logger,
730                                           tmp_working_dir)
731         logger.write("Done\n")
732
733     # Create a project
734     logger.write("Create the project ... ")
735     d_project = create_project_for_src_package(config,
736                                                tmp_working_dir,
737                                                options.with_vcs,
738                                                options.ftp)
739     logger.write("Done\n")
740     
741     # Add salomeTools
742     tmp_sat = add_salomeTools(config, tmp_working_dir)
743     d_sat = {"salomeTools" : (tmp_sat, "salomeTools")}
744     
745     # Add a sat symbolic link if not win
746     if not src.architecture.is_windows():
747         tmp_satlink_path = os.path.join(tmp_working_dir, 'sat')
748         try:
749             t = os.getcwd()
750         except:
751             # In the jobs, os.getcwd() can fail
752             t = config.LOCAL.workdir
753         os.chdir(tmp_working_dir)
754         if os.path.lexists(tmp_satlink_path):
755             os.remove(tmp_satlink_path)
756         os.symlink(os.path.join('salomeTools', 'sat'), 'sat')
757         os.chdir(t)
758         
759         d_sat["sat link"] = (tmp_satlink_path, "sat")
760     
761     d_source = src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
762     return d_source
763
764 def get_archives(config, logger):
765     '''Find all the products that are retrieved using an archive and all the products
766        that are retrieved using a vcs (git, cvs, svn) repository.
767     
768     :param config Config: The global configuration.
769     :param logger Logger: the logging instance
770     :return: the dictionary {name_product : 
771              (local path of its archive, path in the package of its archive )}
772              and the list of specific configuration corresponding to the vcs 
773              products
774     :rtype: (Dict, List)
775     '''
776     # Get the list of product information
777     l_products_name = config.APPLICATION.products.keys()
778     l_product_info = src.product.get_products_infos(l_products_name,
779                                                     config)
780     d_archives = {}
781     l_pinfo_vcs = []
782     for p_name, p_info in l_product_info:
783         # skip product with property not_in_package set to yes
784         if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
785             continue  
786         # ignore the native and fixed products
787         if (src.product.product_is_native(p_info) 
788                 or src.product.product_is_fixed(p_info)):
789             continue
790         if p_info.get_source == "archive":
791             archive_path = p_info.archive_info.archive_name
792             archive_name = os.path.basename(archive_path)
793             d_archives[p_name] = (archive_path,
794                                   os.path.join(ARCHIVE_DIR, archive_name))
795             if (src.appli_test_property(config,"pip", "yes") and 
796                 src.product.product_test_property(p_info,"pip", "yes")):
797                 # if pip mode is activated, and product is managed by pip
798                 pip_wheels_dir=os.path.join(config.LOCAL.archive_dir,"wheels")
799                 pip_wheel_pattern=os.path.join(pip_wheels_dir, 
800                     "%s-%s*" % (p_info.name, p_info.version))
801                 DBG.write("pip_wheel_pattern", pip_wheel_pattern)
802                 pip_wheel_path=glob.glob(pip_wheel_pattern)
803                 msg_pip_not_found="Error in get_archives, pip wheel for "\
804                                   "product %s-%s was not found in %s directory"
805                 msg_pip_two_or_more="Error in get_archives, several pip wheels for "\
806                                   "product %s-%s were found in %s directory"
807                 if len(pip_wheel_path)==0:
808                     raise src.SatException(msg_pip_not_found %\
809                         (p_info.name, p_info.version, pip_wheels_dir))
810                 if len(pip_wheel_path)>1:
811                     raise src.SatException(msg_pip_two_or_more %\
812                         (p_info.name, p_info.version, pip_wheels_dir))
813
814                 pip_wheel_name=os.path.basename(pip_wheel_path[0])
815                 d_archives[p_name+" (pip wheel)"]=(pip_wheel_path[0], 
816                     os.path.join(ARCHIVE_DIR, "wheels", pip_wheel_name))
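                # for example, a product "numpy" in version "1.15.1" gives the pattern
                # "numpy-1.15.1*" and matches a wheel such as
                # numpy-1.15.1-cp36-cp36m-manylinux1_x86_64.whl (hypothetical file name)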
817         else:
818             # this product is not managed by archive, 
819             # an archive of the vcs directory will be created by get_archives_vcs
820             l_pinfo_vcs.append((p_name, p_info)) 
821             
822     return d_archives, l_pinfo_vcs
823
824 def add_salomeTools(config, tmp_working_dir):
825     '''Prepare a version of salomeTools that has a specific local.pyconf file 
826        configured for a source package.
827
828     :param config Config: The global configuration.
829     :param tmp_working_dir str: The temporary local directory containing some 
830                                 specific directories or files needed in the 
831                                 source package
832     :return: The path to the local salomeTools directory to add in the package
833     :rtype: str
834     '''
835     # Copy sat in the temporary working directory
836     sat_tmp_path = src.Path(os.path.join(tmp_working_dir, "salomeTools"))
837     sat_running_path = src.Path(config.VARS.salometoolsway)
838     sat_running_path.copy(sat_tmp_path)
839     
840     # Update the local.pyconf file that contains the path to the project
841     local_pyconf_name = "local.pyconf"
842     local_pyconf_dir = os.path.join(tmp_working_dir, "salomeTools", "data")
843     local_pyconf_file = os.path.join(local_pyconf_dir, local_pyconf_name)
844     # Remove the .pyconf file in the root directory of salomeTools if there is
845     # any. (For example when launching jobs, a pyconf file describing the jobs 
846     # can be here and is not useful) 
847     files_or_dir_SAT = os.listdir(os.path.join(tmp_working_dir, "salomeTools"))
848     for file_or_dir in files_or_dir_SAT:
849         if file_or_dir.endswith(".pyconf") or file_or_dir.endswith(".txt"):
850             file_path = os.path.join(tmp_working_dir,
851                                      "salomeTools",
852                                      file_or_dir)
853             os.remove(file_path)
854     
855     ff = open(local_pyconf_file, "w")
856     ff.write(LOCAL_TEMPLATE)
857     ff.close()
858     
859     return sat_tmp_path.path
860
861 def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir):
862     '''For source packages that require that all products are retrieved using an 
863        archive, one has to create some archive for the vcs products.
864        So this method calls the clean and source commands of sat and then creates
865        the archives.
866
867     :param l_pinfo_vcs List: The list of specific configuration corresponding to
868                              each vcs product
869     :param sat Sat: The Sat instance that can be called to clean and source the
870                     products
871     :param config Config: The global configuration.
872     :param logger Logger: the logging instance
873     :param tmp_working_dir str: The temporary local directory containing some 
874                                 specific directories or files needed in the 
875                                 source package
876     :return: the dictionary that stores all the archives to add in the source 
877              package. {label : (path_on_local_machine, path_in_archive)}
878     :rtype: dict
879     '''
880     # clean the source directory of all the vcs products, then use the source 
881     # command and thus construct an archive that will not contain the patches
882     l_prod_names = [pn for pn, __ in l_pinfo_vcs]
883     if False: # clean is dangerous in user/SOURCES, fixed in tmp_local_working_dir
884       logger.write(_("\nclean sources\n"))
885       args_clean = config.VARS.application
886       args_clean += " --sources --products "
887       args_clean += ",".join(l_prod_names)
888       logger.write("WARNING: get_archives_vcs clean\n         '%s'\n" % args_clean, 1)
889       sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)
890     if True:
891       # source
892       logger.write(_("get sources\n"))
893       args_source = config.VARS.application
894       args_source += " --products "
895       args_source += ",".join(l_prod_names)
896       svgDir = sat.cfg.APPLICATION.workdir
897       tmp_local_working_dir = os.path.join(sat.cfg.APPLICATION.workdir, "tmp_package")  # to avoid too many big files in /tmp
898       sat.cfg.APPLICATION.workdir = tmp_local_working_dir
899       # DBG.write("SSS sat config.APPLICATION.workdir", sat.cfg.APPLICATION, True)
900       # DBG.write("sat config id", id(sat.cfg), True)
901       # note: this config does not have the same id() as the one seen by sat.source()
902       # sat.source(args_source, batch=True, verbose=5, logger_add_link = logger)
903       import source
904       source.run(args_source, sat, logger) #use this mode as runner.cfg reference
905       
906       # make the new archives
907       d_archives_vcs = {}
908       for pn, pinfo in l_pinfo_vcs:
909           path_archive = make_archive(pn, pinfo, tmp_local_working_dir)
910           logger.write("make archive vcs '%s'\n" % path_archive)
911           d_archives_vcs[pn] = (path_archive,
912                                 os.path.join(ARCHIVE_DIR, pn + ".tgz"))
913       sat.cfg.APPLICATION.workdir = svgDir
914       # DBG.write("END sat config", sat.cfg.APPLICATION, True)
915     return d_archives_vcs
916
917 def make_archive(prod_name, prod_info, where):
918     '''Create an archive of a product by searching its source directory.
919
920     :param prod_name str: The name of the product.
921     :param prod_info Config: The specific configuration corresponding to the 
922                              product
923     :param where str: The path of the directory where to put the resulting 
924                       archive
925     :return: The path of the resulting archive
926     :rtype: str
927     '''
928     path_targz_prod = os.path.join(where, prod_name + ".tgz")
929     tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
930     local_path = prod_info.source_dir
931     tar_prod.add(local_path,
932                  arcname=prod_name,
933                  exclude=exclude_VCS_and_extensions)
934     tar_prod.close()
935     return path_targz_prod       
936
937 def create_project_for_src_package(config, tmp_working_dir, with_vcs, with_ftp):
938     '''Create a specific project for a source package.
939
940     :param config Config: The global configuration.
941     :param tmp_working_dir str: The temporary local directory containing some 
942                                 specific directories or files needed in the 
943                                 source package
944     :param with_vcs boolean: True if the package is with vcs products (not 
945                              transformed into archive products)
946     :param with_ftp boolean: True if the package uses ftp servers to get the archives
947     :return: The dictionary 
948              {"project" : (produced project, project path in the archive)}
949     :rtype: Dict
950     '''
951
952     # Create in the working temporary directory the full project tree
953     project_tmp_dir = os.path.join(tmp_working_dir, PROJECT_DIR)
954     products_pyconf_tmp_dir = os.path.join(project_tmp_dir,
955                                          "products")
956     compil_scripts_tmp_dir = os.path.join(project_tmp_dir,
957                                          "products",
958                                          "compil_scripts")
959     env_scripts_tmp_dir = os.path.join(project_tmp_dir,
960                                          "products",
961                                          "env_scripts")
962     patches_tmp_dir = os.path.join(project_tmp_dir,
963                                          "products",
964                                          "patches")
965     application_tmp_dir = os.path.join(project_tmp_dir,
966                                          "applications")
967     for directory in [project_tmp_dir,
968                       compil_scripts_tmp_dir,
969                       env_scripts_tmp_dir,
970                       patches_tmp_dir,
971                       application_tmp_dir]:
972         src.ensure_path_exists(directory)
973
974     # Create the pyconf that contains the information of the project
975     project_pyconf_name = "project.pyconf"        
976     project_pyconf_file = os.path.join(project_tmp_dir, project_pyconf_name)
977     ff = open(project_pyconf_file, "w")
978     ff.write(PROJECT_TEMPLATE)
979     if with_ftp and len(config.PATHS.ARCHIVEFTP) > 0:
980         ftp_path='ARCHIVEFTP : "'+config.PATHS.ARCHIVEFTP[0]
981         for ftpserver in config.PATHS.ARCHIVEFTP[1:]:
982             ftp_path=ftp_path+":"+ftpserver
983         ftp_path+='"'
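        # the resulting line looks like, e.g. (hypothetical servers):
        #   ARCHIVEFTP : "ftp://server1/archives:ftp://server2/archives"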
984         ff.write("# ftp servers where to search for prerequisite archives\n")
985         ff.write(ftp_path)
986     # add licence paths if any
987     if len(config.PATHS.LICENCEPATH) > 0:  
988         licence_path='LICENCEPATH : "'+config.PATHS.LICENCEPATH[0]
989         for path in config.PATHS.LICENCEPATH[1:]:
990             licence_path=licence_path+":"+path
991         licence_path+='"'
992         ff.write("\n# Where to search for licences\n")
993         ff.write(licence_path)
994         
995
996     ff.close()
997     
998     # Loop over the products to get their pyconf and all the scripts 
999     # (compilation, environment, patches)
1000     # and create the pyconf file to add to the project
1001     lproducts_name = config.APPLICATION.products.keys()
1002     l_products = src.product.get_products_infos(lproducts_name, config)
1003     for p_name, p_info in l_products:
1004         # skip product with property not_in_package set to yes
1005         if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
1006             continue  
1007         find_product_scripts_and_pyconf(p_name,
1008                                         p_info,
1009                                         config,
1010                                         with_vcs,
1011                                         compil_scripts_tmp_dir,
1012                                         env_scripts_tmp_dir,
1013                                         patches_tmp_dir,
1014                                         products_pyconf_tmp_dir)
1015     
1016     find_application_pyconf(config, application_tmp_dir)
1017     
1018     d_project = {"project" : (project_tmp_dir, PROJECT_DIR )}
1019     return d_project
1020
1021 def find_product_scripts_and_pyconf(p_name,
1022                                     p_info,
1023                                     config,
1024                                     with_vcs,
1025                                     compil_scripts_tmp_dir,
1026                                     env_scripts_tmp_dir,
1027                                     patches_tmp_dir,
1028                                     products_pyconf_tmp_dir):
1029     '''Create a specific pyconf file for a given product. Get its environment 
1030        script, its compilation script and patches and put it in the temporary
1031        working directory. This method is used in the source package in order to
1032        construct the specific project.
1033
1034     :param p_name str: The name of the product.
1035     :param p_info Config: The specific configuration corresponding to the 
1036                              product
1037     :param config Config: The global configuration.
1038     :param with_vcs boolean: True if the package is with vcs products (not 
1039                              transformed into archive products)
1040     :param compil_scripts_tmp_dir str: The path to the temporary compilation 
1041                                        scripts directory of the project.
1042     :param env_scripts_tmp_dir str: The path to the temporary environment script 
1043                                     directory of the project.
1044     :param patches_tmp_dir str: The path to the temporary patch scripts 
1045                                 directory of the project.
1046     :param products_pyconf_tmp_dir str: The path to the temporary product 
1047                                         scripts directory of the project.
1048     '''
1049     
1050     # read the pyconf of the product
1051     product_pyconf_path = src.find_file_in_lpath(p_name + ".pyconf",
1052                                            config.PATHS.PRODUCTPATH)
1053     product_pyconf_cfg = src.pyconf.Config(product_pyconf_path)
1054
1055     # find the compilation script if any
1056     if src.product.product_has_script(p_info):
1057         compil_script_path = src.Path(p_info.compil_script)
1058         compil_script_path.copy(compil_scripts_tmp_dir)
1059         product_pyconf_cfg[p_info.section].compil_script = os.path.basename(
1060                                                     p_info.compil_script)
1061     # find the environment script if any
1062     if src.product.product_has_env_script(p_info):
1063         env_script_path = src.Path(p_info.environ.env_script)
1064         env_script_path.copy(env_scripts_tmp_dir)
1065         product_pyconf_cfg[p_info.section].environ.env_script = os.path.basename(
1066                                                 p_info.environ.env_script)
1067     # find the patches if any
1068     if src.product.product_has_patches(p_info):
1069         patches = src.pyconf.Sequence()
1070         for patch_path in p_info.patches:
1071             p_path = src.Path(patch_path)
1072             p_path.copy(patches_tmp_dir)
1073             patches.append(os.path.basename(patch_path), "")
1074
1075         product_pyconf_cfg[p_info.section].patches = patches
1076     
1077     if with_vcs:
1078         # put in the pyconf file the resolved values
1079         for info in ["git_info", "cvs_info", "svn_info"]:
1080             if info in p_info:
1081                 for key in p_info[info]:
1082                     product_pyconf_cfg[p_info.section][info][key] = p_info[
1083                                                                       info][key]
1084     else:
1085         # if the product is not an archive product, turn it into one.
1086         if src.product.product_is_vcs(p_info):
1087             product_pyconf_cfg[p_info.section].get_source = "archive"
1088             if not "archive_info" in product_pyconf_cfg[p_info.section]:
1089                 product_pyconf_cfg[p_info.section].addMapping("archive_info",
1090                                         src.pyconf.Mapping(product_pyconf_cfg),
1091                                         "")
1092             product_pyconf_cfg[p_info.section
1093                               ].archive_info.archive_name = p_info.name + ".tgz"
1094     
1095     # write the pyconf file to the temporary project location
1096     product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir,
1097                                            p_name + ".pyconf")
1098     ff = open(product_tmp_pyconf_path, 'w')
1099     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1100     product_pyconf_cfg.__save__(ff, 1)
1101     ff.close()
1102
1103 def find_application_pyconf(config, application_tmp_dir):
1104     '''Find the application pyconf file and put it in the specific temporary 
1105        directory containing the specific project of a source package.
1106
1107     :param config Config: The global configuration.
1108     :param application_tmp_dir str: The path to the temporary application 
1109                                        scripts directory of the project.
1110     '''
1111     # read the pyconf of the application
1112     application_name = config.VARS.application
1113     application_pyconf_path = src.find_file_in_lpath(
1114                                             application_name + ".pyconf",
1115                                             config.PATHS.APPLICATIONPATH)
1116     application_pyconf_cfg = src.pyconf.Config(application_pyconf_path)
1117     
1118     # Change the workdir
1119     application_pyconf_cfg.APPLICATION.workdir = src.pyconf.Reference(
1120                                     application_pyconf_cfg,
1121                                     src.pyconf.DOLLAR,
1122                                     'VARS.salometoolsway + $VARS.sep + ".."')
1123
1124     # Prevent compilation in base
1125     application_pyconf_cfg.APPLICATION.no_base = "yes"
1126     
1127     # remove products that are not in config (they were filtered out by --without_properties)
1128     for product_name in application_pyconf_cfg.APPLICATION.products.keys():
1129         if product_name not in config.APPLICATION.products.keys():
1130             application_pyconf_cfg.APPLICATION.products.__delitem__(product_name)
1131
1132     # write the pyconf file to the temporary application location
1133     application_tmp_pyconf_path = os.path.join(application_tmp_dir,
1134                                                application_name + ".pyconf")
1135
1136     ff = open(application_tmp_pyconf_path, 'w')
1137     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1138     application_pyconf_cfg.__save__(ff, 1)
1139     ff.close()
1140
1141 def sat_package(config, tmp_working_dir, options, logger):
1142     '''Prepare a dictionary that stores all the needed directories and files to
1143        add in a salomeTools package.
1144     
1145     :param tmp_working_dir str: The temporary local working directory 
1146     :param options OptResult: the options of the launched command
1147     :return: the dictionary that stores all the needed directories and files to
1148              add in a salomeTools package.
1149              {label : (path_on_local_machine, path_in_archive)}
1150     :rtype: dict
1151     '''
1152     d_project = {}
1153
1154     # we include sat itself
1155     d_project["all_sat"]=(config.VARS.salometoolsway, "")
1156
1157     # and we overwrite local.pyconf with a clean version.
1158     local_pyconf_tmp_path = os.path.join(tmp_working_dir, "local.pyconf")
1159     local_file_path = os.path.join(config.VARS.datadir, "local.pyconf")
1160     local_cfg = src.pyconf.Config(local_file_path)
1161     local_cfg.PROJECTS.project_file_paths=src.pyconf.Sequence(local_cfg.PROJECTS)
1162     local_cfg.LOCAL["base"] = "default"
1163     local_cfg.LOCAL["workdir"] = "default"
1164     local_cfg.LOCAL["log_dir"] = "default"
1165     local_cfg.LOCAL["archive_dir"] = "default"
1166     local_cfg.LOCAL["VCS"] = "None"
1167     local_cfg.LOCAL["tag"] = src.get_salometool_version(config)
1168
1169     # if the archive contains a project, we write its relative path in local.pyconf
1170     if options.project:
1171         project_arch_path = os.path.join("projects", options.project, 
1172                                          os.path.basename(options.project_file_path))
1173         local_cfg.PROJECTS.project_file_paths.append(project_arch_path, "")
1174
1175     ff = open(local_pyconf_tmp_path, 'w')
1176     local_cfg.__save__(ff, 1)
1177     ff.close()
1178     d_project["local.pyconf"]=(local_pyconf_tmp_path, "data/local.pyconf")
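         # illustrative content of the returned dictionary (local paths depend on
         # the current installation):
         #   {"all_sat"      : ("<salometoolsway>", ""),
         #    "local.pyconf" : ("<tmp_working_dir>/local.pyconf", "data/local.pyconf")}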
1179     return d_project
1180     
1181
1182 def project_package(config, name_project, project_file_path, ftp_mode, tmp_working_dir, embedded_in_sat, logger):
1183     '''Prepare a dictionary that stores all the needed directories and files to
1184        add in a project package.
1185     
1186     :param project_file_path str: The path to the local project.
1187     :param ftp_mode boolean: Do not embed archives, the archive will rely on ftp mode to retrieve them.
1188     :param tmp_working_dir str: The temporary local directory containing some 
1189                                 specific directories or files needed in the 
1190                                 project package
1191     :param embedded_in_sat boolean : the project package is embedded in a sat package
1192     :return: the dictionary that stores all the needed directories and files to
1193              add in a project package.
1194              {label : (path_on_local_machine, path_in_archive)}
1195     :rtype: dict
1196     '''
1197     d_project = {}
1198     # Read the project file and get the directories to add to the package
1199     
1200     try: 
1201       project_pyconf_cfg = config.PROJECTS.projects.__getattr__(name_project)
1202     except Exception:
1203       logger.write("""
1204 WARNING: config.PROJECTS.projects.%s does not exist, trying to read it now from:\n%s\n""" % (name_project, project_file_path))
1205       project_pyconf_cfg = src.pyconf.Config(project_file_path)
1206       project_pyconf_cfg.PWD = os.path.dirname(project_file_path)
1207     
1208     paths = {"APPLICATIONPATH" : "applications",
1209              "PRODUCTPATH" : "products",
1210              "JOBPATH" : "jobs",
1211              "MACHINEPATH" : "machines"}
1212     if not ftp_mode:
1213         paths["ARCHIVEPATH"] = "archives"
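         # each key of 'paths' is a path entry of the project pyconf; the value is
         # the sub-directory that will hold its content in the archive
         # (e.g. PRODUCTPATH -> products)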
1214
1215     # Loop over the project paths and add them
1216     project_file_name = os.path.basename(project_file_path)
         # make sure project_file_dest is always defined, even if the project
         # declares none of the paths handled in the loop below
         if embedded_in_sat:
             project_file_dest = os.path.join("projects", name_project, project_file_name)
         else:
             project_file_dest = project_file_name
1217     for path in paths:
1218         if path not in project_pyconf_cfg:
1219             continue
1220         if embedded_in_sat:
1221             dest_path = os.path.join("projects", name_project, paths[path])
1222             project_file_dest = os.path.join("projects", name_project, project_file_name)
1223         else:
1224             dest_path = paths[path]
1225             project_file_dest = project_file_name
1226
1227         # Add the directory to the files to add in the package
1228         d_project[path] = (project_pyconf_cfg[path], dest_path)
1229
1230         # Modify the value of the path in the package
1231         project_pyconf_cfg[path] = src.pyconf.Reference(
1232                                     project_pyconf_cfg,
1233                                     src.pyconf.DOLLAR,
1234                                     'project_path + "/' + paths[path] + '"')
1235     
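         # at this point every exported path entry has been replaced by a reference
         # of the form $project_path + "/<sub-directory>", which keeps the packaged
         # project relocatable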
1236     # Modify some values
1237     if "project_path" not in project_pyconf_cfg:
1238         project_pyconf_cfg.addMapping("project_path",
1239                                       src.pyconf.Mapping(project_pyconf_cfg),
1240                                       "")
1241     project_pyconf_cfg.project_path = src.pyconf.Reference(project_pyconf_cfg,
1242                                                            src.pyconf.DOLLAR,
1243                                                            'PWD')
1244     # we don't want to export these two fields
1245     project_pyconf_cfg.__delitem__("file_path")
1246     project_pyconf_cfg.__delitem__("PWD")
1247     if ftp_mode:
1248         project_pyconf_cfg.__delitem__("ARCHIVEPATH")
1249     
1250     # Write the project pyconf file
1251     project_pyconf_tmp_path = os.path.join(tmp_working_dir, project_file_name)
1252     ff = open(project_pyconf_tmp_path, 'w')
1253     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1254     project_pyconf_cfg.__save__(ff, 1)
1255     ff.close()
1256     d_project["Project hat file"] = (project_pyconf_tmp_path, project_file_dest)
1257     
1258     return d_project
1259
1260 def add_readme(config, options, where):
1261     readme_path = os.path.join(where, "README")
1262     with codecs.open(readme_path, "w", 'utf-8') as f:
1263
1264         # templates for building the header
1265         readme_header="""
1266 # This package was generated with sat $version
1267 # Date: $date
1268 # User: $user
1269 # Distribution : $dist
1270
1271 In the following, $$ROOT represents the directory where you have installed 
1272 SALOME (the directory where this file is located).
1273
1274 """
1275         readme_compilation_with_binaries="""
1276
1277 compilation based on the binaries used as prerequisites
1278 =======================================================
1279
1280 If you fail to compile the complete application (for example because
1281 you are not root on your system and cannot install missing packages), you
1282 may try a partial compilation based on the binaries.
1283 For that it is necessary to copy the binaries from BINARIES to INSTALL,
1284 and do some substitutions on cmake and .la files (replace the build directories
1285 with local paths).
1286 The procedure to do it is:
1287  1) Remove or rename INSTALL directory if it exists
1288  2) Execute the shell script install_bin.sh:
1289  > cd $ROOT
1290  > ./install_bin.sh
1291  3) Use salomeTools (as explained in the Sources section) and compile only the 
1292     modules you need (with the -p option)
1293
1294 """
1295         readme_header_tpl=string.Template(readme_header)
1296         readme_template_path_bin = os.path.join(config.VARS.internal_dir,
1297                 "README_BIN.template")
1298         readme_template_path_bin_launcher = os.path.join(config.VARS.internal_dir,
1299                 "README_LAUNCHER.template")
1300         readme_template_path_bin_virtapp = os.path.join(config.VARS.internal_dir,
1301                 "README_BIN_VIRTUAL_APP.template")
1302         readme_template_path_src = os.path.join(config.VARS.internal_dir,
1303                 "README_SRC.template")
1304         readme_template_path_pro = os.path.join(config.VARS.internal_dir,
1305                 "README_PROJECT.template")
1306         readme_template_path_sat = os.path.join(config.VARS.internal_dir,
1307                 "README_SAT.template")
1308
1309         # prepare substitution dictionary
1310         d = dict()
1311         d['user'] = config.VARS.user
1312         d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
1313         d['version'] = src.get_salometool_version(config)
1314         d['dist'] = config.VARS.dist
1315         f.write(readme_header_tpl.substitute(d)) # write the general header (common)
1316
1317         if options.binaries or options.sources:
1318             d['application'] = config.VARS.application
1319             f.write("# Application: " + d['application'] + "\n")
1320             if 'KERNEL' in config.APPLICATION.products:
1321                 VersionSalome = src.get_salome_version(config)
1322                 # Case where SALOME has the launcher that uses the SalomeContext API
1323                 if VersionSalome >= 730:
1324                     d['launcher'] = config.APPLICATION.profile.launcher_name
1325                 else:
1326                     d['virtual_app'] = 'runAppli' # this info is not used for now
1327
1328         # write the specific sections
1329         if options.binaries:
1330             f.write(src.template.substitute(readme_template_path_bin, d))
1331             if "virtual_app" in d:
1332                 f.write(src.template.substitute(readme_template_path_bin_virtapp, d))
1333             if "launcher" in d:
1334                 f.write(src.template.substitute(readme_template_path_bin_launcher, d))
1335
1336         if options.sources:
1337             f.write(src.template.substitute(readme_template_path_src, d))
1338
1339         if options.binaries and options.sources:
1340             f.write(readme_compilation_with_binaries)
1341
1342         if options.project:
1343             f.write(src.template.substitute(readme_template_path_pro, d))
1344
1345         if options.sat:
1346             f.write(src.template.substitute(readme_template_path_sat, d))
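             # the resulting README is the common header followed by one section per
             # selected package type (binaries, sources, project, sat)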
1347     
1348     return readme_path
1349
1350 def update_config(config, prop, value):
1351     '''Remove from config.APPLICATION.products the products that have the property given as input.
1352     
1353     :param config Config: The global config.
1354     :param prop str: The property to filter
1355     :param value str: The value of the property to filter
1356     '''
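         # typical call (illustrative, see its use in run() below):
         #   update_config(config, "not_in_package", "yes")
         # removes from config.APPLICATION.products every product that declares
         # the property not_in_package : "yes"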
1357     # if there is no APPLICATION (e.g. sat package -t): nothing to do
1358     if "APPLICATION" in config:
1359         l_product_to_remove = []
1360         for product_name in config.APPLICATION.products.keys():
1361             prod_cfg = src.product.get_product_config(config, product_name)
1362             if src.get_property_in_product_cfg(prod_cfg, prop) == value:
1363                 l_product_to_remove.append(product_name)
1364         for product_name in l_product_to_remove:
1365             config.APPLICATION.products.__delitem__(product_name)
1366
1367 def description():
1368     '''method that is called when salomeTools is called with --help option.
1369     
1370     :return: The text to display for the package command description.
1371     :rtype: str
1372     '''
1373     return _("""
1374 The package command creates a tar file archive of a product.
1375 There are four kinds of archive, which can be mixed:
1376
1377  1 - The binary archive. 
1378      It contains the product installation directories plus a launcher.
1379  2 - The sources archive. 
1380      It contains the product source archives and a project (the application plus salomeTools).
1381  3 - The project archive. 
1382      It contains a project (give the project file path as argument).
1383  4 - The salomeTools archive. 
1384      It contains the salomeTools utility itself.
1385
1386 example:
1387  >> sat package SALOME-master --binaries --sources""")
1388   
1389 def run(args, runner, logger):
1390     '''method that is called when salomeTools is called with package parameter.
1391     '''
1392     
1393     # Parse the options
1394     (options, args) = parser.parse_args(args)
1395
1396     # Check that at least one type of package is required
1397     all_option_types = (options.binaries,
1398                         options.sources,
1399                         options.project not in ["", None],
1400                         options.sat)
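         # all_option_types is a tuple of booleans: counting its True values below
         # tells how many package types were requested (several types can be combined)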
1401
1402     # Check if no option for package type
1403     if all_option_types.count(True) == 0:
1404         msg = _("Error: Specify a type for the package\nUse at least one of the "
1405                 "following options: --binaries, --sources, --project or"
1406                 " --salometools")
1407         logger.write(src.printcolors.printcError(msg), 1)
1408         logger.write("\n", 1)
1409         return 1
1410     
1411     # The default directory where to put the package if it is neither binary nor source
1412     package_default_path = runner.cfg.LOCAL.workdir
1413     
1414     # if the package contains binaries or sources:
1415     if options.binaries or options.sources:
1416         # Check that the command has been called with an application
1417         src.check_config_has_application(runner.cfg)
1418
1419         # Display information
1420         logger.write(_("Packaging application %s\n") % src.printcolors.printcLabel(
1421                                                     runner.cfg.VARS.application), 1)
1422         
1423         # Get the default directory where to put the packages
1424         package_default_path = os.path.join(runner.cfg.APPLICATION.workdir, "PACKAGE")
1425         src.ensure_path_exists(package_default_path)
1426         
1427     # if the package contains a project:
1428     if options.project:
1429         # check that the project is visible to SAT
1430         projectNameFile = options.project + ".pyconf"
1431         foundProject = None
1432         for i in runner.cfg.PROJECTS.project_file_paths:
1433             baseName = os.path.basename(i)
1434             if baseName == projectNameFile:
1435                 foundProject = i
1436                 break
1437
1438         if foundProject is None:
1439             local_path = os.path.join(runner.cfg.VARS.salometoolsway, "data", "local.pyconf")
1440             msg = _("""ERROR: the project %(1)s is not visible to salomeTools.
1441 Known projects are:
1442 %(2)s
1443
1444 Please add it in file:
1445 %(3)s""" % \
1446                     {"1": options.project, "2": "\n  ".join(runner.cfg.PROJECTS.project_file_paths), "3": local_path})
1447             logger.write(src.printcolors.printcError(msg), 1)
1448             logger.write("\n", 1)
1449             return 1
1450         else:
1451             options.project_file_path = foundProject
1452             src.printcolors.print_value(logger, "Project path", options.project_file_path, 2)
1453     
1454     # Remove the products that are filtered by the --without_properties option
1455     if options.without_properties:
1456         app = runner.cfg.APPLICATION
1457         logger.trace("without_properties all products:\n %s\n" % PP.pformat(sorted(app.products.keys())))
1458         prop, value = options.without_properties
1459         update_config(runner.cfg, prop, value)
1460         logger.warning("without_properties selected products:\n %s\n" % PP.pformat(sorted(app.products.keys())))
1461
1462     # Remove from config the products that have the not_in_package property
1463     update_config(runner.cfg, "not_in_package", "yes")
1464     
1465     # get the name of the archive or build it
1466     if options.name:
1467         if os.path.basename(options.name) == options.name:
1468             # only a name (not a path)
1469             archive_name = options.name           
1470             dir_name = package_default_path
1471         else:
1472             archive_name = os.path.basename(options.name)
1473             dir_name = os.path.dirname(options.name)
1474         
1475         # remove the archive extension if it was given
1476         if archive_name[-len(".tgz"):] == ".tgz":
1477             archive_name = archive_name[:-len(".tgz")]
1478         if archive_name[-len(".tar.gz"):] == ".tar.gz":
1479             archive_name = archive_name[:-len(".tar.gz")]
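             # e.g. --name /tmp/MYAPP.tar.gz (illustrative) gives
             # dir_name = "/tmp" and archive_name = "MYAPP"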
1480         
1481     else:
1482         archive_name=""
1483         dir_name = package_default_path
1484         if options.binaries or options.sources:
1485             archive_name = runner.cfg.APPLICATION.name
1486
1487         if options.binaries:
1488             archive_name += "-"+runner.cfg.VARS.dist
1489             
1490         if options.sources:
1491             archive_name += "-SRC"
1492             if options.with_vcs:
1493                 archive_name += "-VCS"
1494
1495         if options.sat:
1496             archive_name += ("salomeTools_" + src.get_salometool_version(runner.cfg))
1497
1498         if options.project:
1499             if options.sat:
1500                 archive_name += "_" 
1501             project_name = options.project
1502             archive_name += ("satproject_" + project_name)
1503  
1504         if len(archive_name)==0: # no option worked 
1505             msg = _("Error: Cannot name the archive\n"
1506                     " check that at least one of the following options was "
1507                     "selected: --binaries, --sources, --project or"
1508                     " --salometools")
1509             logger.write(src.printcolors.printcError(msg), 1)
1510             logger.write("\n", 1)
1511             return 1
1512  
1513     path_targz = os.path.join(dir_name, archive_name + ".tgz")
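         # e.g. for a binary package the result is typically something like
         #   <APPLICATION.workdir>/PACKAGE/<APPLICATION.name>-<VARS.dist>.tgz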
1514     
1515     src.printcolors.print_value(logger, "Package path", path_targz, 2)
1516
1517     # Create a working directory for all files that are produced during the
1518     # package creation and that will be removed at the end of the command
1519     tmp_working_dir = os.path.join(runner.cfg.VARS.tmp_root, runner.cfg.VARS.datehour)
1520     src.ensure_path_exists(tmp_working_dir)
1521     logger.write("\n", 5)
1522     logger.write(_("The temporary working directory: %s\n") % tmp_working_dir, 5)
1523     
1524     logger.write("\n", 3)
1525
1526     msg = _("Preparation of files to add to the archive")
1527     logger.write(src.printcolors.printcLabel(msg), 2)
1528     logger.write("\n", 2)
1529     
1530     d_files_to_add={}  # content of the archive
1531
1532     # a dict to hold the paths that will need to be substituted for user recompilations
1533     d_paths_to_substitute={}  
1534
1535     if options.binaries:
1536         d_bin_files_to_add = binary_package(runner.cfg,
1537                                             logger,
1538                                             options,
1539                                             tmp_working_dir)
1540         # for all binaries directories, store the substitution that will be required 
1541         # for extra compilations
1542         for key in d_bin_files_to_add:
1543             if key.endswith("(bin)"):
1544                 source_dir = d_bin_files_to_add[key][0]
1545                 path_in_archive = d_bin_files_to_add[key][1].replace("BINARIES-" +\
1546                    runner.cfg.VARS.dist,runner.cfg.INTERNAL.config.install_dir)
1547                 if os.path.basename(source_dir)==os.path.basename(path_in_archive):
1548                     # if basename is the same we will just substitute the dirname 
1549                     d_paths_to_substitute[os.path.dirname(source_dir)]=\
1550                         os.path.dirname(path_in_archive)
1551                 else:
1552                     d_paths_to_substitute[source_dir]=path_in_archive
1553
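             # illustrative effect of the loop above: each local product binary
             # directory is mapped to its counterpart below INTERNAL.config.install_dir
             # in the archive; these pairs feed install_bin.sh (generated below when
             # --sources is also selected) to patch the cmake and .la files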
1554         d_files_to_add.update(d_bin_files_to_add)
1555
1556     if options.sources:
1557         d_files_to_add.update(source_package(runner,
1558                                         runner.cfg,
1559                                         logger, 
1560                                         options,
1561                                         tmp_working_dir))
1562         if options.binaries:
1563             # for archives with bin and sources we provide a shell script able to 
1564             # install binaries for compilation
1565             file_install_bin=produce_install_bin_file(runner.cfg,logger,
1566                                                       tmp_working_dir,
1567                                                       d_paths_to_substitute,
1568                                                       "install_bin.sh")
1569             d_files_to_add.update({"install_bin" : (file_install_bin, "install_bin.sh")})
1570             logger.write("substitutions that need to be done later : \n", 5)
1571             logger.write(str(d_paths_to_substitute), 5)
1572             logger.write("\n", 5)
1573     else:
1574         # the --salometools option is not considered when --sources is selected, as the
1575         # sources archive already embeds salomeTools
1576         if options.sat:
1577             d_files_to_add.update(sat_package(runner.cfg, tmp_working_dir, 
1578                                   options, logger))
1579         
1580     if options.project:
1581         DBG.write("config for package %s" % project_name, runner.cfg)
1582         d_files_to_add.update(project_package(runner.cfg, project_name, options.project_file_path, options.ftp, tmp_working_dir, options.sat, logger))
1583
1584     if not d_files_to_add:
1585         msg = _("Error: Empty dictionary to build the archive!\n")
1586         logger.write(src.printcolors.printcError(msg), 1)
1587         logger.write("\n", 1)
1588         return 1
1589
1590     # Add the README file in the package
1591     local_readme_tmp_path = add_readme(runner.cfg, options, tmp_working_dir)
1592     d_files_to_add["README"] = (local_readme_tmp_path, "README")
1593
1594     # Add the additional files of option add_files
1595     if options.add_files:
1596         for file_path in options.add_files:
1597             if not os.path.exists(file_path):
1598                 msg = _("WARNING: the file %s is not accessible.\n" % file_path)
                     # report the warning to the user before skipping the file
                     logger.write(msg, 1)
1599                 continue
1600             file_name = os.path.basename(file_path)
1601             d_files_to_add[file_name] = (file_path, file_name)
1602
1603     logger.write("\n", 2)
1604     logger.write(src.printcolors.printcLabel(_("Actually do the package")), 2)
1605     logger.write("\n", 2)
1606     logger.write("\nfiles and directories to add:\n%s\n\n" % PP.pformat(d_files_to_add), 5)
1607
1608     res = 0
1609     try:
1610         # Create the tarfile object
1611         tar = tarfile.open(path_targz, mode='w:gz')
1612         
1613         # get the filtering function if needed
1614         filter_function = exclude_VCS_and_extensions
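             # the filter excludes VCS metadata and unwanted file extensions
             # (cf. the IGNORED_DIRS and IGNORED_EXTENSIONS constants of this module)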
1615
1616         # Add the files to the tarfile object
1617         res = add_files(tar, archive_name, d_files_to_add, logger, f_exclude=filter_function)
1618         tar.close()
1619     except KeyboardInterrupt:
1620         logger.write(src.printcolors.printcError("\nERROR: forced interruption\n"), 1)
1621         logger.write(_("Removing the temporary working directory '%s'... ") % tmp_working_dir, 1)
1622         # remove the working directory
1623         shutil.rmtree(tmp_working_dir)
1624         logger.write(_("OK"), 1)
1625         logger.write(_("\n"), 1)
1626         return 1
1627     
1628     # case where there is no application, e.g. only sat is packaged ('sat package -t')
1629     try:
1630         app = runner.cfg.APPLICATION
1631     except Exception:
1632         app = None
1633
1634     # unconditionally remove the tmp_local_working_dir
1635     if app is not None:
1636         tmp_local_working_dir = os.path.join(app.workdir, "tmp_package")
1637         if os.path.isdir(tmp_local_working_dir):
1638             shutil.rmtree(tmp_local_working_dir)
1639
1640     # remove the tmp directory, unless the user is registered as a developer
1641     if os.path.isdir(tmp_working_dir) and (not DBG.isDeveloper()):
1642         shutil.rmtree(tmp_working_dir)
1643     
1644     # Print again the path of the package
1645     logger.write("\n", 2)
1646     src.printcolors.print_value(logger, "Package path", path_targz, 2)
1647     
1648     return res