scs #13189: port of the changes so that SAT works under Windows
[tools/sat.git] / commands/package.py
1 #!/usr/bin/env python
2 #-*- coding:utf-8 -*-
3 #  Copyright (C) 2010-2012  CEA/DEN
4 #
5 #  This library is free software; you can redistribute it and/or
6 #  modify it under the terms of the GNU Lesser General Public
7 #  License as published by the Free Software Foundation; either
8 #  version 2.1 of the License.
9 #
10 #  This library is distributed in the hope that it will be useful,
11 #  but WITHOUT ANY WARRANTY; without even the implied warranty of
12 #  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13 #  Lesser General Public License for more details.
14 #
15 #  You should have received a copy of the GNU Lesser General Public
16 #  License along with this library; if not, write to the Free Software
17 #  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
18
19 import os
20 import stat
21 import shutil
22 import datetime
23 import tarfile
24 import codecs
25 import string
26 import pprint as PP
27
28 import src
29
30 from application import get_SALOME_modules
31 import src.debug as DBG
32
33 BINARY = "binary"
34 SOURCE = "Source"
35 PROJECT = "Project"
36 SAT = "Sat"
37
38 ARCHIVE_DIR = "ARCHIVES"
39 PROJECT_DIR = "PROJECT"
40
41 IGNORED_DIRS = [".git", ".svn"]
42 IGNORED_EXTENSIONS = []
43
44 PROJECT_TEMPLATE = """#!/usr/bin/env python
45 #-*- coding:utf-8 -*-
46
47 # The path to the archive root directory
48 root_path : $PWD + "/../"
49 # path to the PROJECT
50 project_path : $PWD + "/"
51
52 # Where to search the archives of the products
53 ARCHIVEPATH : $root_path + "ARCHIVES"
54 # Where to search the pyconf of the applications
55 APPLICATIONPATH : $project_path + "applications/"
56 # Where to search the pyconf of the products
57 PRODUCTPATH : $project_path + "products/"
58 # Where to search the pyconf of the jobs of the project
59 JOBPATH : $project_path + "jobs/"
60 # Where to search the pyconf of the machines of the project
61 MACHINEPATH : $project_path + "machines/"
62 """
63
64 LOCAL_TEMPLATE = ("""#!/usr/bin/env python
65 #-*- coding:utf-8 -*-
66
67   LOCAL :
68   {
69     base : 'default'
70     workdir : 'default'
71     log_dir : 'default'
72     archive_dir : 'default'
73     VCS : None
74     tag : None
75   }
76
77 PROJECTS :
78 {
79 project_file_paths : [$VARS.salometoolsway + $VARS.sep + \"..\" + $VARS.sep"""
80 """ + \"""" + PROJECT_DIR + """\" + $VARS.sep + "project.pyconf"]
81 }
82 """)
83
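# Note: once the string literals above are concatenated, project_file_paths
# resolves to the following single pyconf line (PROJECT_DIR keeping its default
# value "PROJECT"):
#
#   project_file_paths : [$VARS.salometoolsway + $VARS.sep + ".." + $VARS.sep + "PROJECT" + $VARS.sep + "project.pyconf"]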
84 # Define all possible options for the package command: sat package <options>
85 parser = src.options.Options()
86 parser.add_option('b', 'binaries', 'boolean', 'binaries',
87     _('Optional: Produce a binary package.'), False)
88 parser.add_option('f', 'force_creation', 'boolean', 'force_creation',
89     _('Optional: Only binary package: produce the archive even if '
90       'there are some missing products.'), False)
91 parser.add_option('s', 'sources', 'boolean', 'sources',
92     _('Optional: Produce a compilable archive of the sources of the '
93       'application.'), False)
94 parser.add_option('', 'with_vcs', 'boolean', 'with_vcs',
95     _('Optional: Do not make archive for products in VCS mode (git, cvs, svn). ' 
96       'Sat prepare will use VCS mode instead to retrieve them'),
97     False)
98 parser.add_option('', 'ftp', 'boolean', 'ftp',
99     _('Optional: Do not embed archives for products in archive mode. '
100     'Sat prepare will use ftp instead to retrieve them.'),
101     False)
102 parser.add_option('p', 'project', 'string', 'project',
103     _('Optional: Produce an archive that contains a project.'), "")
104 parser.add_option('t', 'salometools', 'boolean', 'sat',
105     _('Optional: Produce an archive that contains salomeTools.'), False)
106 parser.add_option('n', 'name', 'string', 'name',
107     _('Optional: The name or full path of the archive.'), None)
108 parser.add_option('', 'add_files', 'list2', 'add_files',
109     _('Optional: The list of additional files to add to the archive.'), [])
110 parser.add_option('', 'without_commercial', 'boolean', 'without_commercial',
111     _('Optional: do not add commercial licence.'), False)
112 parser.add_option('', 'without_properties', 'properties', 'without_properties',
113     _('Optional: Filter the products by their properties.\n\tSyntax: '
114       '--without_properties <property>:<value>'))
115
116
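# Illustrative invocations of the options declared above; the application and
# project names are placeholders, not configurations shipped with sat:
#
#   sat package MY_APPLICATION --binaries --name /tmp/MY_APPLICATION-bin.tgz
#   sat package MY_APPLICATION --sources --with_vcs
#   sat package --salometools --project MY_PROJECT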
117 def add_files(tar, name_archive, d_content, logger, f_exclude=None):
118     '''Create an archive containing all directories and files that are given in
119        the d_content argument.
120     
121     :param tar tarfile: The tarfile instance used to make the archive.
122     :param name_archive str: The name of the archive to make.
123     :param d_content dict: The dictionary that contains all directories and files
124                            to add in the archive.
125                            d_content[label] = 
126                                         (path_on_local_machine, path_in_archive)
127     :param logger Logger: the logging instance
128     :param f_exclude Function: the function used to filter out unwanted files
129     :return: 0 on success, 1 otherwise.
130     :rtype: int
131     '''
132     # get the max length of the labels in order to align the display
133     max_len = len(max(d_content.keys(), key=len))
134     
135     success = 0
136     # loop over each directory or file stored in the d_content dictionary
137     names = sorted(d_content.keys())
138     DBG.write("add tar names", names)
139
140     for name in names:
141         # display information
142         len_points = max_len - len(name) + 3
143         local_path, archive_path = d_content[name]
144         in_archive = os.path.join(name_archive, archive_path)
145         logger.write(name + " " + len_points * "." + " "+ in_archive + " ", 3)
146         # Get the local path and the path in archive 
147         # of the directory or file to add
148         # Add it in the archive
149         try:
150             tar.add(local_path, arcname=in_archive, exclude=f_exclude)
151             logger.write(src.printcolors.printcSuccess(_("OK")), 3)
152         except Exception as e:
153             logger.write(src.printcolors.printcError(_("KO ")), 3)
154             logger.write(str(e), 3)
155             success = 1
156         logger.write("\n", 3)
157     return success
158
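# Minimal usage sketch for add_files(), kept as a comment so the module behaviour
# is unchanged. The paths and the bare-bones logger are hypothetical stand-ins
# (the real command passes a src Logger and real install directories):
#
#   import tarfile
#
#   class _PrintLogger:
#       def write(self, msg, level=3):
#           print(msg, end="")
#
#   d_content = {"KERNEL (bin)": ("/tmp/INSTALL/KERNEL", "BINARIES-XX/KERNEL")}
#   with tarfile.open("/tmp/demo.tgz", mode="w:gz") as tar:
#       res = add_files(tar, "demo", d_content, _PrintLogger(),
#                       f_exclude=exclude_VCS_and_extensions)  # res == 0 on success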
159 def exclude_VCS_and_extensions(filename):
160     ''' The filter function used to exclude from the package the links to the 
161         VCS repositories (like .git) and the files with ignored extensions.
162
163     :param filename Str: The filename to exclude (or not).
164     :return: True if the file has to be excluded
165     :rtype: Boolean
166     '''
167     for dir_name in IGNORED_DIRS:
168         if dir_name in filename:
169             return True
170     for extension in IGNORED_EXTENSIONS:
171         if filename.endswith(extension):
172             return True
173     return False
174
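# Expected behaviour of the filter above, for illustration (paths are hypothetical):
#
#   exclude_VCS_and_extensions("/tmp/SOURCES/KERNEL/.git/config")  -> True   (".git" in path)
#   exclude_VCS_and_extensions("/tmp/SOURCES/KERNEL/src/Main.cxx") -> False  (kept in the archive)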
175 def produce_relative_launcher(config,
176                               logger,
177                               file_dir,
178                               file_name,
179                               binaries_dir_name,
180                               with_commercial=True):
181     '''Create a specific SALOME launcher for the binary package. This launcher 
182        uses relative paths.
183     
184     :param config Config: The global configuration.
185     :param logger Logger: the logging instance
186     :param file_dir str: the directory where to put the launcher
187     :param file_name str: The launcher name
188     :param binaries_dir_name str: the name of the directory in the archive 
189                                   that contains the binaries.
190     :return: the path of the produced launcher
191     :rtype: str
192     '''
193     
194     # get KERNEL installation path 
195     kernel_root_dir = os.path.join(binaries_dir_name, "KERNEL")
196
197     # set kernel bin dir (considering fhs property)
198     kernel_cfg = src.product.get_product_config(config, "KERNEL")
199     if src.get_property_in_product_cfg(kernel_cfg, "fhs"):
200         bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin") 
201     else:
202         bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin","salome") 
203
204     # check if the application contains an application module
205     # check also if the application has a distene product, 
206     # in this case get its licence file name
207     l_product_info = src.product.get_products_infos(config.APPLICATION.products.keys(), config)
208     salome_application_name="Not defined" 
209     distene_licence_file_name=False
210     for prod_name, prod_info in l_product_info:
211         # look for a "salome application" and a distene product
212         if src.get_property_in_product_cfg(prod_info, "is_distene") == "yes":
213             distene_licence_file_name = src.product.product_has_licence(prod_info, 
214                                             config.PATHS.LICENCEPATH) 
215         if src.get_property_in_product_cfg(prod_info, "is_salome_application") == "yes":
216             salome_application_name=prod_info.name
217
218     # if the application contains an application module, we set ABSOLUTE_APPLI_PATH to it
219     # if not we set it to KERNEL_INSTALL_DIR, which is sufficient, except for salome test
220     if salome_application_name == "Not defined":
221         app_root_dir=kernel_root_dir
222     else:
223         app_root_dir=os.path.join(binaries_dir_name, salome_application_name)
224
225     # Get the launcher template and do substitutions
226     if "python3" in config.APPLICATION and config.APPLICATION.python3 == "yes":
227         withProfile = src.fileEnviron.withProfile3
228     else:
229         withProfile = src.fileEnviron.withProfile
230
231     withProfile = withProfile.replace(
232         "ABSOLUTE_APPLI_PATH'] = 'KERNEL_INSTALL_DIR'",
233         "ABSOLUTE_APPLI_PATH'] = out_dir_Path + '" + config.VARS.sep + app_root_dir + "'")
234     withProfile = withProfile.replace(
235         " 'BIN_KERNEL_INSTALL_DIR'",
236         " out_dir_Path + '" + config.VARS.sep + bin_kernel_install_dir + "'")
237
238     before, after = withProfile.split("# here your local standalone environment\n")
239
240     # create an environment file writer
241     writer = src.environment.FileEnvWriter(config,
242                                            logger,
243                                            file_dir,
244                                            src_root=None)
245     
246     filepath = os.path.join(file_dir, file_name)
247     # open the file and write into it
248     launch_file = open(filepath, "w")
249     launch_file.write(before)
250     # Write
251     writer.write_cfgForPy_file(launch_file,
252                                for_package = binaries_dir_name,
253                                with_commercial=with_commercial)
254     launch_file.write(after)
255     launch_file.close()
256     
257     # Little hack to put out_dir_Path outside the strings
258     src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' )
259     
260     # A hack to put a call to a file for distene licence.
261     # It does nothing to an application that has no distene product
262     if distene_licence_file_name:
263         logger.write("Application has a distene licence file! We use it in package launcher", 5)
264         hack_for_distene_licence(filepath, distene_licence_file_name)
265        
266     # change the rights in order to make the file executable for everybody
267     os.chmod(filepath,
268              stat.S_IRUSR |
269              stat.S_IRGRP |
270              stat.S_IROTH |
271              stat.S_IWUSR |
272              stat.S_IXUSR |
273              stat.S_IXGRP |
274              stat.S_IXOTH)
275
276     return filepath
277
278 def hack_for_distene_licence(filepath, licence_file):
279     '''Replace the distene licence env variable by a call to a file.
280     
281     :param filepath Str: The path to the launcher to modify.
282     '''  
283     shutil.move(filepath, filepath + "_old")
284     fileout= filepath
285     filein = filepath + "_old"
286     fin = open(filein, "r")
287     fout = open(fileout, "w")
288     text = fin.readlines()
289     # Find the Distene section
290     num_line = -1
291     for i,line in enumerate(text):
292         if "# Set DISTENE License" in line:
293             num_line = i
294             break
295     if num_line == -1:
296         # No distene product, there is nothing to do
297         fin.close()
298         for line in text:
299             fout.write(line)
300         fout.close()
301         return
302     del text[num_line +1]
303     del text[num_line +1]
304     text_to_insert ="""    try:
305         distene_licence_file="%s"
306         if sys.version_info[0] >= 3 and sys.version_info[1] >= 5:
307             import importlib.util
308             spec_dist = importlib.util.spec_from_file_location("distene_licence", distene_licence_file)
309             distene=importlib.util.module_from_spec(spec_dist)
310             spec_dist.loader.exec_module(distene)
311         else:
312             import imp
313             distene = imp.load_source('distene_licence', distene_licence_file)
314         distene.set_distene_variables(context)
315     except:
316         pass\n"""  % licence_file
317     text.insert(num_line + 1, text_to_insert)
318     for line in text:
319         fout.write(line)
320     fin.close()    
321     fout.close()
322     return
323     
324 def produce_relative_env_files(config,
325                               logger,
326                               file_dir,
327                               binaries_dir_name):
328     '''Create some specific environment files for the binary package. These 
329        files use relative paths.
330     
331     :param config Config: The global configuration.
332     :param logger Logger: the logging instance
333     :param file_dir str: the directory where to put the files
334     :param binaries_dir_name str: the name of the directory in the archive 
335                                   that contains the binaries.
336     :return: the path of the produced environment file
337     :rtype: str
338     '''  
339     # create an environment file writer
340     writer = src.environment.FileEnvWriter(config,
341                                            logger,
342                                            file_dir,
343                                            src_root=None)
344     
345     if src.architecture.is_windows():
346       shell = "bat"
347       filename  = "env_launch.bat"
348     else:
349       shell = "bash"
350       filename  = "env_launch.sh"
351
352     # Write
353     filepath = writer.write_env_file(filename,
354                           False, # for launch
355                           shell,
356                           for_package = binaries_dir_name)
357
358     # Little hack to put out_dir_Path as environment variable
359     src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )
360
361     # change the rights in order to make the file executable for everybody
362     os.chmod(filepath,
363              stat.S_IRUSR |
364              stat.S_IRGRP |
365              stat.S_IROTH |
366              stat.S_IWUSR |
367              stat.S_IXUSR |
368              stat.S_IXGRP |
369              stat.S_IXOTH)
370     
371     return filepath
372
373 def produce_install_bin_file(config,
374                              logger,
375                              file_dir,
376                              d_sub,
377                              file_name):
378     '''Create a bash shell script which does substitutions in the BINARIES dir 
379        in order to use it for extra compilations.
380     
381     :param config Config: The global configuration.
382     :param logger Logger: the logging instance
383     :param file_dir str: the directory where to put the files
384     :param d_sub dict: the dictionary that contains the substitutions to be done
385     :param file_name str: the name of the install script file
386     :return: the produced file
387     :rtype: str
388     '''  
389     # Write
390     filepath = os.path.join(file_dir, file_name)
391     # open the file and write into it
392     # use codec utf-8 as sat variables are in unicode
393     with codecs.open(filepath, "w", 'utf-8') as installbin_file:
394         installbin_template_path = os.path.join(config.VARS.internal_dir,
395                                         "INSTALL_BIN.template")
396         
397         # build the name of the directory that will contain the binaries
398         binaries_dir_name = "BINARIES-" + config.VARS.dist
399         # build the substitution loop
400         loop_cmd = "for f in $(grep -RIl"
401         for key in d_sub:
402             loop_cmd += " -e "+ key
403         loop_cmd += ' INSTALL); do\n     sed -i "\n'
404         for key in d_sub:
405             loop_cmd += "        s?" + key + "?$(pwd)/" + d_sub[key] + "?g\n"
406         loop_cmd += '            " $f\ndone'
407
408         d={}
409         d["BINARIES_DIR"] = binaries_dir_name
410         d["SUBSTITUTION_LOOP"]=loop_cmd
411         
412         # substitute the template and write it in file
413         content=src.template.substitute(installbin_template_path, d)
414         installbin_file.write(content)
415         # change the rights in order to make the file executable for everybody
416         os.chmod(filepath,
417                  stat.S_IRUSR |
418                  stat.S_IRGRP |
419                  stat.S_IROTH |
420                  stat.S_IWUSR |
421                  stat.S_IXUSR |
422                  stat.S_IXGRP |
423                  stat.S_IXOTH)
424     
425     return filepath
426
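# For illustration, with a hypothetical d_sub mapping one build path to its
# location in the archive, e.g. d_sub = {"/old/build/KERNEL": "BINARIES-XX/KERNEL"},
# the SUBSTITUTION_LOOP built above expands to roughly this shell fragment:
#
#   for f in $(grep -RIl -e /old/build/KERNEL INSTALL); do
#        sed -i "
#           s?/old/build/KERNEL?$(pwd)/BINARIES-XX/KERNEL?g
#               " $f
#   done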
427 def product_appli_creation_script(config,
428                                   logger,
429                                   file_dir,
430                                   binaries_dir_name):
431     '''Create a script that can produce an application (EDF style) in the binary
432        package.
433     
434     :param config Config: The global configuration.
435     :param logger Logger: the logging instance
436     :param file_dir str: the directory where to put the file
437     :param binaries_dir_name str: the name of the directory in the archive 
438                                   that contains the binaries.
439     :return: the path of the produced script file
440     :rtype: Str
441     '''
442     template_name = "create_appli.py.for_bin_packages.template"
443     template_path = os.path.join(config.VARS.internal_dir, template_name)
444     text_to_fill = open(template_path, "r").read()
445     text_to_fill = text_to_fill.replace("TO BE FILLED 1",
446                                         '"' + binaries_dir_name + '"')
447     
448     text_to_add = ""
449     for product_name in get_SALOME_modules(config):
450         product_info = src.product.get_product_config(config, product_name)
451        
452         if src.product.product_is_smesh_plugin(product_info):
453             continue
454
455         if 'install_dir' in product_info and bool(product_info.install_dir):
456             if src.product.product_is_cpp(product_info):
457                 # cpp module
458                 for cpp_name in src.product.get_product_components(product_info):
459                     line_to_add = ("<module name=\"" + 
460                                    cpp_name + 
461                                    "\" gui=\"yes\" path=\"''' + "
462                                    "os.path.join(dir_bin_name, \"" + 
463                                    cpp_name + "\") + '''\"/>")
464             else:
465                 # regular module
466                 line_to_add = ("<module name=\"" + 
467                                product_name + 
468                                "\" gui=\"yes\" path=\"''' + "
469                                "os.path.join(dir_bin_name, \"" + 
470                                product_name + "\") + '''\"/>")
471             text_to_add += line_to_add + "\n"
472     
473     filled_text = text_to_fill.replace("TO BE FILLED 2", text_to_add)
474     
475     tmp_file_path = os.path.join(file_dir, "create_appli.py")
476     ff = open(tmp_file_path, "w")
477     ff.write(filled_text)
478     ff.close()
479     
480     # change the rights in order to make the file executable for everybody
481     os.chmod(tmp_file_path,
482              stat.S_IRUSR |
483              stat.S_IRGRP |
484              stat.S_IROTH |
485              stat.S_IWUSR |
486              stat.S_IXUSR |
487              stat.S_IXGRP |
488              stat.S_IXOTH)
489     
490     return tmp_file_path
491
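# For illustration, for a hypothetical non-cpp product "GEOM" the loop above adds
# a line of the following shape to the template (dir_bin_name is resolved later,
# when the generated create_appli.py is executed):
#
#   <module name="GEOM" gui="yes" path="''' + os.path.join(dir_bin_name, "GEOM") + '''"/>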
492 def binary_package(config, logger, options, tmp_working_dir):
493     '''Prepare a dictionary that stores all the needed directories and files to
494        add in a binary package.
495     
496     :param config Config: The global configuration.
497     :param logger Logger: the logging instance
498     :param options OptResult: the options of the launched command
499     :param tmp_working_dir str: The temporary local directory containing some 
500                                 specific directories or files needed in the 
501                                 binary package
502     :return: the dictionary that stores all the needed directories and files to
503              add in a binary package.
504              {label : (path_on_local_machine, path_in_archive)}
505     :rtype: dict
506     '''
507
508     # Get the list of product installation to add to the archive
509     l_products_name = sorted(config.APPLICATION.products.keys())
510     l_product_info = src.product.get_products_infos(l_products_name,
511                                                     config)
512     l_install_dir = []
513     l_source_dir = []
514     l_not_installed = []
515     l_sources_not_present = []
516     generate_mesa_launcher = False  # a flag to know if we generate a mesa launcher
517     if ("APPLICATION" in config  and
518         "properties"  in config.APPLICATION  and
519         "mesa_launcher_in_package"    in config.APPLICATION.properties  and
520         config.APPLICATION.properties.mesa_launcher_in_package == "yes") :
521             generate_mesa_launcher=True
522
523     for prod_name, prod_info in l_product_info:
524         # skip product with property not_in_package set to yes
525         if src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes":
526             continue  
527
528         # Add the sources of the products that have the property 
529         # sources_in_package : "yes"
530         if src.get_property_in_product_cfg(prod_info,
531                                            "sources_in_package") == "yes":
532             if os.path.exists(prod_info.source_dir):
533                 l_source_dir.append((prod_name, prod_info.source_dir))
534             else:
535                 l_sources_not_present.append(prod_name)
536
537         # ignore the native and fixed products for install directories
538         if (src.product.product_is_native(prod_info) 
539                 or src.product.product_is_fixed(prod_info)
540                 or not src.product.product_compiles(prod_info)):
541             continue
542         if src.product.check_installation(prod_info):
543             l_install_dir.append((prod_name, prod_info.install_dir))
544         else:
545             l_not_installed.append(prod_name)
546         
547         # Add also the cpp generated modules (if any)
548         if src.product.product_is_cpp(prod_info):
549             # cpp module
550             for name_cpp in src.product.get_product_components(prod_info):
551                 install_dir = os.path.join(config.APPLICATION.workdir,
552                                            "INSTALL", name_cpp) 
553                 if os.path.exists(install_dir):
554                     l_install_dir.append((name_cpp, install_dir))
555                 else:
556                     l_not_installed.append(name_cpp)
557         
558     # check the name of the directory that could contain the binaries 
559     # from a previous detar
560     binaries_from_detar = os.path.join(config.APPLICATION.workdir, "BINARIES-" + config.VARS.dist)
561     if os.path.exists(binaries_from_detar):
562          logger.write("""
563 WARNING: existing binaries directory from a previous detar installation:
564          %s
565          To make a new package from it, you have to:
566          1) install the binaries in the INSTALL directory with the script "install_bin.sh"
567             (see the README file for more details)
568          2) or recompile everything in INSTALL with the "sat compile" command
569             (this step is long and requires some Linux packages to be installed
570             on your system)\n
571 """ % binaries_from_detar)
572     
573     # Print warning or error if there are some missing products
574     if len(l_not_installed) > 0:
575         text_missing_prods = ""
576         for p_name in l_not_installed:
577             text_missing_prods += "-" + p_name + "\n"
578         if not options.force_creation:
579             msg = _("ERROR: there are missing products installations:")
580             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
581                                      text_missing_prods),
582                          1)
583             return None
584         else:
585             msg = _("WARNING: there are missing products installations:")
586             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
587                                      text_missing_prods),
588                          1)
589
590     # Do the same for sources
591     if len(l_sources_not_present) > 0:
592         text_missing_prods = ""
593         for p_name in l_sources_not_present:
594             text_missing_prods += "-" + p_name + "\n"
595         if not options.force_creation:
596             msg = _("ERROR: there are missing products sources:")
597             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
598                                      text_missing_prods),
599                          1)
600             return None
601         else:
602             msg = _("WARNING: there are missing products sources:")
603             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
604                                      text_missing_prods),
605                          1)
606  
607     # construct the name of the directory that will contain the binaries
608     binaries_dir_name = "BINARIES-" + config.VARS.dist
609     
610     # construct the correlation table between the product names, their 
611     # actual install directories and their install directories in the archive
612     d_products = {}
613     for prod_name, install_dir in l_install_dir:
614         path_in_archive = os.path.join(binaries_dir_name, prod_name)
615         d_products[prod_name + " (bin)"] = (install_dir, path_in_archive)
616         
617     for prod_name, source_dir in l_source_dir:
618         path_in_archive = os.path.join("SOURCES", prod_name)
619         d_products[prod_name + " (sources)"] = (source_dir, path_in_archive)
620
621     # for packages of SALOME applications including KERNEL, 
622     # we produce a salome launcher or a virtual application (depending on salome version)
623     if 'KERNEL' in config.APPLICATION.products:
624         VersionSalome = src.get_salome_version(config)
625         # Case where SALOME has the launcher that uses the SalomeContext API
626         if VersionSalome >= 730:
627             # create the relative launcher and add it to the files to add
628             launcher_name = src.get_launcher_name(config)
629             launcher_package = produce_relative_launcher(config,
630                                                  logger,
631                                                  tmp_working_dir,
632                                                  launcher_name,
633                                                  binaries_dir_name,
634                                                  not(options.without_commercial))
635             d_products["launcher"] = (launcher_package, launcher_name)
636
637             # if the application contains mesa products, we generate in addition to the 
638             # classical salome launcher a launcher using mesa and called mesa_salome 
639             # (the mesa launcher will be used for remote usage through ssh).
640             if generate_mesa_launcher:
641                 #if there is one : store the use_mesa property
642                 restore_use_mesa_option=None
643                 if ('properties' in config.APPLICATION and 
644                     'use_mesa' in config.APPLICATION.properties):
645                     restore_use_mesa_option = config.APPLICATION.properties.use_mesa
646
647                 # activate mesa property, and generate a mesa launcher
648                 src.activate_mesa_property(config)  #activate use_mesa property
649                 launcher_mesa_name="mesa_"+launcher_name
650                 launcher_package_mesa = produce_relative_launcher(config,
651                                                      logger,
652                                                      tmp_working_dir,
653                                                      launcher_mesa_name,
654                                                      binaries_dir_name,
655                                                      not(options.without_commercial))
656                 d_products["launcher (mesa)"] = (launcher_package_mesa, launcher_mesa_name)
657
658                 # if there was a use_mesa value, we restore it
659                 # else we set it to the default value "no"
660                 if restore_use_mesa_option != None:
661                     config.APPLICATION.properties.use_mesa=restore_use_mesa_option
662                 else:
663                     config.APPLICATION.properties.use_mesa="no"
664
665             if options.sources:
666                 # if we mix binaries and sources, we add a copy of the launcher, 
667                 # prefixed with "bin", in order to avoid clashes
668                 d_products["launcher (copy)"] = (launcher_package, "bin"+launcher_name)
669         else:
670             # Provide a script for the creation of an application EDF style
671             appli_script = product_appli_creation_script(config,
672                                                         logger,
673                                                         tmp_working_dir,
674                                                         binaries_dir_name)
675             
676             d_products["appli script"] = (appli_script, "create_appli.py")
677
678     # Put also the environment file
679     env_file = produce_relative_env_files(config,
680                                            logger,
681                                            tmp_working_dir,
682                                            binaries_dir_name)
683
684     if src.architecture.is_windows():
685       filename  = "env_launch.bat"
686     else:
687       filename  = "env_launch.sh"
688     d_products["environment file"] = (env_file, filename)      
689
690     return d_products
691
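# For illustration, the dictionary returned by binary_package() typically has the
# shape below; every path and the "XX" distribution suffix are hypothetical:
#
#   {
#     "KERNEL (bin)":     ("<workdir>/INSTALL/KERNEL",        "BINARIES-XX/KERNEL"),
#     "launcher":         ("<tmp_working_dir>/salome",        "salome"),
#     "environment file": ("<tmp_working_dir>/env_launch.sh", "env_launch.sh"),
#   }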
692 def source_package(sat, config, logger, options, tmp_working_dir):
693     '''Prepare a dictionary that stores all the needed directories and files to
694        add in a source package.
695     
696     :param config Config: The global configuration.
697     :param logger Logger: the logging instance
698     :param options OptResult: the options of the launched command
699     :param tmp_working_dir str: The temporary local directory containing some 
700                                 specific directories or files needed in the 
701                                 source package
702     :return: the dictionary that stores all the needed directories and files to
703              add in a source package.
704              {label : (path_on_local_machine, path_in_archive)}
705     :rtype: dict
706     '''
707     
708     d_archives, l_pinfo_vcs = {}, []  # l_pinfo_vcs must exist even when ftp mode skips get_archives below
709     # Get all the products that are prepared using an archive
710     # unless ftp mode is specified (in this case the user of the
711     # archive will get the sources through the ftp mode of sat prepare)
712     if not options.ftp:
713         logger.write("Find archive products ... ")
714         d_archives, l_pinfo_vcs = get_archives(config, logger)
715         logger.write("Done\n")
716
717     d_archives_vcs = {}
718     if not options.with_vcs and len(l_pinfo_vcs) > 0:
719         # Make archives with the products that are not prepared using an archive
720         # (git, cvs, svn, etc)
721         logger.write("Construct archives for vcs products ... ")
722         d_archives_vcs = get_archives_vcs(l_pinfo_vcs,
723                                           sat,
724                                           config,
725                                           logger,
726                                           tmp_working_dir)
727         logger.write("Done\n")
728
729     # Create a project
730     logger.write("Create the project ... ")
731     d_project = create_project_for_src_package(config,
732                                                tmp_working_dir,
733                                                options.with_vcs,
734                                                options.ftp)
735     logger.write("Done\n")
736     
737     # Add salomeTools
738     tmp_sat = add_salomeTools(config, tmp_working_dir)
739     d_sat = {"salomeTools" : (tmp_sat, "salomeTools")}
740     
741     # Add a sat symbolic link (not on Windows)
742     if not src.architecture.is_windows():
743         tmp_satlink_path = os.path.join(tmp_working_dir, 'sat')
744         try:
745             t = os.getcwd()
746         except:
747             # In the jobs, os.getcwd() can fail
748             t = config.LOCAL.workdir
749         os.chdir(tmp_working_dir)
750         if os.path.lexists(tmp_satlink_path):
751             os.remove(tmp_satlink_path)
752         os.symlink(os.path.join('salomeTools', 'sat'), 'sat')
753         os.chdir(t)
754         
755         d_sat["sat link"] = (tmp_satlink_path, "sat")
756     
757     d_source = src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
758     return d_source
759
760 def get_archives(config, logger):
761     '''Find all the products that are retrieved using an archive and all the
762        products that are retrieved using a vcs (git, cvs, svn) repository.
763     
764     :param config Config: The global configuration.
765     :param logger Logger: the logging instance
766     :return: the dictionary {name_product : 
767              (local path of its archive, path in the package of its archive )}
768              and the list of specific configuration corresponding to the vcs 
769              products
770     :rtype: (Dict, List)
771     '''
772     # Get the list of product information
773     l_products_name = config.APPLICATION.products.keys()
774     l_product_info = src.product.get_products_infos(l_products_name,
775                                                     config)
776     d_archives = {}
777     l_pinfo_vcs = []
778     for p_name, p_info in l_product_info:
779         # skip product with property not_in_package set to yes
780         if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
781             continue  
782         # ignore the native and fixed products
783         if (src.product.product_is_native(p_info) 
784                 or src.product.product_is_fixed(p_info)):
785             continue
786         if p_info.get_source == "archive":
787             archive_path = p_info.archive_info.archive_name
788             archive_name = os.path.basename(archive_path)
789             d_archives[p_name] = (archive_path,
790                                   os.path.join(ARCHIVE_DIR, archive_name))
791         else:
792             # vcs products are not stored here, they are returned in l_pinfo_vcs
793             l_pinfo_vcs.append((p_name, p_info))
794     return d_archives, l_pinfo_vcs
795
796 def add_salomeTools(config, tmp_working_dir):
797     '''Prepare a version of salomeTools that has a specific local.pyconf file 
798        configured for a source package.
799
800     :param config Config: The global configuration.
801     :param tmp_working_dir str: The temporary local directory containing some 
802                                 specific directories or files needed in the 
803                                 source package
804     :return: The path to the local salomeTools directory to add in the package
805     :rtype: str
806     '''
807     # Copy sat in the temporary working directory
808     sat_tmp_path = src.Path(os.path.join(tmp_working_dir, "salomeTools"))
809     sat_running_path = src.Path(config.VARS.salometoolsway)
810     sat_running_path.copy(sat_tmp_path)
811     
812     # Update the local.pyconf file that contains the path to the project
813     local_pyconf_name = "local.pyconf"
814     local_pyconf_dir = os.path.join(tmp_working_dir, "salomeTools", "data")
815     local_pyconf_file = os.path.join(local_pyconf_dir, local_pyconf_name)
816     # Remove the .pyconf file in the root directory of salomeTools if there is
817     # any. (For example when launching jobs, a pyconf file describing the jobs 
818     # can be here and is not useful) 
819     files_or_dir_SAT = os.listdir(os.path.join(tmp_working_dir, "salomeTools"))
820     for file_or_dir in files_or_dir_SAT:
821         if file_or_dir.endswith(".pyconf") or file_or_dir.endswith(".txt"):
822             file_path = os.path.join(tmp_working_dir,
823                                      "salomeTools",
824                                      file_or_dir)
825             os.remove(file_path)
826     
827     ff = open(local_pyconf_file, "w")
828     ff.write(LOCAL_TEMPLATE)
829     ff.close()
830     
831     return sat_tmp_path.path
832
833 def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir):
834     '''For source packages that require all products to be retrieved as an 
835        archive, one has to create an archive for the vcs products.
836        So this method calls the clean and source commands of sat and then creates
837        the archives.
838
839     :param l_pinfo_vcs List: The list of specific configuration corresponding to
840                              each vcs product
841     :param sat Sat: The Sat instance that can be called to clean and source the
842                     products
843     :param config Config: The global configuration.
844     :param logger Logger: the logging instance
845     :param tmp_working_dir str: The temporary local directory containing some 
846                                 specific directories or files needed in the 
847                                 source package
848     :return: the dictionary that stores all the archives to add in the source 
849              package. {label : (path_on_local_machine, path_in_archive)}
850     :rtype: dict
851     '''
852     # clean the source directory of all the vcs products, then use the source 
853     # command and thus construct an archive that will not contain the patches
854     l_prod_names = [pn for pn, __ in l_pinfo_vcs]
855     if False: # clean is dangerous in user/SOURCES, fixed in tmp_local_working_dir
856       logger.write(_("\nclean sources\n"))
857       args_clean = config.VARS.application
858       args_clean += " --sources --products "
859       args_clean += ",".join(l_prod_names)
860       logger.write("WARNING: get_archives_vcs clean\n         '%s'\n" % args_clean, 1)
861       sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)
862     if True:
863       # source
864       logger.write(_("get sources\n"))
865       args_source = config.VARS.application
866       args_source += " --products "
867       args_source += ",".join(l_prod_names)
868       svgDir = sat.cfg.APPLICATION.workdir
869       tmp_local_working_dir = os.path.join(sat.cfg.APPLICATION.workdir, "tmp_package")  # to avoid too many big files in /tmp
870       sat.cfg.APPLICATION.workdir = tmp_local_working_dir
871       # DBG.write("SSS sat config.APPLICATION.workdir", sat.cfg.APPLICATION, True)
872       # DBG.write("sat config id", id(sat.cfg), True)
873       # caution: config is not the same id() as the one used by sat.source()
874       # sat.source(args_source, batch=True, verbose=5, logger_add_link = logger)
875       import source
876       source.run(args_source, sat, logger) #use this mode as runner.cfg reference
877       
878       # make the new archives
879       d_archives_vcs = {}
880       for pn, pinfo in l_pinfo_vcs:
881           path_archive = make_archive(pn, pinfo, tmp_local_working_dir)
882           logger.write("make archive vcs '%s'\n" % path_archive)
883           d_archives_vcs[pn] = (path_archive,
884                                 os.path.join(ARCHIVE_DIR, pn + ".tgz"))
885       sat.cfg.APPLICATION.workdir = svgDir
886       # DBG.write("END sat config", sat.cfg.APPLICATION, True)
887     return d_archives_vcs
888
889 def make_archive(prod_name, prod_info, where):
890     '''Create an archive of a product by searching its source directory.
891
892     :param prod_name str: The name of the product.
893     :param prod_info Config: The specific configuration corresponding to the 
894                              product
895     :param where str: The path of the directory where to put the resulting 
896                       archive
897     :return: The path of the resulting archive
898     :rtype: str
899     '''
900     path_targz_prod = os.path.join(where, prod_name + ".tgz")
901     tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
902     local_path = prod_info.source_dir
903     tar_prod.add(local_path,
904                  arcname=prod_name,
905                  exclude=exclude_VCS_and_extensions)
906     tar_prod.close()
907     return path_targz_prod       
908
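# Usage sketch for make_archive(), with a minimal stand-in for the product
# configuration (the real prod_info is a pyconf Config; only source_dir is read here):
#
#   class _FakeProdInfo:
#       source_dir = "/tmp/SOURCES/MYPRODUCT"   # hypothetical source checkout
#
#   archive = make_archive("MYPRODUCT", _FakeProdInfo(), "/tmp")
#   # -> "/tmp/MYPRODUCT.tgz", with the sources stored under "MYPRODUCT/"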
909 def create_project_for_src_package(config, tmp_working_dir, with_vcs, with_ftp):
910     '''Create a specific project for a source package.
911
912     :param config Config: The global configuration.
913     :param tmp_working_dir str: The temporary local directory containing some 
914                                 specific directories or files needed in the 
915                                 source package
916     :param with_vcs boolean: True if the package is with vcs products (not 
917                              transformed into archive products)
918     :param with_ftp boolean: True if the package use ftp servers to get archives
919     :return: The dictionary 
920              {"project" : (produced project, project path in the archive)}
921     :rtype: Dict
922     '''
923
924     # Create in the working temporary directory the full project tree
925     project_tmp_dir = os.path.join(tmp_working_dir, PROJECT_DIR)
926     products_pyconf_tmp_dir = os.path.join(project_tmp_dir,
927                                          "products")
928     compil_scripts_tmp_dir = os.path.join(project_tmp_dir,
929                                          "products",
930                                          "compil_scripts")
931     env_scripts_tmp_dir = os.path.join(project_tmp_dir,
932                                          "products",
933                                          "env_scripts")
934     patches_tmp_dir = os.path.join(project_tmp_dir,
935                                          "products",
936                                          "patches")
937     application_tmp_dir = os.path.join(project_tmp_dir,
938                                          "applications")
939     for directory in [project_tmp_dir,
940                       compil_scripts_tmp_dir,
941                       env_scripts_tmp_dir,
942                       patches_tmp_dir,
943                       application_tmp_dir]:
944         src.ensure_path_exists(directory)
945
946     # Create the pyconf that contains the information of the project
947     project_pyconf_name = "project.pyconf"        
948     project_pyconf_file = os.path.join(project_tmp_dir, project_pyconf_name)
949     ff = open(project_pyconf_file, "w")
950     ff.write(PROJECT_TEMPLATE)
951     if with_ftp and len(config.PATHS.ARCHIVEFTP) > 0:
952         ftp_path='ARCHIVEFTP : "'+config.PATHS.ARCHIVEFTP[0]
953         for ftpserver in config.PATHS.ARCHIVEFTP[1:]:
954             ftp_path=ftp_path+":"+ftpserver
955         ftp_path+='"'
956         ff.write("# ftp servers where to search for prerequisite archives\n")
957         ff.write(ftp_path)
958     # add licence paths if any
959     if len(config.PATHS.LICENCEPATH) > 0:  
960         licence_path='LICENCEPATH : "'+config.PATHS.LICENCEPATH[0]
961         for path in config.PATHS.LICENCEPATH[1:]:
962             licence_path=licence_path+":"+path
963         licence_path+='"'
964         ff.write("\n# Where to search for licences\n")
965         ff.write(licence_path)
966         
967
968     ff.close()
969     
970     # Loop over the products to get their pyconf and all the scripts 
971     # (compilation, environment, patches)
972     # and create the pyconf file to add to the project
973     lproducts_name = config.APPLICATION.products.keys()
974     l_products = src.product.get_products_infos(lproducts_name, config)
975     for p_name, p_info in l_products:
976         # skip product with property not_in_package set to yes
977         if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
978             continue  
979         find_product_scripts_and_pyconf(p_name,
980                                         p_info,
981                                         config,
982                                         with_vcs,
983                                         compil_scripts_tmp_dir,
984                                         env_scripts_tmp_dir,
985                                         patches_tmp_dir,
986                                         products_pyconf_tmp_dir)
987     
988     find_application_pyconf(config, application_tmp_dir)
989     
990     d_project = {"project" : (project_tmp_dir, PROJECT_DIR )}
991     return d_project
992
993 def find_product_scripts_and_pyconf(p_name,
994                                     p_info,
995                                     config,
996                                     with_vcs,
997                                     compil_scripts_tmp_dir,
998                                     env_scripts_tmp_dir,
999                                     patches_tmp_dir,
1000                                     products_pyconf_tmp_dir):
1001     '''Create a specific pyconf file for a given product. Get its environment 
1002        script, its compilation script and patches and put it in the temporary
1003        working directory. This method is used in the source package in order to
1004        construct the specific project.
1005
1006     :param p_name str: The name of the product.
1007     :param p_info Config: The specific configuration corresponding to the 
1008                              product
1009     :param config Config: The global configuration.
1010     :param with_vcs boolean: True if the package is with vcs products (not 
1011                              transformed into archive products)
1012     :param compil_scripts_tmp_dir str: The path to the temporary compilation 
1013                                        scripts directory of the project.
1014     :param env_scripts_tmp_dir str: The path to the temporary environment script 
1015                                     directory of the project.
1016     :param patches_tmp_dir str: The path to the temporary patch scripts 
1017                                 directory of the project.
1018     :param products_pyconf_tmp_dir str: The path to the temporary product 
1019                                         scripts directory of the project.
1020     '''
1021     
1022     # read the pyconf of the product
1023     product_pyconf_path = src.find_file_in_lpath(p_name + ".pyconf",
1024                                            config.PATHS.PRODUCTPATH)
1025     product_pyconf_cfg = src.pyconf.Config(product_pyconf_path)
1026
1027     # find the compilation script if any
1028     if src.product.product_has_script(p_info):
1029         compil_script_path = src.Path(p_info.compil_script)
1030         compil_script_path.copy(compil_scripts_tmp_dir)
1031         product_pyconf_cfg[p_info.section].compil_script = os.path.basename(
1032                                                     p_info.compil_script)
1033     # find the environment script if any
1034     if src.product.product_has_env_script(p_info):
1035         env_script_path = src.Path(p_info.environ.env_script)
1036         env_script_path.copy(env_scripts_tmp_dir)
1037         product_pyconf_cfg[p_info.section].environ.env_script = os.path.basename(
1038                                                 p_info.environ.env_script)
1039     # find the patches if any
1040     if src.product.product_has_patches(p_info):
1041         patches = src.pyconf.Sequence()
1042         for patch_path in p_info.patches:
1043             p_path = src.Path(patch_path)
1044             p_path.copy(patches_tmp_dir)
1045             patches.append(os.path.basename(patch_path), "")
1046
1047         product_pyconf_cfg[p_info.section].patches = patches
1048     
1049     if with_vcs:
1050         # put in the pyconf file the resolved values
1051         for info in ["git_info", "cvs_info", "svn_info"]:
1052             if info in p_info:
1053                 for key in p_info[info]:
1054                     product_pyconf_cfg[p_info.section][info][key] = p_info[
1055                                                                       info][key]
1056     else:
1057         # if the product is not an archive product, then make it become one.
1058         if src.product.product_is_vcs(p_info):
1059             product_pyconf_cfg[p_info.section].get_source = "archive"
1060             if not "archive_info" in product_pyconf_cfg[p_info.section]:
1061                 product_pyconf_cfg[p_info.section].addMapping("archive_info",
1062                                         src.pyconf.Mapping(product_pyconf_cfg),
1063                                         "")
1064             product_pyconf_cfg[p_info.section
1065                               ].archive_info.archive_name = p_info.name + ".tgz"
1066     
1067     # write the pyconf file to the temporary project location
1068     product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir,
1069                                            p_name + ".pyconf")
1070     ff = open(product_tmp_pyconf_path, 'w')
1071     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1072     product_pyconf_cfg.__save__(ff, 1)
1073     ff.close()
1074
1075 def find_application_pyconf(config, application_tmp_dir):
1076     '''Find the application pyconf file and put it in the specific temporary 
1077        directory containing the specific project of a source package.
1078
1079     :param config Config: The global configuration.
1080     :param application_tmp_dir str: The path to the temporary application 
1081                                        scripts directory of the project.
1082     '''
1083     # read the pyconf of the application
1084     application_name = config.VARS.application
1085     application_pyconf_path = src.find_file_in_lpath(
1086                                             application_name + ".pyconf",
1087                                             config.PATHS.APPLICATIONPATH)
1088     application_pyconf_cfg = src.pyconf.Config(application_pyconf_path)
1089     
1090     # Change the workdir
1091     application_pyconf_cfg.APPLICATION.workdir = src.pyconf.Reference(
1092                                     application_pyconf_cfg,
1093                                     src.pyconf.DOLLAR,
1094                                     'VARS.salometoolsway + $VARS.sep + ".."')
1095
1096     # Prevent from compilation in base
1097     application_pyconf_cfg.APPLICATION.no_base = "yes"
1098     
1099     #remove products that are not in config (which were filtered by --without_properties)
1100     for product_name in application_pyconf_cfg.APPLICATION.products.keys():
1101         if product_name not in config.APPLICATION.products.keys():
1102             application_pyconf_cfg.APPLICATION.products.__delitem__(product_name)
1103
1104     # write the pyconf file to the temporary application location
1105     application_tmp_pyconf_path = os.path.join(application_tmp_dir,
1106                                                application_name + ".pyconf")
1107
1108     ff = open(application_tmp_pyconf_path, 'w')
1109     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1110     application_pyconf_cfg.__save__(ff, 1)
1111     ff.close()
1112
1113 def sat_package(config, tmp_working_dir, options, logger):
1114     '''Prepare a dictionary that stores all the needed directories and files to
1115        add in a salomeTools package.
1116     
1117     :param tmp_working_dir str: The temporary local working directory 
1118     :param options OptResult: the options of the launched command
1119     :return: the dictionary that stores all the needed directories and files to
1120              add in a salomeTools package.
1121              {label : (path_on_local_machine, path_in_archive)}
1122     :rtype: dict
1123     '''
1124     d_project = {}
1125
1126     # we include sat itself
1127     d_project["all_sat"]=(config.VARS.salometoolsway, "")
1128
1129     # and we overwrite local.pyconf with a clean version.
1130     local_pyconf_tmp_path = os.path.join(tmp_working_dir, "local.pyconf")
1131     local_file_path = os.path.join(config.VARS.datadir, "local.pyconf")
1132     local_cfg = src.pyconf.Config(local_file_path)
1133     local_cfg.PROJECTS.project_file_paths=src.pyconf.Sequence(local_cfg.PROJECTS)
1134     local_cfg.LOCAL["base"] = "default"
1135     local_cfg.LOCAL["workdir"] = "default"
1136     local_cfg.LOCAL["log_dir"] = "default"
1137     local_cfg.LOCAL["archive_dir"] = "default"
1138     local_cfg.LOCAL["VCS"] = "None"
1139     local_cfg.LOCAL["tag"] = src.get_salometool_version(config)
1140
1141     # if the archive contains a project, we write its relative path in local.pyconf
1142     if options.project:
1143         project_arch_path = os.path.join("projects", options.project, 
1144                                          os.path.basename(options.project_file_path))
1145         local_cfg.PROJECTS.project_file_paths.append(project_arch_path, "")
1146
1147     ff = open(local_pyconf_tmp_path, 'w')
1148     local_cfg.__save__(ff, 1)
1149     ff.close()
1150     d_project["local.pyconf"]=(local_pyconf_tmp_path, "data/local.pyconf")
1151     return d_project
1152     
1153
1154 def project_package(config, name_project, project_file_path, ftp_mode, tmp_working_dir, embedded_in_sat, logger):
1155     '''Prepare a dictionary that stores all the needed directories and files to
1156        add in a project package.
1157     
1158     :param project_file_path str: The path to the local project.
1159     :param ftp_mode boolean: Do not embed archives, the archive will rely on ftp mode to retrieve them.
1160     :param tmp_working_dir str: The temporary local directory containing some 
1161                                 specific directories or files needed in the 
1162                                 project package
1163     :param embedded_in_sat boolean : the project package is embedded in a sat package
1164     :return: the dictionary that stores all the needed directories and files to
1165              add in a project package.
1166              {label : (path_on_local_machine, path_in_archive)}
1167     :rtype: dict
1168     '''
1169     d_project = {}
1170     # Read the project file and get the directories to add to the package
1171     
1172     try: 
1173       project_pyconf_cfg = config.PROJECTS.projects.__getattr__(name_project)
1174     except:
1175       logger.write("""
1176 WARNING: nonexistent config.PROJECTS.projects.%s, trying to read it now from:\n%s\n""" % (name_project, project_file_path))
1177       project_pyconf_cfg = src.pyconf.Config(project_file_path)
1178       project_pyconf_cfg.PWD = os.path.dirname(project_file_path)
1179     
1180     paths = {"APPLICATIONPATH" : "applications",
1181              "PRODUCTPATH" : "products",
1182              "JOBPATH" : "jobs",
1183              "MACHINEPATH" : "machines"}
1184     if not ftp_mode:
1185         paths["ARCHIVEPATH"] = "archives"
1186
1187     # Loop over the project paths and add them to the package
1188     project_file_name = os.path.basename(project_file_path)
1189     for path in paths:
1190         if path not in project_pyconf_cfg:
1191             continue
1192         if embedded_in_sat:
1193             dest_path = os.path.join("projects", name_project, paths[path])
1194             project_file_dest = os.path.join("projects", name_project, project_file_name)
1195         else:
1196             dest_path = paths[path]
1197             project_file_dest = project_file_name
1198
1199         # Add the directory to the files to add in the package
1200         d_project[path] = (project_pyconf_cfg[path], dest_path)
1201
1202         # Modify the value of the path in the package
1203         project_pyconf_cfg[path] = src.pyconf.Reference(
1204                                     project_pyconf_cfg,
1205                                     src.pyconf.DOLLAR,
1206                                     'project_path + "/' + paths[path] + '"')
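        # in the exported pyconf this becomes, for instance:
        #   APPLICATIONPATH : $project_path + "/applications"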
1207     
1208     # Modify some values
1209     if "project_path" not in project_pyconf_cfg:
1210         project_pyconf_cfg.addMapping("project_path",
1211                                       src.pyconf.Mapping(project_pyconf_cfg),
1212                                       "")
1213     project_pyconf_cfg.project_path = src.pyconf.Reference(project_pyconf_cfg,
1214                                                            src.pyconf.DOLLAR,
1215                                                            'PWD')
1216     # we don't want to export these two fields
1217     project_pyconf_cfg.__delitem__("file_path")
1218     project_pyconf_cfg.__delitem__("PWD")
1219     if ftp_mode:
1220         project_pyconf_cfg.__delitem__("ARCHIVEPATH")
1221     
1222     # Write the project pyconf file
1223     project_pyconf_tmp_path = os.path.join(tmp_working_dir, project_file_name)
1224     ff = open(project_pyconf_tmp_path, 'w')
1225     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1226     project_pyconf_cfg.__save__(ff, 1)
1227     ff.close()
1228     d_project["Project hat file"] = (project_pyconf_tmp_path, project_file_dest)
1229     
1230     return d_project
1231
1232 def add_readme(config, options, where):
1233     readme_path = os.path.join(where, "README")
1234     with codecs.open(readme_path, "w", 'utf-8') as f:
1235
1236         # templates for building the header
1237         readme_header="""
1238 # This package was generated with sat $version
1239 # Date: $date
1240 # User: $user
1241 # Distribution : $dist
1242
1243 In the following, $$ROOT represents the directory where you have installed 
1244 SALOME (the directory where this file is located).
1245
1246 """
1247         readme_compilation_with_binaries="""
1248
1249 compilation based on the binaries used as prerequisites
1250 =======================================================
1251
1252 If you fail to compile the complete application (for example because
1253 you are not root on your system and cannot install missing packages), you
1254 may try a partial compilation based on the binaries.
1255 For that it is necessary to copy the binaries from BINARIES to INSTALL,
1256 and do some substitutions on cmake and .la files (replace the build directories
1257 with local paths).
1258 The procedure to do it is:
1259  1) Remove or rename INSTALL directory if it exists
1260  2) Execute the shell script install_bin.sh:
1261  > cd $ROOT
1262  > ./install_bin.sh
1263  3) Use salomeTools (as explained in the Sources section) and compile only the 
1264     modules you need (with the -p option)
1265
1266 """
1267         readme_header_tpl=string.Template(readme_header)
1268         readme_template_path_bin = os.path.join(config.VARS.internal_dir,
1269                 "README_BIN.template")
1270         readme_template_path_bin_launcher = os.path.join(config.VARS.internal_dir,
1271                 "README_LAUNCHER.template")
1272         readme_template_path_bin_virtapp = os.path.join(config.VARS.internal_dir,
1273                 "README_BIN_VIRTUAL_APP.template")
1274         readme_template_path_src = os.path.join(config.VARS.internal_dir,
1275                 "README_SRC.template")
1276         readme_template_path_pro = os.path.join(config.VARS.internal_dir,
1277                 "README_PROJECT.template")
1278         readme_template_path_sat = os.path.join(config.VARS.internal_dir,
1279                 "README_SAT.template")
1280
1281         # prepare substitution dictionary
1282         d = dict()
1283         d['user'] = config.VARS.user
1284         d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
1285         d['version'] = src.get_salometool_version(config)
1286         d['dist'] = config.VARS.dist
1287         f.write(readme_header_tpl.substitute(d)) # write the general header (common)
1288
1289         if options.binaries or options.sources:
1290             d['application'] = config.VARS.application
1291             f.write("# Application: " + d['application'] + "\n")
1292             if 'KERNEL' in config.APPLICATION.products:
1293                 VersionSalome = src.get_salome_version(config)
1294                 # Case where SALOME has the launcher that uses the SalomeContext API
1295                 if VersionSalome >= 730:
1296                     d['launcher'] = config.APPLICATION.profile.launcher_name
1297                 else:
1298                     d['virtual_app'] = 'runAppli' # this info is not used for now
1299
1300         # write the specific sections
1301         if options.binaries:
1302             f.write(src.template.substitute(readme_template_path_bin, d))
1303             if "virtual_app" in d:
1304                 f.write(src.template.substitute(readme_template_path_bin_virtapp, d))
1305             if "launcher" in d:
1306                 f.write(src.template.substitute(readme_template_path_bin_launcher, d))
1307
1308         if options.sources:
1309             f.write(src.template.substitute(readme_template_path_src, d))
1310
1311         if options.binaries and options.sources:
1312             f.write(readme_compilation_with_binaries)
1313
1314         if options.project:
1315             f.write(src.template.substitute(readme_template_path_pro, d))
1316
1317         if options.sat:
1318             f.write(src.template.substitute(readme_template_path_sat, d))
1319     
1320     return readme_path
1321
1322 def update_config(config, prop, value):
1323     '''Remove from config.APPLICATION.products the products that have the property given as input.
1324     
1325     :param config Config: The global config.
1326     :param prop str: The property to filter
1327     :param value str: The value of the property to filter
1328     '''
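    # Typical call (as done below in run()): remove every product flagged with
    # the not_in_package property:
    #   update_config(config, "not_in_package", "yes")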
1329     # if there is no APPLICATION (e.g. 'sat package -t'): nothing to do
1330     if "APPLICATION" in config:
1331         l_product_to_remove = []
1332         for product_name in config.APPLICATION.products.keys():
1333             prod_cfg = src.product.get_product_config(config, product_name)
1334             if src.get_property_in_product_cfg(prod_cfg, prop) == value:
1335                 l_product_to_remove.append(product_name)
1336         for product_name in l_product_to_remove:
1337             config.APPLICATION.products.__delitem__(product_name)
1338
1339 def description():
1340     '''method that is called when salomeTools is called with --help option.
1341     
1342     :return: The text to display for the package command description.
1343     :rtype: str
1344     '''
1345     return _("""
1346 The package command creates a tar file archive of a product.
1347 There are four kinds of archive, which can be mixed:
1348
1349  1 - The binary archive. 
1350      It contains the product installation directories plus a launcher.
1351  2 - The sources archive. 
1352      It contains the product archives and a project (the application plus salomeTools).
1353  3 - The project archive. 
1354      It contains a project (give the project file path as argument).
1355  4 - The salomeTools archive. 
1356      It contains the salomeTools utility code.
1357
1358 example:
1359  >> sat package SALOME-master --binaries --sources""")
1360   
1361 def run(args, runner, logger):
1362     '''method that is called when salomeTools is called with package parameter.
1363     '''
1364     
1365     # Parse the options
1366     (options, args) = parser.parse_args(args)
1367
1368     # Check that at least one type of package is selected
1369     all_option_types = (options.binaries,
1370                         options.sources,
1371                         options.project not in ["", None],
1372                         options.sat)
1373
1374     # Check if no option for package type
1375     if all_option_types.count(True) == 0:
1376         msg = _("Error: Specify a type for the package\nUse one of the "
1377                 "following options: --binaries, --sources, --project or"
1378                 " --salometools")
1379         logger.write(src.printcolors.printcError(msg), 1)
1380         logger.write("\n", 1)
1381         return 1
1382     
1383     # The repository where to put the package if not Binary or Source
1384     package_default_path = runner.cfg.LOCAL.workdir
1385     
1386     # if the package contains binaries or sources:
1387     if options.binaries or options.sources:
1388         # Check that the command has been called with an application
1389         src.check_config_has_application(runner.cfg)
1390
1391         # Display information
1392         logger.write(_("Packaging application %s\n") % src.printcolors.printcLabel(
1393                                                     runner.cfg.VARS.application), 1)
1394         
1395         # Get the default directory where to put the packages
1396         package_default_path = os.path.join(runner.cfg.APPLICATION.workdir, "PACKAGE")
1397         src.ensure_path_exists(package_default_path)
1398         
1399     # if the package contains a project:
1400     if options.project:
1401         # check that the project is visible by SAT
1402         projectNameFile = options.project + ".pyconf"
1403         foundProject = None
1404         for i in runner.cfg.PROJECTS.project_file_paths:
1405             baseName = os.path.basename(i)
1406             if baseName == projectNameFile:
1407                 foundProject = i
1408                 break
1409
1410         if foundProject is None:
1411             local_path = os.path.join(runner.cfg.VARS.salometoolsway, "data", "local.pyconf")
1412             msg = _("""ERROR: the project %(1)s is not visible to salomeTools.
1413 Known projects are:
1414 %(2)s
1415
1416 Please add it to the file:
1417 %(3)s""" % \
1418                     {"1": options.project, "2": "\n  ".join(runner.cfg.PROJECTS.project_file_paths), "3": local_path})
1419             logger.write(src.printcolors.printcError(msg), 1)
1420             logger.write("\n", 1)
1421             return 1
1422         else:
1423             options.project_file_path = foundProject
1424             src.printcolors.print_value(logger, "Project path", options.project_file_path, 2)
1425     
1426     # Remove the products that are filtered by the --without_properties option
1427     if options.without_properties:
1428         app = runner.cfg.APPLICATION
1429         logger.trace("without_properties all products:\n %s\n" % PP.pformat(sorted(app.products.keys())))
1430         prop, value = options.without_properties
1431         update_config(runner.cfg, prop, value)
1432         logger.warning("without_properties selected products:\n %s\n" % PP.pformat(sorted(app.products.keys())))
1433
1434     # Remove from config the products that have the not_in_package property
1435     update_config(runner.cfg, "not_in_package", "yes")
1436     
1437     # get the name of the archive or build it
1438     if options.name:
1439         if os.path.basename(options.name) == options.name:
1440             # only a name (not a path)
1441             archive_name = options.name           
1442             dir_name = package_default_path
1443         else:
1444             archive_name = os.path.basename(options.name)
1445             dir_name = os.path.dirname(options.name)
1446         
1447         # strip the archive extension, if any
1448         if archive_name[-len(".tgz"):] == ".tgz":
1449             archive_name = archive_name[:-len(".tgz")]
1450         if archive_name[-len(".tar.gz"):] == ".tar.gz":
1451             archive_name = archive_name[:-len(".tar.gz")]
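        # e.g. both "MYAPP.tgz" and "MYAPP.tar.gz" end up as "MYAPP";
        # the ".tgz" extension is added back below when building path_targz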
1452         
1453     else:
1454         archive_name=""
1455         dir_name = package_default_path
1456         if options.binaries or options.sources:
1457             archive_name = runner.cfg.APPLICATION.name
1458
1459         if options.binaries:
1460             archive_name += "-"+runner.cfg.VARS.dist
1461             
1462         if options.sources:
1463             archive_name += "-SRC"
1464             if options.with_vcs:
1465                 archive_name += "-VCS"
1466
1467         if options.sat:
1468             archive_name += ("salomeTools_" + src.get_salometool_version(runner.cfg))
1469
1470         if options.project:
1471             if options.sat:
1472                 archive_name += "_" 
1473             project_name = options.project
1474             archive_name += ("satproject_" + project_name)
1475  
1476         if len(archive_name)==0: # no option worked 
1477             msg = _("Error: Cannot name the archive\n"
1478                     " check if at least one of the following options was "
1479                 "selected: --binaries, --sources, --project or"
1480                     " --salometools")
1481             logger.write(src.printcolors.printcError(msg), 1)
1482             logger.write("\n", 1)
1483             return 1
1484  
1485     path_targz = os.path.join(dir_name, archive_name + ".tgz")
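    # e.g. <application workdir>/PACKAGE/SALOME-master-<dist>.tgz for a binary package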
1486     
1487     src.printcolors.print_value(logger, "Package path", path_targz, 2)
1488
1489     # Create a working directory for all files that are produced during the
1490     # package creation and that will be removed at the end of the command
1491     tmp_working_dir = os.path.join(runner.cfg.VARS.tmp_root, runner.cfg.VARS.datehour)
1492     src.ensure_path_exists(tmp_working_dir)
1493     logger.write("\n", 5)
1494     logger.write(_("The temporary working directory: %s\n") % tmp_working_dir, 5)
1495     
1496     logger.write("\n", 3)
1497
1498     msg = _("Preparation of files to add to the archive")
1499     logger.write(src.printcolors.printcLabel(msg), 2)
1500     logger.write("\n", 2)
1501     
1502     d_files_to_add={}  # content of the archive
1503
1504     # a dict to hold paths that will need to be substituted for user recompilations
1505     d_paths_to_substitute={}  
1506
1507     if options.binaries:
1508         d_bin_files_to_add = binary_package(runner.cfg,
1509                                             logger,
1510                                             options,
1511                                             tmp_working_dir)
1512         # for all binary dirs, store the substitutions that will be required 
1513         # for extra compilations
1514         for key in d_bin_files_to_add:
1515             if key.endswith("(bin)"):
1516                 source_dir = d_bin_files_to_add[key][0]
1517                 path_in_archive = d_bin_files_to_add[key][1].replace("BINARIES-" + runner.cfg.VARS.dist,"INSTALL")
1518                 if os.path.basename(source_dir)==os.path.basename(path_in_archive):
1519                     # if basename is the same we will just substitute the dirname 
1520                     d_paths_to_substitute[os.path.dirname(source_dir)]=\
1521                         os.path.dirname(path_in_archive)
1522                 else:
1523                     d_paths_to_substitute[source_dir]=path_in_archive
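        # d_paths_to_substitute now maps each local binary install directory to
        # the corresponding INSTALL/... location inside the archive (illustrative):
        #   { "/local/.../BINARIES-<dist>/KERNEL" : ".../INSTALL/KERNEL" }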
1524
1525         d_files_to_add.update(d_bin_files_to_add)
1526
1527     if options.sources:
1528         d_files_to_add.update(source_package(runner,
1529                                         runner.cfg,
1530                                         logger, 
1531                                         options,
1532                                         tmp_working_dir))
1533         if options.binaries:
1534             # for archives with binaries and sources, we provide a shell script 
1535             # that installs the binaries for compilation
1536             file_install_bin=produce_install_bin_file(runner.cfg,logger,
1537                                                       tmp_working_dir,
1538                                                       d_paths_to_substitute,
1539                                                       "install_bin.sh")
1540             d_files_to_add.update({"install_bin" : (file_install_bin, "install_bin.sh")})
1541             logger.write("substitutions that need to be done later : \n", 5)
1542             logger.write(str(d_paths_to_substitute), 5)
1543             logger.write("\n", 5)
1544     else:
1545         # the --salometools option is ignored when --sources is selected, as the
1546         # sources archive already embeds salomeTools
1547         if options.sat:
1548             d_files_to_add.update(sat_package(runner.cfg, tmp_working_dir, 
1549                                   options, logger))
1550         
1551     if options.project:
        # note: use options.project here; project_name is only defined when the
        # archive name was built automatically (i.e. without the --name option)
1552         DBG.write("config for package %s" % options.project, runner.cfg)
1553         d_files_to_add.update(project_package(runner.cfg, options.project, options.project_file_path, options.ftp, tmp_working_dir, options.sat, logger))
1554
1555     if not(d_files_to_add):
1556         msg = _("Error: Empty dictionary to build the archive!\n")
1557         logger.write(src.printcolors.printcError(msg), 1)
1558         logger.write("\n", 1)
1559         return 1
1560
1561     # Add the README file in the package
1562     local_readme_tmp_path = add_readme(runner.cfg, options, tmp_working_dir)
1563     d_files_to_add["README"] = (local_readme_tmp_path, "README")
1564
1565     # Add the additional files of option add_files
1566     if options.add_files:
1567         for file_path in options.add_files:
1568             if not os.path.exists(file_path):
1569                 msg = _("WARNING: the file %s is not accessible.\n") % file_path
                logger.write(msg, 1)
1570                 continue
1571             file_name = os.path.basename(file_path)
1572             d_files_to_add[file_name] = (file_path, file_name)
1573
1574     logger.write("\n", 2)
1575     logger.write(src.printcolors.printcLabel(_("Actually do the package")), 2)
1576     logger.write("\n", 2)
1577     logger.write("\nfiles and directories to add:\n%s\n\n" % PP.pformat(d_files_to_add), 5)
1578
1579     res = 0
1580     try:
1581         # Creating the object tarfile
1582         tar = tarfile.open(path_targz, mode='w:gz')
1583         
1584         # get the filtering function if needed
1585         filter_function = exclude_VCS_and_extensions
1586
1587         # Add the files to the tarfile object
1588         res = add_files(tar, archive_name, d_files_to_add, logger, f_exclude=filter_function)
1589         tar.close()
1590     except KeyboardInterrupt:
1591         logger.write(src.printcolors.printcError("\nERROR: forced interruption\n"), 1)
1592         logger.write(_("Removing the temporary working directory '%s'... ") % tmp_working_dir, 1)
1593         # remove the working directory
1594         shutil.rmtree(tmp_working_dir)
1595         logger.write(_("OK"), 1)
1596         logger.write(_("\n"), 1)
1597         return 1
1598     
1599     # case where there is no application, e.g. packaging only sat with 'sat package -t'
1600     try:
1601         app = runner.cfg.APPLICATION
1602     except:
1603         app = None
1604
1605     # remove the application's tmp_package working directory if it exists
1606     if app is not None:
1607         tmp_local_working_dir = os.path.join(app.workdir, "tmp_package")
1608         if os.path.isdir(tmp_local_working_dir):
1609             shutil.rmtree(tmp_local_working_dir)
1610
1611     # remove the tmp directory, unless the user is registered as a developer
1612     if os.path.isdir(tmp_working_dir) and (not DBG.isDeveloper()):
1613         shutil.rmtree(tmp_working_dir)
1614     
1615     # Print again the path of the package
1616     logger.write("\n", 2)
1617     src.printcolors.print_value(logger, "Package path", path_targz, 2)
1618     
1619     return res