remove from config products with not-in_package property
[tools/sat.git] / commands / package.py
1 #!/usr/bin/env python
2 #-*- coding:utf-8 -*-
3 #  Copyright (C) 2010-2012  CEA/DEN
4 #
5 #  This library is free software; you can redistribute it and/or
6 #  modify it under the terms of the GNU Lesser General Public
7 #  License as published by the Free Software Foundation; either
8 #  version 2.1 of the License.
9 #
10 #  This library is distributed in the hope that it will be useful,
11 #  but WITHOUT ANY WARRANTY; without even the implied warranty of
12 #  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13 #  Lesser General Public License for more details.
14 #
15 #  You should have received a copy of the GNU Lesser General Public
16 #  License along with this library; if not, write to the Free Software
17 #  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
18
19 import os
20 import stat
21 import shutil
22 import datetime
23 import tarfile
24 import codecs
25 import string
26 import pprint as PP
27
28 import src
29
30 from application import get_SALOME_modules
31 import src.debug as DBG
32
33 BINARY = "binary"
34 SOURCE = "Source"
35 PROJECT = "Project"
36 SAT = "Sat"
37
38 ARCHIVE_DIR = "ARCHIVES"
39 PROJECT_DIR = "PROJECT"
40
41 IGNORED_DIRS = [".git", ".svn"]
42 IGNORED_EXTENSIONS = []
43
44 PROJECT_TEMPLATE = """#!/usr/bin/env python
45 #-*- coding:utf-8 -*-
46
47 # The path to the archive root directory
48 root_path : $PWD + "/../"
49 # path to the PROJECT
50 project_path : $PWD + "/"
51
52 # Where to search the archives of the products
53 ARCHIVEPATH : $root_path + "ARCHIVES"
54 # Where to search the pyconf of the applications
55 APPLICATIONPATH : $project_path + "applications/"
56 # Where to search the pyconf of the products
57 PRODUCTPATH : $project_path + "products/"
58 # Where to search the pyconf of the jobs of the project
59 JOBPATH : $project_path + "jobs/"
60 # Where to search the pyconf of the machines of the project
61 MACHINEPATH : $project_path + "machines/"
62 """
63
64 LOCAL_TEMPLATE = ("""#!/usr/bin/env python
65 #-*- coding:utf-8 -*-
66
67   LOCAL :
68   {
69     base : 'default'
70     workdir : 'default'
71     log_dir : 'default'
72     archive_dir : 'default'
73     VCS : None
74     tag : None
75   }
76
77 PROJECTS :
78 {
79 project_file_paths : [$VARS.salometoolsway + $VARS.sep + \"..\" + $VARS.sep"""
80 """ + \"""" + PROJECT_DIR + """\" + $VARS.sep + "project.pyconf"]
81 }
82 """)
83
84 # Define all the possible options for the package command: sat package <options>
85 parser = src.options.Options()
86 parser.add_option('b', 'binaries', 'boolean', 'binaries',
87     _('Optional: Produce a binary package.'), False)
88 parser.add_option('f', 'force_creation', 'boolean', 'force_creation',
89     _('Optional: Only binary package: produce the archive even if '
90       'there are some missing products.'), False)
91 parser.add_option('s', 'sources', 'boolean', 'sources',
92     _('Optional: Produce a compilable archive of the sources of the '
93       'application.'), False)
94 parser.add_option('', 'with_vcs', 'boolean', 'with_vcs',
95     _('Optional: Do not make archives for products in VCS mode (git, cvs, svn). '
96       'Sat prepare will use VCS mode instead to retrieve them.'),
97     False)
98 parser.add_option('', 'ftp', 'boolean', 'ftp',
99     _('Optional: Do not embed archives for products in archive mode. '
100     'Sat prepare will use ftp instead to retrieve them.'),
101     False)
102 parser.add_option('p', 'project', 'string', 'project',
103     _('Optional: Produce an archive that contains a project.'), "")
104 parser.add_option('t', 'salometools', 'boolean', 'sat',
105     _('Optional: Produce an archive that contains salomeTools.'), False)
106 parser.add_option('n', 'name', 'string', 'name',
107     _('Optional: The name or full path of the archive.'), None)
108 parser.add_option('', 'add_files', 'list2', 'add_files',
109     _('Optional: The list of additional files to add to the archive.'), [])
110 parser.add_option('', 'without_commercial', 'boolean', 'without_commercial',
111     _('Optional: do not add commercial licence.'), False)
112 parser.add_option('', 'without_properties', 'properties', 'without_properties',
113     _('Optional: Filter the products by their properties.\n\tSyntax: '
114       '--without_properties <property>:<value>'))
115
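# Illustrative invocations of the command defined by the options above (the application
# name and the property filter below are hypothetical, only the option names are real):
#   ./sat package MY_APPLICATION --binaries --sources --name /tmp/MY_APPLICATION.tgz
#   ./sat package MY_APPLICATION --sources --with_vcs --without_properties is_SALOME_module:yes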
116
117 def add_files(tar, name_archive, d_content, logger, f_exclude=None):
118     '''Create an archive containing all directories and files that are given in
119        the d_content argument.
120     
121     :param tar tarfile: The tarfile instance used to make the archive.
122     :param name_archive str: The name of the archive to make.
123     :param d_content dict: The dictionary that contains all directories and files
124                            to add in the archive.
125                            d_content[label] = 
126                                         (path_on_local_machine, path_in_archive)
127     :param logger Logger: the logging instance
128     :param f_exclude Function: the function used to filter out unwanted files
129     :return: 0 if success, 1 if not.
130     :rtype: int
131     '''
132     # get the max length of the messages in order to make the display
133     max_len = len(max(d_content.keys(), key=len))
134     
135     success = 0
136     # loop over each directory or file stored in the d_content dictionary
137     names = sorted(d_content.keys())
138     DBG.write("add tar names", names)
139
140     for name in names:
141         # display information
142         len_points = max_len - len(name) + 3
143         local_path, archive_path = d_content[name]
144         in_archive = os.path.join(name_archive, archive_path)
145         logger.write(name + " " + len_points * "." + " "+ in_archive + " ", 3)
146         # Get the local path and the path in archive 
147         # of the directory or file to add
148         # Add it in the archive
149         try:
150             tar.add(local_path, arcname=in_archive, exclude=f_exclude)
151             logger.write(src.printcolors.printcSuccess(_("OK")), 3)
152         except Exception as e:
153             logger.write(src.printcolors.printcError(_("KO ")), 3)
154             logger.write(str(e), 3)
155             success = 1
156         logger.write("\n", 3)
157     return success
158
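# A minimal sketch of how add_files() is driven by the rest of this module (paths,
# labels and the archive name below are hypothetical):
#
#   tar = tarfile.open("/tmp/MY_APPLICATION.tgz", mode='w:gz')
#   d_content = {"KERNEL (bin)": ("/work/INSTALL/KERNEL", "BINARIES-FD32/KERNEL"),
#                "README":       ("/work/tmp_package/README", "README")}
#   add_files(tar, "MY_APPLICATION", d_content, logger,
#             f_exclude=exclude_VCS_and_extensions)
#   tar.close()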
159 def exclude_VCS_and_extensions(filename):
160     ''' The function that is used to exclude from the package the links to the
161         VCS repositories (like .git)
162
163     :param filename Str: The filename to exclude (or not).
164     :return: True if the file has to be excluded
165     :rtype: Boolean
166     '''
167     for dir_name in IGNORED_DIRS:
168         if dir_name in filename:
169             return True
170     for extension in IGNORED_EXTENSIONS:
171         if filename.endswith(extension):
172             return True
173     return False
174
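# For example, with the defaults above:
#   exclude_VCS_and_extensions("SOURCES/KERNEL/.git/config")  # -> True (filtered out)
#   exclude_VCS_and_extensions("SOURCES/KERNEL/README")       # -> False (kept)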
175 def produce_relative_launcher(config,
176                               logger,
177                               file_dir,
178                               file_name,
179                               binaries_dir_name,
180                               with_commercial=True):
181     '''Create a specific SALOME launcher for the binary package. This launcher 
182        uses relative paths.
183     
184     :param config Config: The global configuration.
185     :param logger Logger: the logging instance
186     :param file_dir str: the directory where to put the launcher
187     :param file_name str: The launcher name
188     :param binaries_dir_name str: the name of the directory where the binaries
189                                   are, in the archive.
190     :return: the path of the produced launcher
191     :rtype: str
192     '''
193     
194     # get KERNEL installation path 
195     kernel_root_dir = os.path.join(binaries_dir_name, "KERNEL")
196
197     # set kernel bin dir (considering fhs property)
198     kernel_cfg = src.product.get_product_config(config, "KERNEL")
199     if src.get_property_in_product_cfg(kernel_cfg, "fhs"):
200         bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin") 
201     else:
202         bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin","salome") 
203
204     # check if the application contains an application module
205     l_product_info = src.product.get_products_infos(config.APPLICATION.products.keys(), config)
206     salome_application_name="Not defined" 
207     for prod_name, prod_info in l_product_info:
208         # look for a salome application
209         if src.get_property_in_product_cfg(prod_info, "is_salome_application") == "yes":
210             salome_application_name=prod_info.name
211             continue
212     # if the application contains an application module, we set ABSOLUTE_APPLI_PATH to it
213     # if not we set it to KERNEL_INSTALL_DIR, which is sufficient, except for salome test
214     if salome_application_name == "Not defined":
215         app_root_dir=kernel_root_dir
216     else:
217         app_root_dir=os.path.join(binaries_dir_name, salome_application_name)
218
219     # Get the launcher template and do substitutions
220     if "python3" in config.APPLICATION and config.APPLICATION.python3 == "yes":
221         withProfile = src.fileEnviron.withProfile3
222     else:
223         withProfile = src.fileEnviron.withProfile
224
225     withProfile = withProfile.replace(
226         "ABSOLUTE_APPLI_PATH'] = 'KERNEL_INSTALL_DIR'",
227         "ABSOLUTE_APPLI_PATH'] = out_dir_Path + '" + config.VARS.sep + app_root_dir + "'")
228     withProfile = withProfile.replace(
229         " 'BIN_KERNEL_INSTALL_DIR'",
230         " out_dir_Path + '" + config.VARS.sep + bin_kernel_install_dir + "'")
231
232     before, after = withProfile.split("# here your local standalone environment\n")
233
234     # create an environment file writer
235     writer = src.environment.FileEnvWriter(config,
236                                            logger,
237                                            file_dir,
238                                            src_root=None)
239     
240     filepath = os.path.join(file_dir, file_name)
241     # open the file and write into it
242     launch_file = open(filepath, "w")
243     launch_file.write(before)
244     # Write
245     writer.write_cfgForPy_file(launch_file,
246                                for_package = binaries_dir_name,
247                                with_commercial=with_commercial)
248     launch_file.write(after)
249     launch_file.close()
250     
251     # Little hack to put out_dir_Path outside the strings
252     src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' )
253     
254     # A hack to put a call to a file for distene licence.
255     # It does nothing to an application that has no distene product
256     hack_for_distene_licence(filepath)
257        
258     # change the rights in order to make the file executable for everybody
259     os.chmod(filepath,
260              stat.S_IRUSR |
261              stat.S_IRGRP |
262              stat.S_IROTH |
263              stat.S_IWUSR |
264              stat.S_IXUSR |
265              stat.S_IXGRP |
266              stat.S_IXOTH)
267
268     return filepath
269
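# Note: the stat flag combination used in produce_relative_launcher above (and in the
# other chmod calls of this module) is equivalent to os.chmod(filepath, 0o755), i.e.
# rwxr-xr-x.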
270 def hack_for_distene_licence(filepath):
271     '''Replace the distene licence env variable by a call to a file.
272     
273     :param filepath Str: The path to the launcher to modify.
274     '''  
275     shutil.move(filepath, filepath + "_old")
276     fileout= filepath
277     filein = filepath + "_old"
278     fin = open(filein, "r")
279     fout = open(fileout, "w")
280     text = fin.readlines()
281     # Find the Distene section
282     num_line = -1
283     for i,line in enumerate(text):
284         if "# Set DISTENE License" in line:
285             num_line = i
286             break
287     if num_line == -1:
288         # No distene product, there is nothing to do
289         fin.close()
290         for line in text:
291             fout.write(line)
292         fout.close()
293         return
294     del text[num_line +1]
295     del text[num_line +1]
296     text_to_insert ="""    import imp
297     try:
298         distene = imp.load_source('distene_licence', '/data/tmpsalome/salome/prerequis/install/LICENSE/dlim8.var.py')
299         distene.set_distene_variables(context)
300     except:
301         pass\n"""
302     text.insert(num_line + 1, text_to_insert)
303     for line in text:
304         fout.write(line)
305     fin.close()    
306     fout.close()
307     return
308     
309 def produce_relative_env_files(config,
310                               logger,
311                               file_dir,
312                               binaries_dir_name):
313     '''Create some specific environment files for the binary package. These 
314        files use relative paths.
315     
316     :param config Config: The global configuration.
317     :param logger Logger: the logging instance
318     :param file_dir str: the directory where to put the files
319     :param binaries_dir_name str: the name of the directory where the binaries
320                                   are, in the archive.
321     :return: the list of path of the produced environment files
322     :rtype: List
323     '''  
324     # create an environment file writer
325     writer = src.environment.FileEnvWriter(config,
326                                            logger,
327                                            file_dir,
328                                            src_root=None)
329     
330     # Write
331     filepath = writer.write_env_file("env_launch.sh",
332                           False, # for launch
333                           "bash",
334                           for_package = binaries_dir_name)
335
336     # Little hack to put out_dir_Path as environment variable
337     src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )
338
339     # change the rights in order to make the file executable for everybody
340     os.chmod(filepath,
341              stat.S_IRUSR |
342              stat.S_IRGRP |
343              stat.S_IROTH |
344              stat.S_IWUSR |
345              stat.S_IXUSR |
346              stat.S_IXGRP |
347              stat.S_IXOTH)
348     
349     return filepath
350
351 def produce_install_bin_file(config,
352                              logger,
353                              file_dir,
354                              d_sub,
355                              file_name):
356     '''Create a bash shell script which does substitutions in the BINARIES dir
357        in order to use it for extra compilations.
358     
359     :param config Config: The global configuration.
360     :param logger Logger: the logging instance
361     :param file_dir str: the directory where to put the files
362     :param d_sub dict: the dictionary that contains the substitutions to be done
363     :param file_name str: the name of the install script file
364     :return: the produced file
365     :rtype: str
366     '''  
367     # Write
368     filepath = os.path.join(file_dir, file_name)
369     # open the file and write into it
370     # use codec utf-8 as sat variables are in unicode
371     with codecs.open(filepath, "w", 'utf-8') as installbin_file:
372         installbin_template_path = os.path.join(config.VARS.internal_dir,
373                                         "INSTALL_BIN.template")
374         
375         # build the name of the directory that will contain the binaries
376         binaries_dir_name = "BINARIES-" + config.VARS.dist
377         # build the substitution loop
378         loop_cmd = "for f in $(grep -RIl"
379         for key in d_sub:
380             loop_cmd += " -e "+ key
381         loop_cmd += ' INSTALL); do\n     sed -i "\n'
382         for key in d_sub:
383             loop_cmd += "        s?" + key + "?$(pwd)/" + d_sub[key] + "?g\n"
384         loop_cmd += '            " $f\ndone'
385
386         d={}
387         d["BINARIES_DIR"] = binaries_dir_name
388         d["SUBSTITUTION_LOOP"]=loop_cmd
389         
390         # substitute the template and write it in file
391         content=src.template.substitute(installbin_template_path, d)
392         installbin_file.write(content)
393         # change the rights in order to make the file executable for everybody
394         os.chmod(filepath,
395                  stat.S_IRUSR |
396                  stat.S_IRGRP |
397                  stat.S_IROTH |
398                  stat.S_IWUSR |
399                  stat.S_IXUSR |
400                  stat.S_IXGRP |
401                  stat.S_IXOTH)
402     
403     return filepath
404
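# For a hypothetical d_sub = {"/work/BINARIES-FD32": "INSTALL"}, the SUBSTITUTION_LOOP
# built by produce_install_bin_file above expands roughly to:
#
#   for f in $(grep -RIl -e /work/BINARIES-FD32 INSTALL); do
#        sed -i "
#           s?/work/BINARIES-FD32?$(pwd)/INSTALL?g
#               " $f
#   done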
405 def product_appli_creation_script(config,
406                                   logger,
407                                   file_dir,
408                                   binaries_dir_name):
409     '''Create a script that can produce an application (EDF style) in the binary
410        package.
411     
412     :param config Config: The global configuration.
413     :param logger Logger: the logging instance
414     :param file_dir str: the directory where to put the file
415     :param binaries_dir_name str: the name of the directory where the binaries
416                                   are, in the archive.
417     :return: the path of the produced script file
418     :rtype: Str
419     '''
420     template_name = "create_appli.py.for_bin_packages.template"
421     template_path = os.path.join(config.VARS.internal_dir, template_name)
422     text_to_fill = open(template_path, "r").read()
423     text_to_fill = text_to_fill.replace("TO BE FILLED 1",
424                                         '"' + binaries_dir_name + '"')
425     
426     text_to_add = ""
427     for product_name in get_SALOME_modules(config):
428         product_info = src.product.get_product_config(config, product_name)
429        
430         if src.product.product_is_smesh_plugin(product_info):
431             continue
432
433         if 'install_dir' in product_info and bool(product_info.install_dir):
434             if src.product.product_is_cpp(product_info):
435                 # cpp module
436                 for cpp_name in src.product.get_product_components(product_info):
437                     line_to_add = ("<module name=\"" + 
438                                    cpp_name + 
439                                    "\" gui=\"yes\" path=\"''' + "
440                                    "os.path.join(dir_bin_name, \"" + 
441                                    cpp_name + "\") + '''\"/>")
442             else:
443                 # regular module
444                 line_to_add = ("<module name=\"" + 
445                                product_name + 
446                                "\" gui=\"yes\" path=\"''' + "
447                                "os.path.join(dir_bin_name, \"" + 
448                                product_name + "\") + '''\"/>")
449             text_to_add += line_to_add + "\n"
450     
451     filled_text = text_to_fill.replace("TO BE FILLED 2", text_to_add)
452     
453     tmp_file_path = os.path.join(file_dir, "create_appli.py")
454     ff = open(tmp_file_path, "w")
455     ff.write(filled_text)
456     ff.close()
457     
458     # change the rights in order to make the file executable for everybody
459     os.chmod(tmp_file_path,
460              stat.S_IRUSR |
461              stat.S_IRGRP |
462              stat.S_IROTH |
463              stat.S_IWUSR |
464              stat.S_IXUSR |
465              stat.S_IXGRP |
466              stat.S_IXOTH)
467     
468     return tmp_file_path
469
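# For a hypothetical regular product "GEOM", the line injected into the template by
# product_appli_creation_script is of the form:
#   <module name="GEOM" gui="yes" path="''' + os.path.join(dir_bin_name, "GEOM") + '''"/>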
470 def binary_package(config, logger, options, tmp_working_dir):
471     '''Prepare a dictionary that stores all the needed directories and files to
472        add in a binary package.
473     
474     :param config Config: The global configuration.
475     :param logger Logger: the logging instance
476     :param options OptResult: the options of the launched command
477     :param tmp_working_dir str: The temporary local directory containing some 
478                                 specific directories or files needed in the 
479                                 binary package
480     :return: the dictionary that stores all the needed directories and files to
481              add in a binary package.
482              {label : (path_on_local_machine, path_in_archive)}
483     :rtype: dict
484     '''
485
486     # Get the list of product installation to add to the archive
487     l_products_name = sorted(config.APPLICATION.products.keys())
488     l_product_info = src.product.get_products_infos(l_products_name,
489                                                     config)
490     l_install_dir = []
491     l_source_dir = []
492     l_not_installed = []
493     l_sources_not_present = []
494     generate_mesa_launcher = False  # a flag to know if we generate a mesa launcher
495     if ("APPLICATION" in config  and
496         "properties"  in config.APPLICATION  and
497         "mesa_launcher_in_package"    in config.APPLICATION.properties  and
498         config.APPLICATION.properties.mesa_launcher_in_package == "yes") :
499             generate_mesa_launcher=True
500
501     for prod_name, prod_info in l_product_info:
502         # skip product with property not_in_package set to yes
503         if src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes":
504             continue  
505
506         # Add the sources of the products that have the property 
507         # sources_in_package : "yes"
508         if src.get_property_in_product_cfg(prod_info,
509                                            "sources_in_package") == "yes":
510             if os.path.exists(prod_info.source_dir):
511                 l_source_dir.append((prod_name, prod_info.source_dir))
512             else:
513                 l_sources_not_present.append(prod_name)
514
515         # ignore the native and fixed products for install directories
516         if (src.product.product_is_native(prod_info) 
517                 or src.product.product_is_fixed(prod_info)
518                 or not src.product.product_compiles(prod_info)):
519             continue
520         if src.product.check_installation(prod_info):
521             l_install_dir.append((prod_name, prod_info.install_dir))
522         else:
523             l_not_installed.append(prod_name)
524         
525         # Add also the cpp generated modules (if any)
526         if src.product.product_is_cpp(prod_info):
527             # cpp module
528             for name_cpp in src.product.get_product_components(prod_info):
529                 install_dir = os.path.join(config.APPLICATION.workdir,
530                                            "INSTALL", name_cpp) 
531                 if os.path.exists(install_dir):
532                     l_install_dir.append((name_cpp, install_dir))
533                 else:
534                     l_not_installed.append(name_cpp)
535         
536     # check the name of the directory that could contain the binaries
537     # from a previous detar
538     binaries_from_detar = os.path.join(config.APPLICATION.workdir, "BINARIES-" + config.VARS.dist)
539     if os.path.exists(binaries_from_detar):
540          logger.write("""
541 WARNING: existing binaries directory from previous detar installation:
542          %s
543          To make a new package from this, you have to: 
544          1) install binaries in INSTALL directory with the script "install_bin.sh" 
545             see README file for more details
546          2) or recompile everything in INSTALL with "sat compile" command 
547             this step is long, and requires some linux packages to be installed 
548             on your system\n
549 """ % binaries_from_detar)
550     
551     # Print warning or error if there are some missing products
552     if len(l_not_installed) > 0:
553         text_missing_prods = ""
554         for p_name in l_not_installed:
555             text_missing_prods += "-" + p_name + "\n"
556         if not options.force_creation:
557             msg = _("ERROR: there are missing product installations:")
558             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
559                                      text_missing_prods),
560                          1)
561             return None
562         else:
563             msg = _("WARNING: there are missing product installations:")
564             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
565                                      text_missing_prods),
566                          1)
567
568     # Do the same for sources
569     if len(l_sources_not_present) > 0:
570         text_missing_prods = ""
571         for p_name in l_sources_not_present:
572             text_missing_prods += "-" + p_name + "\n"
573         if not options.force_creation:
574             msg = _("ERROR: there are missing product sources:")
575             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
576                                      text_missing_prods),
577                          1)
578             return None
579         else:
580             msg = _("WARNING: there are missing product sources:")
581             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
582                                      text_missing_prods),
583                          1)
584  
585     # construct the name of the directory that will contain the binaries
586     binaries_dir_name = "BINARIES-" + config.VARS.dist
587     
588     # construct the correlation table between the product names, their
589     # actual install directories and their install directories in the archive
590     d_products = {}
591     for prod_name, install_dir in l_install_dir:
592         path_in_archive = os.path.join(binaries_dir_name, prod_name)
593         d_products[prod_name + " (bin)"] = (install_dir, path_in_archive)
594         
595     for prod_name, source_dir in l_source_dir:
596         path_in_archive = os.path.join("SOURCES", prod_name)
597         d_products[prod_name + " (sources)"] = (source_dir, path_in_archive)
598
599     # for packages of SALOME applications including KERNEL, 
600     # we produce a salome launcher or a virtual application (depending on salome version)
601     if 'KERNEL' in config.APPLICATION.products:
602         VersionSalome = src.get_salome_version(config)
603         # Case where SALOME has the launcher that uses the SalomeContext API
604         if VersionSalome >= 730:
605             # create the relative launcher and add it to the files to add
606             launcher_name = src.get_launcher_name(config)
607             launcher_package = produce_relative_launcher(config,
608                                                  logger,
609                                                  tmp_working_dir,
610                                                  launcher_name,
611                                                  binaries_dir_name,
612                                                  not(options.without_commercial))
613             d_products["launcher"] = (launcher_package, launcher_name)
614
615             # if the application contains mesa products, we generate in addition to the 
616             # classical salome launcher a launcher using mesa and called mesa_salome 
617             # (the mesa launcher will be used for remote usage through ssh).
618             if generate_mesa_launcher:
619                 #if there is one : store the use_mesa property
620                 restore_use_mesa_option=None
621                 if ('properties' in config.APPLICATION and 
622                     'use_mesa' in config.APPLICATION.properties):
623                     restore_use_mesa_option = config.APPLICATION.properties.use_mesa
624
625                 # activate mesa property, and generate a mesa launcher
626                 src.activate_mesa_property(config)  #activate use_mesa property
627                 launcher_mesa_name="mesa_"+launcher_name
628                 launcher_package_mesa = produce_relative_launcher(config,
629                                                      logger,
630                                                      tmp_working_dir,
631                                                      launcher_mesa_name,
632                                                      binaries_dir_name,
633                                                      not(options.without_commercial))
634                 d_products["launcher (mesa)"] = (launcher_package_mesa, launcher_mesa_name)
635
636                 # if there was a use_mesa value, we restore it
637                 # else we set it to the default value "no"
638                 if restore_use_mesa_option != None:
639                     config.APPLICATION.properties.use_mesa=restore_use_mesa_option
640                 else:
641                     config.APPLICATION.properties.use_mesa="no"
642
643             if options.sources:
644                 # if we mix binaries and sources, we add a copy of the launcher, 
645                 # prefixed with "bin", in order to avoid clashes
646                 d_products["launcher (copy)"] = (launcher_package, "bin"+launcher_name)
647         else:
648             # Provide a script for the creation of an application EDF style
649             appli_script = product_appli_creation_script(config,
650                                                         logger,
651                                                         tmp_working_dir,
652                                                         binaries_dir_name)
653             
654             d_products["appli script"] = (appli_script, "create_appli.py")
655
656     # Put also the environment file
657     env_file = produce_relative_env_files(config,
658                                            logger,
659                                            tmp_working_dir,
660                                            binaries_dir_name)
661
662     d_products["environment file"] = (env_file, "env_launch.sh")
663       
664     return d_products
665
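# Shape of the dictionary returned by binary_package() (the entries below are purely
# illustrative):
#   {"KERNEL (bin)":     ("/work/INSTALL/KERNEL",            "BINARIES-FD32/KERNEL"),
#    "launcher":         ("/work/tmp_package/salome",        "salome"),
#    "environment file": ("/work/tmp_package/env_launch.sh", "env_launch.sh")}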
666 def source_package(sat, config, logger, options, tmp_working_dir):
667     '''Prepare a dictionary that stores all the needed directories and files to
668        add in a source package.
669     
670     :param config Config: The global configuration.
671     :param logger Logger: the logging instance
672     :param options OptResult: the options of the launched command
673     :param tmp_working_dir str: The temporary local directory containing some 
674                                 specific directories or files needed in the 
675                                 binary package
676     :return: the dictionary that stores all the needed directories and files to
677              add in a source package.
678              {label : (path_on_local_machine, path_in_archive)}
679     :rtype: dict
680     '''
681     
682     d_archives, l_pinfo_vcs = {}, []  # l_pinfo_vcs must exist even in ftp mode
683     # Get all the products that are prepared using an archive
684     # unless ftp mode is specified (in this case the user of the
685     # archive will get the sources through the ftp mode of sat prepare)
686     if not options.ftp:
687         logger.write("Find archive products ... ")
688         d_archives, l_pinfo_vcs = get_archives(config, logger)
689         logger.write("Done\n")
690
691     d_archives_vcs = {}
692     if not options.with_vcs and len(l_pinfo_vcs) > 0:
693         # Make archives with the products that are not prepared using an archive
694         # (git, cvs, svn, etc)
695         logger.write("Construct archives for vcs products ... ")
696         d_archives_vcs = get_archives_vcs(l_pinfo_vcs,
697                                           sat,
698                                           config,
699                                           logger,
700                                           tmp_working_dir)
701         logger.write("Done\n")
702
703     # Create a project
704     logger.write("Create the project ... ")
705     d_project = create_project_for_src_package(config,
706                                                tmp_working_dir,
707                                                options.with_vcs,
708                                                options.ftp)
709     logger.write("Done\n")
710     
711     # Add salomeTools
712     tmp_sat = add_salomeTools(config, tmp_working_dir)
713     d_sat = {"salomeTools" : (tmp_sat, "salomeTools")}
714     
715     # Add a sat symbolic link if not win
716     if not src.architecture.is_windows():
717         tmp_satlink_path = os.path.join(tmp_working_dir, 'sat')
718         try:
719             t = os.getcwd()
720         except:
721             # In the jobs, os.getcwd() can fail
722             t = config.LOCAL.workdir
723         os.chdir(tmp_working_dir)
724         if os.path.lexists(tmp_satlink_path):
725             os.remove(tmp_satlink_path)
726         os.symlink(os.path.join('salomeTools', 'sat'), 'sat')
727         os.chdir(t)
728         
729         d_sat["sat link"] = (tmp_satlink_path, "sat")
730     
731     d_source = src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
732     return d_source
733
734 def get_archives(config, logger):
735     '''Find all the products that are retrieved using an archive and all the products
736        that are retrieved from a vcs (git, cvs, svn) repository.
737     
738     :param config Config: The global configuration.
739     :param logger Logger: the logging instance
740     :return: the dictionary {name_product : 
741              (local path of its archive, path in the package of its archive )}
742              and the list of specific configuration corresponding to the vcs 
743              products
744     :rtype: (Dict, List)
745     '''
746     # Get the list of product informations
747     l_products_name = config.APPLICATION.products.keys()
748     l_product_info = src.product.get_products_infos(l_products_name,
749                                                     config)
750     d_archives = {}
751     l_pinfo_vcs = []
752     for p_name, p_info in l_product_info:
753         # skip product with property not_in_package set to yes
754         if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
755             continue  
756         # ignore the native and fixed products
757         if (src.product.product_is_native(p_info) 
758                 or src.product.product_is_fixed(p_info)):
759             continue
760         if p_info.get_source == "archive":
761             archive_path = p_info.archive_info.archive_name
762             archive_name = os.path.basename(archive_path)
763         else:
764             l_pinfo_vcs.append((p_name, p_info))
765             continue  # a vcs product has no local archive to register here
766         d_archives[p_name] = (archive_path,
767                               os.path.join(ARCHIVE_DIR, archive_name))
768     return d_archives, l_pinfo_vcs
769
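# Example of what get_archives() returns (hypothetical product names and paths):
#   d_archives  == {"boost": ("/work/ARCHIVES/boost_1_58_0.tar.gz",
#                             "ARCHIVES/boost_1_58_0.tar.gz")}
#   l_pinfo_vcs == [("KERNEL", kernel_info)]   # products fetched from git/cvs/svn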
770 def add_salomeTools(config, tmp_working_dir):
771     '''Prepare a version of salomeTools that has a specific local.pyconf file 
772        configured for a source package.
773
774     :param config Config: The global configuration.
775     :param tmp_working_dir str: The temporary local directory containing some 
776                                 specific directories or files needed in the 
777                                 source package
778     :return: The path to the local salomeTools directory to add in the package
779     :rtype: str
780     '''
781     # Copy sat in the temporary working directory
782     sat_tmp_path = src.Path(os.path.join(tmp_working_dir, "salomeTools"))
783     sat_running_path = src.Path(config.VARS.salometoolsway)
784     sat_running_path.copy(sat_tmp_path)
785     
786     # Update the local.pyconf file that contains the path to the project
787     local_pyconf_name = "local.pyconf"
788     local_pyconf_dir = os.path.join(tmp_working_dir, "salomeTools", "data")
789     local_pyconf_file = os.path.join(local_pyconf_dir, local_pyconf_name)
790     # Remove the .pyconf file in the root directory of salomeTools if there is
791     # any. (For example when launching jobs, a pyconf file describing the jobs 
792     # can be here and is not useful) 
793     files_or_dir_SAT = os.listdir(os.path.join(tmp_working_dir, "salomeTools"))
794     for file_or_dir in files_or_dir_SAT:
795         if file_or_dir.endswith(".pyconf") or file_or_dir.endswith(".txt"):
796             file_path = os.path.join(tmp_working_dir,
797                                      "salomeTools",
798                                      file_or_dir)
799             os.remove(file_path)
800     
801     ff = open(local_pyconf_file, "w")
802     ff.write(LOCAL_TEMPLATE)
803     ff.close()
804     
805     return sat_tmp_path.path
806
807 def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir):
808     '''For source packages that require all products to be retrieved as an
809        archive, one has to create an archive for the vcs products.
810        So this method calls the clean and source commands of sat and then creates
811        the archives.
812
813     :param l_pinfo_vcs List: The list of specific configuration corresponding to
814                              each vcs product
815     :param sat Sat: The Sat instance that can be called to clean and source the
816                     products
817     :param config Config: The global configuration.
818     :param logger Logger: the logging instance
819     :param tmp_working_dir str: The temporary local directory containing some 
820                                 specific directories or files needed in the 
821                                 source package
822     :return: the dictionary that stores all the archives to add in the source 
823              package. {label : (path_on_local_machine, path_in_archive)}
824     :rtype: dict
825     '''
826     # clean the source directory of all the vcs products, then use the source 
827     # command and thus construct an archive that will not contain the patches
828     l_prod_names = [pn for pn, __ in l_pinfo_vcs]
829     if False: # clean is dangerous in user/SOURCES, fixed in tmp_local_working_dir
830       logger.write(_("\nclean sources\n"))
831       args_clean = config.VARS.application
832       args_clean += " --sources --products "
833       args_clean += ",".join(l_prod_names)
834       logger.write("WARNING: get_archives_vcs clean\n         '%s'\n" % args_clean, 1)
835       sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)
836     if True:
837       # source
838       logger.write(_("get sources\n"))
839       args_source = config.VARS.application
840       args_source += " --products "
841       args_source += ",".join(l_prod_names)
842       svgDir = sat.cfg.APPLICATION.workdir
843       tmp_local_working_dir = os.path.join(sat.cfg.APPLICATION.workdir, "tmp_package")  # to avoid too many big files in /tmp
844       sat.cfg.APPLICATION.workdir = tmp_local_working_dir
845       # DBG.write("SSS sat config.APPLICATION.workdir", sat.cfg.APPLICATION, True)
846       # DBG.write("sat config id", id(sat.cfg), True)
847       # note: config is not the same id() as the one used by sat.source()
848       # sat.source(args_source, batch=True, verbose=5, logger_add_link = logger)
849       import source
850       source.run(args_source, sat, logger) #use this mode as runner.cfg reference
851       
852       # make the new archives
853       d_archives_vcs = {}
854       for pn, pinfo in l_pinfo_vcs:
855           path_archive = make_archive(pn, pinfo, tmp_local_working_dir)
856           logger.write("make archive vcs '%s'\n" % path_archive)
857           d_archives_vcs[pn] = (path_archive,
858                                 os.path.join(ARCHIVE_DIR, pn + ".tgz"))
859       sat.cfg.APPLICATION.workdir = svgDir
860       # DBG.write("END sat config", sat.cfg.APPLICATION, True)
861     return d_archives_vcs
862
863 def make_archive(prod_name, prod_info, where):
864     '''Create an archive of a product from its source directory.
865
866     :param prod_name str: The name of the product.
867     :param prod_info Config: The specific configuration corresponding to the 
868                              product
869     :param where str: The path of the directory where to put the resulting
870                       archive
871     :return: The path of the resulting archive
872     :rtype: str
873     '''
874     path_targz_prod = os.path.join(where, prod_name + ".tgz")
875     tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
876     local_path = prod_info.source_dir
877     tar_prod.add(local_path,
878                  arcname=prod_name,
879                  exclude=exclude_VCS_and_extensions)
880     tar_prod.close()
881     return path_targz_prod       
882
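# A minimal usage sketch of make_archive (paths and product name are hypothetical):
#   path = make_archive("KERNEL", kernel_info, "/work/tmp_package")
#   # -> "/work/tmp_package/KERNEL.tgz", with .git/.svn entries filtered out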
883 def create_project_for_src_package(config, tmp_working_dir, with_vcs, with_ftp):
884     '''Create a specific project for a source package.
885
886     :param config Config: The global configuration.
887     :param tmp_working_dir str: The temporary local directory containing some 
888                                 specific directories or files needed in the 
889                                 source package
890     :param with_vcs boolean: True if the package is with vcs products (not 
891                              transformed into archive products)
892     :param with_ftp boolean: True if the package use ftp servers to get archives
893     :return: The dictionary 
894              {"project" : (produced project, project path in the archive)}
895     :rtype: Dict
896     '''
897
898     # Create in the working temporary directory the full project tree
899     project_tmp_dir = os.path.join(tmp_working_dir, PROJECT_DIR)
900     products_pyconf_tmp_dir = os.path.join(project_tmp_dir,
901                                          "products")
902     compil_scripts_tmp_dir = os.path.join(project_tmp_dir,
903                                          "products",
904                                          "compil_scripts")
905     env_scripts_tmp_dir = os.path.join(project_tmp_dir,
906                                          "products",
907                                          "env_scripts")
908     patches_tmp_dir = os.path.join(project_tmp_dir,
909                                          "products",
910                                          "patches")
911     application_tmp_dir = os.path.join(project_tmp_dir,
912                                          "applications")
913     for directory in [project_tmp_dir,
914                       compil_scripts_tmp_dir,
915                       env_scripts_tmp_dir,
916                       patches_tmp_dir,
917                       application_tmp_dir]:
918         src.ensure_path_exists(directory)
919
920     # Create the pyconf that contains the information of the project
921     project_pyconf_name = "project.pyconf"        
922     project_pyconf_file = os.path.join(project_tmp_dir, project_pyconf_name)
923     ff = open(project_pyconf_file, "w")
924     ff.write(PROJECT_TEMPLATE)
925     if with_ftp:
926         ftp_path='ARCHIVEFTP : "'+config.PATHS.ARCHIVEFTP[0]
927         for ftpserver in config.PATHS.ARCHIVEFTP[1:]:
928             ftp_path=ftp_path+":"+ftpserver
929         ftp_path+='"'
930         ff.write("# ftp servers where to search for prerequisite archives\n")
931         ff.write(ftp_path)
932
933     ff.close()
934     
935     # Loop over the products to get their pyconf and all the scripts 
936     # (compilation, environment, patches)
937     # and create the pyconf file to add to the project
938     lproducts_name = config.APPLICATION.products.keys()
939     l_products = src.product.get_products_infos(lproducts_name, config)
940     for p_name, p_info in l_products:
941         # skip product with property not_in_package set to yes
942         if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
943             continue  
944         find_product_scripts_and_pyconf(p_name,
945                                         p_info,
946                                         config,
947                                         with_vcs,
948                                         compil_scripts_tmp_dir,
949                                         env_scripts_tmp_dir,
950                                         patches_tmp_dir,
951                                         products_pyconf_tmp_dir)
952     
953     find_application_pyconf(config, application_tmp_dir)
954     
955     d_project = {"project" : (project_tmp_dir, PROJECT_DIR )}
956     return d_project
957
958 def find_product_scripts_and_pyconf(p_name,
959                                     p_info,
960                                     config,
961                                     with_vcs,
962                                     compil_scripts_tmp_dir,
963                                     env_scripts_tmp_dir,
964                                     patches_tmp_dir,
965                                     products_pyconf_tmp_dir):
966     '''Create a specific pyconf file for a given product. Get its environment 
967        script, its compilation script and patches and put it in the temporary
968        working directory. This method is used in the source package in order to
969        construct the specific project.
970
971     :param p_name str: The name of the product.
972     :param p_info Config: The specific configuration corresponding to the 
973                              product
974     :param config Config: The global configuration.
975     :param with_vcs boolean: True if the package is with vcs products (not 
976                              transformed into archive products)
977     :param compil_scripts_tmp_dir str: The path to the temporary compilation 
978                                        scripts directory of the project.
979     :param env_scripts_tmp_dir str: The path to the temporary environment script 
980                                     directory of the project.
981     :param patches_tmp_dir str: The path to the temporary patch scripts 
982                                 directory of the project.
983     :param products_pyconf_tmp_dir str: The path to the temporary product 
984                                         scripts directory of the project.
985     '''
986     
987     # read the pyconf of the product
988     product_pyconf_path = src.find_file_in_lpath(p_name + ".pyconf",
989                                            config.PATHS.PRODUCTPATH)
990     product_pyconf_cfg = src.pyconf.Config(product_pyconf_path)
991
992     # find the compilation script if any
993     if src.product.product_has_script(p_info):
994         compil_script_path = src.Path(p_info.compil_script)
995         compil_script_path.copy(compil_scripts_tmp_dir)
996         product_pyconf_cfg[p_info.section].compil_script = os.path.basename(
997                                                     p_info.compil_script)
998     # find the environment script if any
999     if src.product.product_has_env_script(p_info):
1000         env_script_path = src.Path(p_info.environ.env_script)
1001         env_script_path.copy(env_scripts_tmp_dir)
1002         product_pyconf_cfg[p_info.section].environ.env_script = os.path.basename(
1003                                                 p_info.environ.env_script)
1004     # find the patches if any
1005     if src.product.product_has_patches(p_info):
1006         patches = src.pyconf.Sequence()
1007         for patch_path in p_info.patches:
1008             p_path = src.Path(patch_path)
1009             p_path.copy(patches_tmp_dir)
1010             patches.append(os.path.basename(patch_path), "")
1011
1012         product_pyconf_cfg[p_info.section].patches = patches
1013     
1014     if with_vcs:
1015         # put in the pyconf file the resolved values
1016         for info in ["git_info", "cvs_info", "svn_info"]:
1017             if info in p_info:
1018                 for key in p_info[info]:
1019                     product_pyconf_cfg[p_info.section][info][key] = p_info[
1020                                                                       info][key]
1021     else:
1022         # if the product is not an archive product, turn it into one.
1023         if src.product.product_is_vcs(p_info):
1024             product_pyconf_cfg[p_info.section].get_source = "archive"
1025             if not "archive_info" in product_pyconf_cfg[p_info.section]:
1026                 product_pyconf_cfg[p_info.section].addMapping("archive_info",
1027                                         src.pyconf.Mapping(product_pyconf_cfg),
1028                                         "")
1029             product_pyconf_cfg[p_info.section
1030                               ].archive_info.archive_name = p_info.name + ".tgz"
1031     
1032     # write the pyconf file to the temporary project location
1033     product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir,
1034                                            p_name + ".pyconf")
1035     ff = open(product_tmp_pyconf_path, 'w')
1036     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1037     product_pyconf_cfg.__save__(ff, 1)
1038     ff.close()
1039
1040 def find_application_pyconf(config, application_tmp_dir):
1041     '''Find the application pyconf file and put it in the specific temporary 
1042        directory containing the specific project of a source package.
1043
1044     :param config Config: The global configuration.
1045     :param application_tmp_dir str: The path to the temporary application 
1046                                        scripts directory of the project.
1047     '''
1048     # read the pyconf of the application
1049     application_name = config.VARS.application
1050     application_pyconf_path = src.find_file_in_lpath(
1051                                             application_name + ".pyconf",
1052                                             config.PATHS.APPLICATIONPATH)
1053     application_pyconf_cfg = src.pyconf.Config(application_pyconf_path)
1054     
1055     # Change the workdir
1056     application_pyconf_cfg.APPLICATION.workdir = src.pyconf.Reference(
1057                                     application_pyconf_cfg,
1058                                     src.pyconf.DOLLAR,
1059                                     'VARS.salometoolsway + $VARS.sep + ".."')
1060
1061     # Prevent compilation in base
1062     application_pyconf_cfg.APPLICATION.no_base = "yes"
1063     
1064     # remove products that are not in config (they were filtered out by --without_properties)
1065     for product_name in application_pyconf_cfg.APPLICATION.products.keys():
1066         if product_name not in config.APPLICATION.products.keys():
1067             application_pyconf_cfg.APPLICATION.products.__delitem__(product_name)
1068
1069     # write the pyconf file to the temporary application location
1070     application_tmp_pyconf_path = os.path.join(application_tmp_dir,
1071                                                application_name + ".pyconf")
1072
1073     ff = open(application_tmp_pyconf_path, 'w')
1074     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1075     application_pyconf_cfg.__save__(ff, 1)
1076     ff.close()
1077
1078 def project_package(config, name_project, project_file_path, ftp_mode, tmp_working_dir, logger):
1079     '''Prepare a dictionary that stores all the needed directories and files to
1080        add in a project package.
1081     
1082     :param project_file_path str: The path to the local project.
1083     :param ftp_mode boolean: Do not embed archives, the archive will rely on ftp mode to retrieve them.
1084     :param tmp_working_dir str: The temporary local directory containing some 
1085                                 specific directories or files needed in the 
1086                                 project package
1087     :return: the dictionary that stores all the needed directories and files to
1088              add in a project package.
1089              {label : (path_on_local_machine, path_in_archive)}
1090     :rtype: dict
1091     '''
1092     d_project = {}
1093     # Read the project file and get the directories to add to the package
1094     
1095     try: 
1096       project_pyconf_cfg = config.PROJECTS.projects.__getattr__(name_project)
1097     except:
1098       logger.write("""
1099 WARNING: nonexistent config.PROJECTS.projects.%s, trying now to read from:\n%s\n""" % (name_project, project_file_path))
1100       project_pyconf_cfg = src.pyconf.Config(project_file_path)
1101       project_pyconf_cfg.PWD = os.path.dirname(project_file_path)
1102     
1103     paths = {"APPLICATIONPATH" : "applications",
1104              "PRODUCTPATH" : "products",
1105              "JOBPATH" : "jobs",
1106              "MACHINEPATH" : "machines"}
1107     if not ftp_mode:
1108         paths["ARCHIVEPATH"] = "archives"
1109
1110     # Loop over the project paths and add it
1111     for path in paths:
1112         if path not in project_pyconf_cfg:
1113             continue
1114         # Add the directory to the files to add in the package
1115         d_project[path] = (project_pyconf_cfg[path], paths[path])
1116         # Modify the value of the path in the package
1117         project_pyconf_cfg[path] = src.pyconf.Reference(
1118                                     project_pyconf_cfg,
1119                                     src.pyconf.DOLLAR,
1120                                     'project_path + "/' + paths[path] + '"')
1121     
1122     # Modify some values
1123     if "project_path" not in project_pyconf_cfg:
1124         project_pyconf_cfg.addMapping("project_path",
1125                                       src.pyconf.Mapping(project_pyconf_cfg),
1126                                       "")
1127     project_pyconf_cfg.project_path = src.pyconf.Reference(project_pyconf_cfg,
1128                                                            src.pyconf.DOLLAR,
1129                                                            'PWD')
1130     
1131     # Write the project pyconf file
1132     project_file_name = os.path.basename(project_file_path)
1133     project_pyconf_tmp_path = os.path.join(tmp_working_dir, project_file_name)
1134     ff = open(project_pyconf_tmp_path, 'w')
1135     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1136     project_pyconf_cfg.__save__(ff, 1)
1137     ff.close()
1138     d_project["Project hat file"] = (project_pyconf_tmp_path, project_file_name)
1139     
1140     return d_project
1141
1142 def add_readme(config, options, where):
1143     readme_path = os.path.join(where, "README")
1144     with codecs.open(readme_path, "w", 'utf-8') as f:
1145
1146         # templates for building the header
1147         readme_header="""
1148 # This package was generated with sat $version
1149 # Date: $date
1150 # User: $user
1151 # Distribution : $dist
1152
1153 In the following, $$ROOT represents the directory where you have installed 
1154 SALOME (the directory where this file is located).
1155
1156 """
1157         readme_compilation_with_binaries="""
1158
1159 compilation based on the binaries used as prerequisites
1160 =======================================================
1161
1162 If you fail to compile the complete application (for example because
1163 you are not root on your system and cannot install missing packages), you
1164 may try a partial compilation based on the binaries.
1165 For that it is necessary to copy the binaries from BINARIES to INSTALL,
1166 and do some substitutions on cmake and .la files (replace the build directories
1167 with local paths).
1168 The procedure to do it is:
1169  1) Remove or rename INSTALL directory if it exists
1170  2) Execute the shell script install_bin.sh:
1171  > cd $ROOT
1172  > ./install_bin.sh
1173  3) Use salomeTools (as explained in the Sources section) and compile only the 
1174     modules you need (with the -p option)
1175
1176 """
1177         readme_header_tpl=string.Template(readme_header)
1178         readme_template_path_bin = os.path.join(config.VARS.internal_dir,
1179                 "README_BIN.template")
1180         readme_template_path_bin_launcher = os.path.join(config.VARS.internal_dir,
1181                 "README_LAUNCHER.template")
1182         readme_template_path_bin_virtapp = os.path.join(config.VARS.internal_dir,
1183                 "README_BIN_VIRTUAL_APP.template")
1184         readme_template_path_src = os.path.join(config.VARS.internal_dir,
1185                 "README_SRC.template")
1186         readme_template_path_pro = os.path.join(config.VARS.internal_dir,
1187                 "README_PROJECT.template")
1188         readme_template_path_sat = os.path.join(config.VARS.internal_dir,
1189                 "README_SAT.template")
1190
1191         # prepare substitution dictionary
1192         d = dict()
1193         d['user'] = config.VARS.user
1194         d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
1195         d['version'] = config.INTERNAL.sat_version
1196         d['dist'] = config.VARS.dist
1197         f.write(readme_header_tpl.substitute(d)) # write the general header (common)
1198
1199         if options.binaries or options.sources:
1200             d['application'] = config.VARS.application
1201             f.write("# Application: " + d['application'] + "\n")
1202             if 'KERNEL' in config.APPLICATION.products:
1203                 VersionSalome = src.get_salome_version(config)
1204                 # Case where SALOME has the launcher that uses the SalomeContext API
1205                 if VersionSalome >= 730:
1206                     d['launcher'] = config.APPLICATION.profile.launcher_name
1207                 else:
1208                     d['virtual_app'] = 'runAppli' # this info is not used for now
1209
1210         # write the specific sections
1211         if options.binaries:
1212             f.write(src.template.substitute(readme_template_path_bin, d))
1213             if "virtual_app" in d:
1214                 f.write(src.template.substitute(readme_template_path_bin_virtapp, d))
1215             if "launcher" in d:
1216                 f.write(src.template.substitute(readme_template_path_bin_launcher, d))
1217
1218         if options.sources:
1219             f.write(src.template.substitute(readme_template_path_src, d))
1220
1221         if options.binaries and options.sources:
1222             f.write(readme_compilation_with_binaries)
1223
1224         if options.project:
1225             f.write(src.template.substitute(readme_template_path_pro, d))
1226
1227         if options.sat:
1228             f.write(src.template.substitute(readme_template_path_sat, d))
1229     
1230     return readme_path
1231
1232 def update_config(config, prop, value):
1233     '''Remove from config.APPLICATION.products the products that have the given property set to the given value.
1234     
1235     :param config Config: The global config.
1236     :param prop str: The name of the property to filter on.
1237     :param value str: The value of the property that triggers the removal.
1238     '''
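         # Illustrative use (see the calls in run() below):
         #   update_config(runner.cfg, "not_in_package", "yes")
         # removes every product whose configuration sets the not_in_package property to "yes".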
1239     src.check_config_has_application(config)
1240     l_product_to_remove = []
1241     for product_name in config.APPLICATION.products.keys():
1242         prod_cfg = src.product.get_product_config(config, product_name)
1243         if src.get_property_in_product_cfg(prod_cfg, prop) == value:
1244             l_product_to_remove.append(product_name)
1245     for product_name in l_product_to_remove:
1246         del config.APPLICATION.products[product_name]
1247
1248 def description():
1249     '''method that is called when salomeTools is called with --help option.
1250     
1251     :return: The text to display for the package command description.
1252     :rtype: str
1253     '''
1254     return _("""
1255 The package command creates a tar file archive of an application.
1256 There are four kinds of archive, which can be mixed:
1257
1258  1 - The binary archive.
1259      It contains the product installation directories plus a launcher.
1260  2 - The sources archive.
1261      It contains the product archives and a project (the application plus salomeTools).
1262  3 - The project archive.
1263      It contains a project (give the project file path as argument).
1264  4 - The salomeTools archive.
1265      It contains the salomeTools utility code.
1266
1267 Example:
1268  >> sat package SALOME-master --binaries --sources""")
1269   
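     # Illustrative invocations for the four kinds of archive described above
     # (long option names as used in the error messages of run() below):
     #   sat package SALOME-master --binaries
     #   sat package SALOME-master --sources
     #   sat package --project <project_name>
     #   sat package --salometools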
1270 def run(args, runner, logger):
1271     '''method that is called when salomeTools is called with the package parameter.
1272     '''
1273     
1274     # Parse the options
1275     (options, args) = parser.parse_args(args)
1276
1277     # Collect the package type options that were requested (several types can be combined)
1278     all_option_types = (options.binaries,
1279                         options.sources,
1280                         options.project not in ["", None],
1281                         options.sat)
1282
1283     # Error if no package type option was given
1284     if all_option_types.count(True) == 0:
1285         msg = _("Error: Specify a type for the package\nUse at least one of the "
1286                 "following options: --binaries, --sources, --project or"
1287                 " --salometools")
1288         logger.write(src.printcolors.printcError(msg), 1)
1289         logger.write("\n", 1)
1290         return 1
1291     
1292     # The default directory where to put the package if it is neither binary nor source
1293     package_default_path = runner.cfg.LOCAL.workdir
1294     
1295     # if the package contains binaries or sources:
1296     if options.binaries or options.sources:
1297         # Check that the command has been called with an application
1298         src.check_config_has_application(runner.cfg)
1299
1300         # Display information
1301         logger.write(_("Packaging application %s\n") % src.printcolors.printcLabel(
1302                                                     runner.cfg.VARS.application), 1)
1303         
1304         # Get the default directory where to put the packages
1305         package_default_path = os.path.join(runner.cfg.APPLICATION.workdir, "PACKAGE")
1306         src.ensure_path_exists(package_default_path)
1307         
1308     # if the package contains a project:
1309     if options.project:
1310         # check that the project is visible to SAT
1311         projectNameFile = options.project + ".pyconf"
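             # e.g. "--project MyProject" (illustrative name) is matched against a path
             # ending in MyProject.pyconf among runner.cfg.PROJECTS.project_file_paths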
1312         foundProject = None
1313         for i in runner.cfg.PROJECTS.project_file_paths:
1314             baseName = os.path.basename(i)
1315             if baseName == projectNameFile:
1316                 foundProject = i
1317                 break
1318
1319         if foundProject is None:
1320             local_path = os.path.join(runner.cfg.VARS.salometoolsway, "data", "local.pyconf")
1321             msg = _("""ERROR: the project %(1)s is not visible to salomeTools.
1322 Known projects are:
1323 %(2)s
1324
1325 Please add it to the file:
1326 %(3)s""" % \
1327                     {"1": options.project, "2": "\n  ".join(runner.cfg.PROJECTS.project_file_paths), "3": local_path})
1328             logger.write(src.printcolors.printcError(msg), 1)
1329             logger.write("\n", 1)
1330             return 1
1331         else:
1332             options.project_file_path = foundProject
1333             src.printcolors.print_value(logger, "Project path", options.project_file_path, 2)
1334     
1335     # Remove the products that are filtered by the --without_properties option
1336     if options.without_properties:
1337         app = runner.cfg.APPLICATION
1338         logger.trace("without_properties all products:\n %s\n" % PP.pformat(sorted(app.products.keys())))
1339         prop, value = options.without_properties
1340         update_config(runner.cfg, prop, value)
1341         logger.warning("without_properties selected products:\n %s\n" % PP.pformat(sorted(app.products.keys())))
1342
1343     # Remove from config the products that have the not_in_package property
1344     update_config(runner.cfg, "not_in_package", "yes")
1345     
1346     # get the name of the archive or build it
1347     if options.name:
1348         if os.path.basename(options.name) == options.name:
1349             # only a name (not a path)
1350             archive_name = options.name           
1351             dir_name = package_default_path
1352         else:
1353             archive_name = os.path.basename(options.name)
1354             dir_name = os.path.dirname(options.name)
1355         
1356         # strip the archive extension, if any
1357         if archive_name.endswith(".tgz"):
1358             archive_name = archive_name[:-len(".tgz")]
1359         if archive_name.endswith(".tar.gz"):
1360             archive_name = archive_name[:-len(".tar.gz")]
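             # e.g. "--name foo", "--name foo.tgz" and "--name foo.tar.gz" all lead to the
             # same foo.tgz archive, since the .tgz extension is re-added below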
1361         
1362     else:
1363         archive_name=""
1364         dir_name = package_default_path
1365         if options.binaries or options.sources:
1366             archive_name = runner.cfg.APPLICATION.name
1367
1368         if options.binaries:
1369             archive_name += "-"+runner.cfg.VARS.dist
1370             
1371         if options.sources:
1372             archive_name += "-SRC"
1373             if options.with_vcs:
1374                 archive_name += "-VCS"
1375
1376         if options.project:
1377             project_name = options.project
1378             archive_name += ("PROJECT-" + project_name)
1379  
1380         if options.sat:
1381             archive_name += ("salomeTools_" + runner.cfg.INTERNAL.sat_version)
1382         if len(archive_name)==0: # no option worked 
1383             msg = _("Error: Cannot name the archive\n"
1384                     " check that at least one of the following options was "
1385                     "selected: --binaries, --sources, --project or"
1386                     " --salometools")
1387             logger.write(src.printcolors.printcError(msg), 1)
1388             logger.write("\n", 1)
1389             return 1
1390  
1391     path_targz = os.path.join(dir_name, archive_name + ".tgz")
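         # e.g. for "sat package SALOME-master --binaries" this gives something like
         # <workdir>/PACKAGE/SALOME-master-<dist>.tgz (illustrative; <dist> comes from VARS.dist)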
1392     
1393     src.printcolors.print_value(logger, "Package path", path_targz, 2)
1394
1395     # Create a working directory for all files that are produced during the
1396     # package creation and that will be removed at the end of the command
1397     tmp_working_dir = os.path.join(runner.cfg.VARS.tmp_root, runner.cfg.VARS.datehour)
1398     src.ensure_path_exists(tmp_working_dir)
1399     logger.write("\n", 5)
1400     logger.write(_("The temporary working directory: %s\n" % tmp_working_dir),5)
1401     
1402     logger.write("\n", 3)
1403
1404     msg = _("Preparation of files to add to the archive")
1405     logger.write(src.printcolors.printcLabel(msg), 2)
1406     logger.write("\n", 2)
1407     
1408     d_files_to_add={}  # content of the archive: label -> (local path, path in the archive)
1409
1410     # a dict to hold the paths that will need to be substituted for user recompilations
1411     d_paths_to_substitute={}
1412
1413     if options.binaries:
1414         d_bin_files_to_add = binary_package(runner.cfg,
1415                                             logger,
1416                                             options,
1417                                             tmp_working_dir)
1418         # for each binaries dir, store the substitution that will be required
1419         # for extra compilations
1420         for key in d_bin_files_to_add:
1421             if key.endswith("(bin)"):
1422                 source_dir = d_bin_files_to_add[key][0]
1423                 path_in_archive = d_bin_files_to_add[key][1].replace("BINARIES-" + runner.cfg.VARS.dist,"INSTALL")
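                     # e.g. BINARIES-<dist>/KERNEL ends up as INSTALL/KERNEL in the archive
                     # (KERNEL is an illustrative product name; <dist> comes from VARS.dist)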
1424                 if os.path.basename(source_dir)==os.path.basename(path_in_archive):
1425                     # if basename is the same we will just substitute the dirname 
1426                     d_paths_to_substitute[os.path.dirname(source_dir)]=\
1427                         os.path.dirname(path_in_archive)
1428                 else:
1429                     d_paths_to_substitute[source_dir]=path_in_archive
1430
1431         d_files_to_add.update(d_bin_files_to_add)
1432
1433     if options.sources:
1434         d_files_to_add.update(source_package(runner,
1435                                         runner.cfg,
1436                                         logger, 
1437                                         options,
1438                                         tmp_working_dir))
1439         if options.binaries:
1440             # for archives with both binaries and sources we provide a shell script
1441             # that installs the binaries for compilation
1442             file_install_bin=produce_install_bin_file(runner.cfg,logger,
1443                                                       tmp_working_dir,
1444                                                       d_paths_to_substitute,
1445                                                       "install_bin.sh")
1446             d_files_to_add.update({"install_bin" : (file_install_bin, "install_bin.sh")})
1447             logger.write("substitutions that need to be done later:\n", 5)
1448             logger.write(str(d_paths_to_substitute), 5)
1449             logger.write("\n", 5)
1450     else:
1451         # the --salometools option is not considered when --sources is selected, as the
1452         # sources archive already includes salomeTools!
1453         if options.sat:
1454             d_files_to_add.update({"salomeTools" : (runner.cfg.VARS.salometoolsway, "")})
1455         
1456     if options.project:
1457         DBG.write("config for package %s" % options.project, runner.cfg)
1458         d_files_to_add.update(project_package(runner.cfg, options.project, options.project_file_path, options.ftp, tmp_working_dir, logger))
1459
1460     if not(d_files_to_add):
1461         msg = _("Error: Empty dictionary to build the archive!\n")
1462         logger.write(src.printcolors.printcError(msg), 1)
1463         logger.write("\n", 1)
1464         return 1
1465
1466     # Add the README file in the package
1467     local_readme_tmp_path = add_readme(runner.cfg, options, tmp_working_dir)
1468     d_files_to_add["README"] = (local_readme_tmp_path, "README")
1469
1470     # Add the additional files of option add_files
1471     if options.add_files:
1472         for file_path in options.add_files:
1473             if not os.path.exists(file_path):
1474                 logger.write(_("WARNING: the file %s is not accessible.\n" % file_path), 1)
1475                 continue
1476             file_name = os.path.basename(file_path)
1477             d_files_to_add[file_name] = (file_path, file_name)
1478
1479     logger.write("\n", 2)
1480     logger.write(src.printcolors.printcLabel(_("Actually do the package")), 2)
1481     logger.write("\n", 2)
1482     logger.write("\nfiles and directories to add:\n%s\n\n" % PP.pformat(d_files_to_add), 5)
1483
1484     res = 0
1485     try:
1486         # Create the tarfile object
1487         tar = tarfile.open(path_targz, mode='w:gz')
1488         
1489         # filtering function that excludes VCS directories and ignored file extensions
1490         filter_function = exclude_VCS_and_extensions
1491
1492         # Add the files to the tarfile object
1493         res = add_files(tar, archive_name, d_files_to_add, logger, f_exclude=filter_function)
1494         tar.close()
1495     except KeyboardInterrupt:
1496         logger.write(src.printcolors.printcError("\nERROR: forced interruption\n"), 1)
1497         logger.write(_("Removing the temporary working directory '%s'... ") % tmp_working_dir, 1)
1498         # remove the working directory
1499         shutil.rmtree(tmp_working_dir)
1500         logger.write(_("OK"), 1)
1501         logger.write(_("\n"), 1)
1502         return 1
1503     
1504     # case where there is no application, e.g. only sat is packaged ('sat package -t')
1505     try:
1506         app = runner.cfg.APPLICATION
1507     except Exception:
1508         app = None
1509
1510     # unconditionally remove the tmp_local_working_dir
1511     if app is not None:
1512         tmp_local_working_dir = os.path.join(app.workdir, "tmp_package")
1513         if os.path.isdir(tmp_local_working_dir):
1514             shutil.rmtree(tmp_local_working_dir)
1515
1516     # to be decided: whether to effectively remove tmp_working_dir
1517     DBG.tofix("make shutil.rmtree('%s') effective" % tmp_working_dir, "", DBG.isDeveloper())
1518     
1519     # Print again the path of the package
1520     logger.write("\n", 2)
1521     src.printcolors.print_value(logger, "Package path", path_targz, 2)
1522     
1523     return res