tools/sat.git: commands/package.py
1 #!/usr/bin/env python
2 #-*- coding:utf-8 -*-
3 #  Copyright (C) 2010-2012  CEA/DEN
4 #
5 #  This library is free software; you can redistribute it and/or
6 #  modify it under the terms of the GNU Lesser General Public
7 #  License as published by the Free Software Foundation; either
8 #  version 2.1 of the License.
9 #
10 #  This library is distributed in the hope that it will be useful,
11 #  but WITHOUT ANY WARRANTY; without even the implied warranty of
12 #  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13 #  Lesser General Public License for more details.
14 #
15 #  You should have received a copy of the GNU Lesser General Public
16 #  License along with this library; if not, write to the Free Software
17 #  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
18
19 import os
20 import stat
21 import shutil
22 import datetime
23 import tarfile
24 import codecs
25 import string
26 import pprint as PP
27
28 import src
29
30 from application import get_SALOME_modules
31 import src.debug as DBG
32
33 BINARY = "binary"
34 SOURCE = "Source"
35 PROJECT = "Project"
36 SAT = "Sat"
37
38 ARCHIVE_DIR = "ARCHIVES"
39 PROJECT_DIR = "PROJECT"
40
41 IGNORED_DIRS = [".git", ".svn"]
42 IGNORED_EXTENSIONS = []
43
44 PROJECT_TEMPLATE = """#!/usr/bin/env python
45 #-*- coding:utf-8 -*-
46
47 # The path to the archive root directory
48 root_path : $PWD + "/../"
49 # path to the PROJECT
50 project_path : $PWD + "/"
51
52 # Where to search the archives of the products
53 ARCHIVEPATH : $root_path + "ARCHIVES"
54 # Where to search the pyconf of the applications
55 APPLICATIONPATH : $project_path + "applications/"
56 # Where to search the pyconf of the products
57 PRODUCTPATH : $project_path + "products/"
58 # Where to search the pyconf of the jobs of the project
59 JOBPATH : $project_path + "jobs/"
60 # Where to search the pyconf of the machines of the project
61 MACHINEPATH : $project_path + "machines/"
62 """
63
64 LOCAL_TEMPLATE = ("""#!/usr/bin/env python
65 #-*- coding:utf-8 -*-
66
67   LOCAL :
68   {
69     base : 'default'
70     workdir : 'default'
71     log_dir : 'default'
72     archive_dir : 'default'
73     VCS : None
74     tag : None
75   }
76
77 PROJECTS :
78 {
79 project_file_paths : [$VARS.salometoolsway + $VARS.sep + \"..\" + $VARS.sep"""
80 """ + \"""" + PROJECT_DIR + """\" + $VARS.sep + "project.pyconf"]
81 }
82 """)
83
84 # Define all possible options for the package command: sat package <options>
85 parser = src.options.Options()
86 parser.add_option('b', 'binaries', 'boolean', 'binaries',
87     _('Optional: Produce a binary package.'), False)
88 parser.add_option('f', 'force_creation', 'boolean', 'force_creation',
89     _('Optional: Only binary package: produce the archive even if '
90       'there are some missing products.'), False)
91 parser.add_option('s', 'sources', 'boolean', 'sources',
92     _('Optional: Produce a compilable archive of the sources of the '
93       'application.'), False)
94 parser.add_option('', 'with_vcs', 'boolean', 'with_vcs',
95     _('Optional: Do not make archives for products in VCS mode (git, cvs, svn). '
96       'sat prepare will use the VCS mode instead to retrieve them.'),
97     False)
98 parser.add_option('', 'ftp', 'boolean', 'ftp',
99     _('Optional: Do not embed archives for products in archive mode. '
100     'sat prepare will use ftp instead to retrieve them.'),
101     False)
102 parser.add_option('p', 'project', 'string', 'project',
103     _('Optional: Produce an archive that contains a project.'), "")
104 parser.add_option('t', 'salometools', 'boolean', 'sat',
105     _('Optional: Produce an archive that contains salomeTools.'), False)
106 parser.add_option('n', 'name', 'string', 'name',
107     _('Optional: The name or full path of the archive.'), None)
108 parser.add_option('', 'add_files', 'list2', 'add_files',
109     _('Optional: The list of additional files to add to the archive.'), [])
110 parser.add_option('', 'without_commercial', 'boolean', 'without_commercial',
111     _('Optional: do not add commercial licence.'), False)
112 parser.add_option('', 'without_properties', 'properties', 'without_properties',
113     _('Optional: Filter the products by their properties.\n\tSyntax: '
114       '--without_properties <property>:<value>'))
115
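# Illustrative command lines for these options (the application name and paths
# are hypothetical; only the option names come from the definitions above):
#   sat package MY_APPLICATION --binaries --name /tmp/MY_APPLICATION-bin.tgz
#   sat package MY_APPLICATION --sources --with_vcs
#   sat package MY_APPLICATION --salometools --project my_project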
116
117 def add_files(tar, name_archive, d_content, logger, f_exclude=None):
118     '''Create an archive containing all directories and files that are given in
119        the d_content argument.
120     
121     :param tar tarfile: The tarfile instance used to make the archive.
122     :param name_archive str: The name of the archive to make.
123     :param d_content dict: The dictionary that contains all directories and
124                            files to add to the archive.
125                            d_content[label] = 
126                                         (path_on_local_machine, path_in_archive)
127     :param logger Logger: the logging instance
128     :param f_exclude Function: the function that filters
129     :return: 0 if success, 1 if not.
130     :rtype: int
131     '''
132     # get the max length of the messages in order to make the display
133     max_len = len(max(d_content.keys(), key=len))
134     
135     success = 0
136     # loop over each directory or file stored in the d_content dictionary
137     names = sorted(d_content.keys())
138     DBG.write("add tar names", names)
139
140     for name in names:
141         # display information
142         len_points = max_len - len(name) + 3
143         local_path, archive_path = d_content[name]
144         in_archive = os.path.join(name_archive, archive_path)
145         logger.write(name + " " + len_points * "." + " "+ in_archive + " ", 3)
146         # Get the local path and the path in archive 
147         # of the directory or file to add
148         # Add it in the archive
149         try:
150             tar.add(local_path, arcname=in_archive, exclude=f_exclude)
151             logger.write(src.printcolors.printcSuccess(_("OK")), 3)
152         except Exception as e:
153             logger.write(src.printcolors.printcError(_("KO ")), 3)
154             logger.write(str(e), 3)
155             success = 1
156         logger.write("\n", 3)
157     return success
158
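# Minimal sketch of how add_files is meant to be driven; the archive name,
# paths and logger below are hypothetical. Each d_content entry maps a display
# label to the pair (path_on_local_machine, path_in_archive).
def _sketch_add_files(logger):
    path_archive = "/tmp/MYAPP.tgz"           # hypothetical target archive
    d_content = {
        "KERNEL (bin)": ("/work/INSTALL/KERNEL", "BINARIES-FD32/KERNEL"),
        "README":       ("/work/README",         "README"),
    }
    tar = tarfile.open(path_archive, mode='w:gz')
    # returns 0 if every entry was added, 1 if at least one add failed
    res = add_files(tar, "MYAPP", d_content, logger,
                    f_exclude=exclude_VCS_and_extensions)
    tar.close()
    return res
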
159 def exclude_VCS_and_extensions(filename):
160     ''' The function used to exclude from the package the links to the
161         VCS repositories (like .git)
162
163     :param filename Str: The filename to exclude (or not).
164     :return: True if the file has to be excluded
165     :rtype: Boolean
166     '''
167     for dir_name in IGNORED_DIRS:
168         if dir_name in filename:
169             return True
170     for extension in IGNORED_EXTENSIONS:
171         if filename.endswith(extension):
172             return True
173     return False
174
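# Note: the exclude= keyword of TarFile.add (used in add_files and make_archive)
# expects exactly this kind of callable, taking a file name and returning True
# to skip it. That keyword is deprecated in Python 3 and removed in recent
# releases; an equivalent sketch using the filter= keyword, which receives a
# TarInfo object instead, could look like this (an assumption for illustration,
# not currently used by this module):
def _filter_VCS_and_extensions(tarinfo):
    # return None to drop the member, or the TarInfo unchanged to keep it
    if exclude_VCS_and_extensions(tarinfo.name):
        return None
    return tarinfo
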
175 def produce_relative_launcher(config,
176                               logger,
177                               file_dir,
178                               file_name,
179                               binaries_dir_name,
180                               with_commercial=True):
181     '''Create a specific SALOME launcher for the binary package. This launcher 
182        uses relative paths.
183     
184     :param config Config: The global configuration.
185     :param logger Logger: the logging instance
186     :param file_dir str: the directory where to put the launcher
187     :param file_name str: The launcher name
188     :param binaries_dir_name str: the name of the repository where the binaries
189                                   are, in the archive.
190     :return: the path of the produced launcher
191     :rtype: str
192     '''
193     
194     # get KERNEL installation path 
195     kernel_root_dir = os.path.join(binaries_dir_name, "KERNEL")
196
197     # set kernel bin dir (considering fhs property)
198     kernel_cfg = src.product.get_product_config(config, "KERNEL")
199     if src.get_property_in_product_cfg(kernel_cfg, "fhs"):
200         bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin") 
201     else:
202         bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin","salome") 
203
204     # check if the application contains an application module
205     # check also if the application has a distene product, 
206     # in this case get its licence file name
207     l_product_info = src.product.get_products_infos(config.APPLICATION.products.keys(), config)
208     salome_application_name="Not defined" 
209     distene_licence_file_name=False
210     for prod_name, prod_info in l_product_info:
211         # look for a "salome application" and a distene product
212         if src.get_property_in_product_cfg(prod_info, "is_distene") == "yes":
213             distene_licence_file_name = src.product.product_has_licence(prod_info, 
214                                             config.PATHS.LICENCEPATH) 
215         if src.get_property_in_product_cfg(prod_info, "is_salome_application") == "yes":
216             salome_application_name=prod_info.name
217
218     # if the application contains an application module, we set ABSOLUTE_APPLI_PATH to it
219     # if not we set it to KERNEL_INSTALL_DIR, which is sufficient, except for salome test
220     if salome_application_name == "Not defined":
221         app_root_dir=kernel_root_dir
222     else:
223         app_root_dir=os.path.join(binaries_dir_name, salome_application_name)
224
225     # Get the launcher template and do substitutions
226     if "python3" in config.APPLICATION and config.APPLICATION.python3 == "yes":
227         withProfile = src.fileEnviron.withProfile3
228     else:
229         withProfile = src.fileEnviron.withProfile
230
231     withProfile = withProfile.replace(
232         "ABSOLUTE_APPLI_PATH'] = 'KERNEL_INSTALL_DIR'",
233         "ABSOLUTE_APPLI_PATH'] = out_dir_Path + '" + config.VARS.sep + app_root_dir + "'")
234     withProfile = withProfile.replace(
235         " 'BIN_KERNEL_INSTALL_DIR'",
236         " out_dir_Path + '" + config.VARS.sep + bin_kernel_install_dir + "'")
237
238     before, after = withProfile.split("# here your local standalone environment\n")
239
240     # create an environment file writer
241     writer = src.environment.FileEnvWriter(config,
242                                            logger,
243                                            file_dir,
244                                            src_root=None)
245     
246     filepath = os.path.join(file_dir, file_name)
247     # open the file and write into it
248     launch_file = open(filepath, "w")
249     launch_file.write(before)
250     # Write
251     writer.write_cfgForPy_file(launch_file,
252                                for_package = binaries_dir_name,
253                                with_commercial=with_commercial)
254     launch_file.write(after)
255     launch_file.close()
256     
257     # Little hack to put out_dir_Path outside the strings
258     src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' )
259     
260     # A hack to put a call to a file for distene licence.
261     # It does nothing to an application that has no distene product
262     if distene_licence_file_name:
263         logger.write("Application has a distene licence file! We use it in package launcher", 5)
264         hack_for_distene_licence(filepath, distene_licence_file_name)
265        
266     # change the rights in order to make the file executable for everybody
267     os.chmod(filepath,
268              stat.S_IRUSR |
269              stat.S_IRGRP |
270              stat.S_IROTH |
271              stat.S_IWUSR |
272              stat.S_IXUSR |
273              stat.S_IXGRP |
274              stat.S_IXOTH)
275
276     return filepath
277
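# The permission mask assembled above from the stat constants
# (S_IRUSR | S_IWUSR | S_IXUSR | S_IRGRP | S_IXGRP | S_IROTH | S_IXOTH)
# is the classic rwxr-xr-x, i.e. equivalent to the single octal literal:
#   os.chmod(filepath, 0o755)
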
278 def hack_for_distene_licence(filepath, licence_file):
279     '''Replace the distene licence env variable by a call to a file.
280     
281     :param filepath Str: The path to the launcher to modify.
282     '''  
283     shutil.move(filepath, filepath + "_old")
284     fileout= filepath
285     filein = filepath + "_old"
286     fin = open(filein, "r")
287     fout = open(fileout, "w")
288     text = fin.readlines()
289     # Find the Distene section
290     num_line = -1
291     for i,line in enumerate(text):
292         if "# Set DISTENE License" in line:
293             num_line = i
294             break
295     if num_line == -1:
296         # No distene product, there is nothing to do
297         fin.close()
298         for line in text:
299             fout.write(line)
300         fout.close()
301         return
302     del text[num_line +1]
303     del text[num_line +1]
304     text_to_insert ="""    try:
305         distene_licence_file="%s"
306         if sys.version_info[0] >= 3 and sys.version_info[1] >= 5:
307             import importlib.util
308             spec_dist = importlib.util.spec_from_file_location("distene_licence", distene_licence_file)
309             distene=importlib.util.module_from_spec(spec_dist)
310             spec_dist.loader.exec_module(distene)
311         else:
312             import imp
313             distene = imp.load_source('distene_licence', distene_licence_file)
314         distene.set_distene_variables(context)
315     except:
316         pass\n"""  % licence_file
317     text.insert(num_line + 1, text_to_insert)
318     for line in text:
319         fout.write(line)
320     fin.close()    
321     fout.close()
322     return
323     
324 def produce_relative_env_files(config,
325                               logger,
326                               file_dir,
327                               binaries_dir_name):
328     '''Create some specific environment files for the binary package. These 
329        files use relative paths.
330     
331     :param config Config: The global configuration.
332     :param logger Logger: the logging instance
333     :param file_dir str: the directory where to put the files
334     :param binaries_dir_name str: the name of the repository where the binaries
335                                   are, in the archive.
336     :return: the list of path of the produced environment files
337     :rtype: List
338     '''  
339     # create an environment file writer
340     writer = src.environment.FileEnvWriter(config,
341                                            logger,
342                                            file_dir,
343                                            src_root=None)
344     
345     # Write
346     filepath = writer.write_env_file("env_launch.sh",
347                           False, # for launch
348                           "bash",
349                           for_package = binaries_dir_name)
350
351     # Little hack to put out_dir_Path as environment variable
352     src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )
353
354     # change the rights in order to make the file executable for everybody
355     os.chmod(filepath,
356              stat.S_IRUSR |
357              stat.S_IRGRP |
358              stat.S_IROTH |
359              stat.S_IWUSR |
360              stat.S_IXUSR |
361              stat.S_IXGRP |
362              stat.S_IXOTH)
363     
364     return filepath
365
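# Illustration of the substitution above on a hypothetical line of the
# generated bash environment file:
#   before:  export KERNEL_ROOT_DIR="out_dir_Path/BINARIES-FD32/KERNEL"
#   after:   export KERNEL_ROOT_DIR="${out_dir_Path}/BINARIES-FD32/KERNEL"
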
366 def produce_install_bin_file(config,
367                              logger,
368                              file_dir,
369                              d_sub,
370                              file_name):
371     '''Create a bash shell script which does substitutions in the BINARIES dir
372        in order to use it for extra compilations.
373     
374     :param config Config: The global configuration.
375     :param logger Logger: the logging instance
376     :param file_dir str: the directory where to put the files
377     :param d_sub dict: the dictionary that contains the substitutions to be done
378     :param file_name str: the name of the install script file
379     :return: the produced file
380     :rtype: str
381     '''  
382     # Write
383     filepath = os.path.join(file_dir, file_name)
384     # open the file and write into it
385     # use codec utf-8 as sat variables are in unicode
386     with codecs.open(filepath, "w", 'utf-8') as installbin_file:
387         installbin_template_path = os.path.join(config.VARS.internal_dir,
388                                         "INSTALL_BIN.template")
389         
390         # build the name of the directory that will contain the binaries
391         binaries_dir_name = "BINARIES-" + config.VARS.dist
392         # build the substitution loop
393         loop_cmd = "for f in $(grep -RIl"
394         for key in d_sub:
395             loop_cmd += " -e "+ key
396         loop_cmd += ' ' + config.INTERNAL.config.install_dir +\
397                     '); do\n     sed -i "\n'
398         for key in d_sub:
399             loop_cmd += "        s?" + key + "?$(pwd)/" + d_sub[key] + "?g\n"
400         loop_cmd += '            " $f\ndone'
401
402         d={}
403         d["BINARIES_DIR"] = binaries_dir_name
404         d["SUBSTITUTION_LOOP"]=loop_cmd
405         d["INSTALL_DIR"]=config.INTERNAL.config.install_dir
406         
407         # substitute the template and write it in file
408         content=src.template.substitute(installbin_template_path, d)
409         installbin_file.write(content)
410         # change the rights in order to make the file executable for everybody
411         os.chmod(filepath,
412                  stat.S_IRUSR |
413                  stat.S_IRGRP |
414                  stat.S_IROTH |
415                  stat.S_IWUSR |
416                  stat.S_IXUSR |
417                  stat.S_IXGRP |
418                  stat.S_IXOTH)
419     
420     return filepath
421
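# For a hypothetical d_sub such as {"/path/of/old/build": "INSTALL"} and an
# install_dir of "INSTALL", the SUBSTITUTION_LOOP string built above expands
# to a bash fragment of this shape:
#   for f in $(grep -RIl -e /path/of/old/build INSTALL); do
#        sed -i "
#           s?/path/of/old/build?$(pwd)/INSTALL?g
#           " $f
#   done
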
422 def product_appli_creation_script(config,
423                                   logger,
424                                   file_dir,
425                                   binaries_dir_name):
426     '''Create a script that can produce an application (EDF style) in the binary
427        package.
428     
429     :param config Config: The global configuration.
430     :param logger Logger: the logging instance
431     :param file_dir str: the directory where to put the file
432     :param binaries_dir_name str: the name of the repository where the binaries
433                                   are, in the archive.
434     :return: the path of the produced script file
435     :rtype: Str
436     '''
437     template_name = "create_appli.py.for_bin_packages.template"
438     template_path = os.path.join(config.VARS.internal_dir, template_name)
439     text_to_fill = open(template_path, "r").read()
440     text_to_fill = text_to_fill.replace("TO BE FILLED 1",
441                                         '"' + binaries_dir_name + '"')
442     
443     text_to_add = ""
444     for product_name in get_SALOME_modules(config):
445         product_info = src.product.get_product_config(config, product_name)
446        
447         if src.product.product_is_smesh_plugin(product_info):
448             continue
449
450         if 'install_dir' in product_info and bool(product_info.install_dir):
451             if src.product.product_is_cpp(product_info):
452                 # cpp module
453                 for cpp_name in src.product.get_product_components(product_info):
454                     line_to_add = ("<module name=\"" + 
455                                    cpp_name + 
456                                    "\" gui=\"yes\" path=\"''' + "
457                                    "os.path.join(dir_bin_name, \"" + 
458                                    cpp_name + "\") + '''\"/>")
459             else:
460                 # regular module
461                 line_to_add = ("<module name=\"" + 
462                                product_name + 
463                                "\" gui=\"yes\" path=\"''' + "
464                                "os.path.join(dir_bin_name, \"" + 
465                                product_name + "\") + '''\"/>")
466             text_to_add += line_to_add + "\n"
467     
468     filled_text = text_to_fill.replace("TO BE FILLED 2", text_to_add)
469     
470     tmp_file_path = os.path.join(file_dir, "create_appli.py")
471     ff = open(tmp_file_path, "w")
472     ff.write(filled_text)
473     ff.close()
474     
475     # change the rights in order to make the file executable for everybody
476     os.chmod(tmp_file_path,
477              stat.S_IRUSR |
478              stat.S_IRGRP |
479              stat.S_IROTH |
480              stat.S_IWUSR |
481              stat.S_IXUSR |
482              stat.S_IXGRP |
483              stat.S_IXOTH)
484     
485     return tmp_file_path
486
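# For a hypothetical product named "GEOM", the line injected into the template
# above reads:
#   <module name="GEOM" gui="yes" path="''' + os.path.join(dir_bin_name, "GEOM") + '''"/>
# and resolves, once the template is evaluated, to a path inside the
# BINARIES-<dist> directory of the package.
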
487 def binary_package(config, logger, options, tmp_working_dir):
488     '''Prepare a dictionary that stores all the needed directories and files to
489        add in a binary package.
490     
491     :param config Config: The global configuration.
492     :param logger Logger: the logging instance
493     :param options OptResult: the options of the launched command
494     :param tmp_working_dir str: The temporary local directory containing some 
495                                 specific directories or files needed in the 
496                                 binary package
497     :return: the dictionary that stores all the needed directories and files to
498              add in a binary package.
499              {label : (path_on_local_machine, path_in_archive)}
500     :rtype: dict
501     '''
502
503     # Get the list of product installations to add to the archive
504     l_products_name = sorted(config.APPLICATION.products.keys())
505     l_product_info = src.product.get_products_infos(l_products_name,
506                                                     config)
507     l_install_dir = []
508     l_source_dir = []
509     l_not_installed = []
510     l_sources_not_present = []
511     generate_mesa_launcher = False  # a flag to know if we generate a mesa launcher
512     if ("APPLICATION" in config  and
513         "properties"  in config.APPLICATION  and
514         "mesa_launcher_in_package"    in config.APPLICATION.properties  and
515         config.APPLICATION.properties.mesa_launcher_in_package == "yes") :
516             generate_mesa_launcher=True
517
518     for prod_name, prod_info in l_product_info:
519         # skip product with property not_in_package set to yes
520         if src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes":
521             continue  
522
523         # Add the sources of the products that have the property 
524         # sources_in_package : "yes"
525         if src.get_property_in_product_cfg(prod_info,
526                                            "sources_in_package") == "yes":
527             if os.path.exists(prod_info.source_dir):
528                 l_source_dir.append((prod_name, prod_info.source_dir))
529             else:
530                 l_sources_not_present.append(prod_name)
531
532         # ignore the native and fixed products for install directories
533         if (src.product.product_is_native(prod_info) 
534                 or src.product.product_is_fixed(prod_info)
535                 or not src.product.product_compiles(prod_info)):
536             continue
537         if src.product.check_installation(prod_info):
538             l_install_dir.append((prod_name, prod_info.install_dir))
539         else:
540             l_not_installed.append(prod_name)
541         
542         # Add also the cpp generated modules (if any)
543         if src.product.product_is_cpp(prod_info):
544             # cpp module
545             for name_cpp in src.product.get_product_components(prod_info):
546                 install_dir = os.path.join(config.APPLICATION.workdir,
547                                            config.INTERNAL.config.install_dir,
548                                            name_cpp) 
549                 if os.path.exists(install_dir):
550                     l_install_dir.append((name_cpp, install_dir))
551                 else:
552                     l_not_installed.append(name_cpp)
553         
554     # check the name of the directory that could contain the binaries
555     # from a previous detar
556     binaries_from_detar = os.path.join(config.APPLICATION.workdir, "BINARIES-" + config.VARS.dist)
557     if os.path.exists(binaries_from_detar):
558          logger.write("""
559 WARNING: existing binaries directory from previous detar installation:
560          %s
561          To make new package from this, you have to: 
562          1) install binaries in INSTALL directory with the script "install_bin.sh" 
563             see README file for more details
564          2) or recompile everything in INSTALL with "sat compile" command 
565             this step is long, and requires some linux packages to be installed 
566             on your system\n
567 """ % binaries_from_detar)
568     
569     # Print warning or error if there are some missing products
570     if len(l_not_installed) > 0:
571         text_missing_prods = ""
572         for p_name in l_not_installed:
573             text_missing_prods += "-" + p_name + "\n"
574         if not options.force_creation:
575             msg = _("ERROR: there are missing products installations:")
576             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
577                                      text_missing_prods),
578                          1)
579             return None
580         else:
581             msg = _("WARNING: there are missing products installations:")
582             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
583                                      text_missing_prods),
584                          1)
585
586     # Do the same for sources
587     if len(l_sources_not_present) > 0:
588         text_missing_prods = ""
589         for p_name in l_sources_not_present:
590             text_missing_prods += "-" + p_name + "\n"
591         if not options.force_creation:
592             msg = _("ERROR: there are missing products sources:")
593             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
594                                      text_missing_prods),
595                          1)
596             return None
597         else:
598             msg = _("WARNING: there are missing products sources:")
599             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
600                                      text_missing_prods),
601                          1)
602  
603     # construct the name of the directory that will contain the binaries
604     binaries_dir_name = "BINARIES-" + config.VARS.dist
605     
606     # construct the correlation table between the product names, their
607     # actual install directories and their install directories in the archive
608     d_products = {}
609     for prod_name, install_dir in l_install_dir:
610         path_in_archive = os.path.join(binaries_dir_name, prod_name)
611         d_products[prod_name + " (bin)"] = (install_dir, path_in_archive)
612         
613     for prod_name, source_dir in l_source_dir:
614         path_in_archive = os.path.join("SOURCES", prod_name)
615         d_products[prod_name + " (sources)"] = (source_dir, path_in_archive)
616
617     # for packages of SALOME applications including KERNEL, 
618     # we produce a salome launcher or a virtual application (depending on salome version)
619     if 'KERNEL' in config.APPLICATION.products:
620         VersionSalome = src.get_salome_version(config)
621         # Case where SALOME has the launcher that uses the SalomeContext API
622         if VersionSalome >= 730:
623             # create the relative launcher and add it to the files to add
624             launcher_name = src.get_launcher_name(config)
625             launcher_package = produce_relative_launcher(config,
626                                                  logger,
627                                                  tmp_working_dir,
628                                                  launcher_name,
629                                                  binaries_dir_name,
630                                                  not(options.without_commercial))
631             d_products["launcher"] = (launcher_package, launcher_name)
632
633             # if the application contains mesa products, we generate in addition to the 
634             # classical salome launcher a launcher using mesa and called mesa_salome 
635             # (the mesa launcher will be used for remote usage through ssh).
636             if generate_mesa_launcher:
637                 #if there is one : store the use_mesa property
638                 restore_use_mesa_option=None
639                 if ('properties' in config.APPLICATION and 
640                     'use_mesa' in config.APPLICATION.properties):
641                     restore_use_mesa_option = config.APPLICATION.properties.use_mesa
642
643                 # activate mesa property, and generate a mesa launcher
644                 src.activate_mesa_property(config)  #activate use_mesa property
645                 launcher_mesa_name="mesa_"+launcher_name
646                 launcher_package_mesa = produce_relative_launcher(config,
647                                                      logger,
648                                                      tmp_working_dir,
649                                                      launcher_mesa_name,
650                                                      binaries_dir_name,
651                                                      not(options.without_commercial))
652                 d_products["launcher (mesa)"] = (launcher_package_mesa, launcher_mesa_name)
653
654                 # if there was a use_mesa value, we restore it
655                 # else we set it to the default value "no"
656                 if restore_use_mesa_option != None:
657                     config.APPLICATION.properties.use_mesa=restore_use_mesa_option
658                 else:
659                     config.APPLICATION.properties.use_mesa="no"
660
661             if options.sources:
662                 # if we mix binaries and sources, we add a copy of the launcher,
663                 # prefixed with "bin", in order to avoid clashes
664                 d_products["launcher (copy)"] = (launcher_package, "bin"+launcher_name)
665         else:
666             # Provide a script for the creation of an application EDF style
667             appli_script = product_appli_creation_script(config,
668                                                         logger,
669                                                         tmp_working_dir,
670                                                         binaries_dir_name)
671             
672             d_products["appli script"] = (appli_script, "create_appli.py")
673
674     # Put also the environment file
675     env_file = produce_relative_env_files(config,
676                                            logger,
677                                            tmp_working_dir,
678                                            binaries_dir_name)
679
680     d_products["environment file"] = (env_file, "env_launch.sh")
681       
682     return d_products
683
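# Shape of the dictionary returned by binary_package (labels and paths below
# are illustrative only):
#   { "KERNEL (bin)":      ("<workdir>/INSTALL/KERNEL", "BINARIES-FD32/KERNEL"),
#     "launcher":          ("<tmp_working_dir>/salome", "salome"),
#     "environment file":  ("<tmp_working_dir>/env_launch.sh", "env_launch.sh") }
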
684 def source_package(sat, config, logger, options, tmp_working_dir):
685     '''Prepare a dictionary that stores all the needed directories and files to
686        add in a source package.
687     
688     :param config Config: The global configuration.
689     :param logger Logger: the logging instance
690     :param options OptResult: the options of the launched command
691     :param tmp_working_dir str: The temporary local directory containing some 
692                                 specific directories or files needed in the 
693                                 binary package
694     :return: the dictionary that stores all the needed directories and files to
695              add in a source package.
696              {label : (path_on_local_machine, path_in_archive)}
697     :rtype: dict
698     '''
699     
700     d_archives, l_pinfo_vcs = {}, []  # l_pinfo_vcs must exist even when --ftp skips get_archives
701     # Get all the products that are prepared using an archive,
702     # unless ftp mode is specified (in this case the user of the
703     # archive will get the sources through the ftp mode of sat prepare)
704     if not options.ftp:
705         logger.write("Find archive products ... ")
706         d_archives, l_pinfo_vcs = get_archives(config, logger)
707         logger.write("Done\n")
708
709     d_archives_vcs = {}
710     if not options.with_vcs and len(l_pinfo_vcs) > 0:
711         # Make archives with the products that are not prepared using an archive
712         # (git, cvs, svn, etc)
713         logger.write("Construct archives for vcs products ... ")
714         d_archives_vcs = get_archives_vcs(l_pinfo_vcs,
715                                           sat,
716                                           config,
717                                           logger,
718                                           tmp_working_dir)
719         logger.write("Done\n")
720
721     # Create a project
722     logger.write("Create the project ... ")
723     d_project = create_project_for_src_package(config,
724                                                tmp_working_dir,
725                                                options.with_vcs,
726                                                options.ftp)
727     logger.write("Done\n")
728     
729     # Add salomeTools
730     tmp_sat = add_salomeTools(config, tmp_working_dir)
731     d_sat = {"salomeTools" : (tmp_sat, "salomeTools")}
732     
733     # Add a sat symbolic link if not win
734     if not src.architecture.is_windows():
735         tmp_satlink_path = os.path.join(tmp_working_dir, 'sat')
736         try:
737             t = os.getcwd()
738         except:
739             # In the jobs, os.getcwd() can fail
740             t = config.LOCAL.workdir
741         os.chdir(tmp_working_dir)
742         if os.path.lexists(tmp_satlink_path):
743             os.remove(tmp_satlink_path)
744         os.symlink(os.path.join('salomeTools', 'sat'), 'sat')
745         os.chdir(t)
746         
747         d_sat["sat link"] = (tmp_satlink_path, "sat")
748     
749     d_source = src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
750     return d_source
751
752 def get_archives(config, logger):
753     '''Find all the products that are retrieved using an archive and all the
754        products that are retrieved from a vcs (git, cvs, svn) repository.
755     
756     :param config Config: The global configuration.
757     :param logger Logger: the logging instance
758     :return: the dictionary {name_product : 
759              (local path of its archive, path in the package of its archive )}
760              and the list of specific configuration corresponding to the vcs 
761              products
762     :rtype: (Dict, List)
763     '''
764     # Get the list of product information
765     l_products_name = config.APPLICATION.products.keys()
766     l_product_info = src.product.get_products_infos(l_products_name,
767                                                     config)
768     d_archives = {}
769     l_pinfo_vcs = []
770     for p_name, p_info in l_product_info:
771         # skip product with property not_in_package set to yes
772         if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
773             continue  
774         # ignore the native and fixed products
775         if (src.product.product_is_native(p_info) 
776                 or src.product.product_is_fixed(p_info)):
777             continue
778         if p_info.get_source == "archive":
779             archive_path = p_info.archive_info.archive_name
780             archive_name = os.path.basename(archive_path)
781         else:
782             l_pinfo_vcs.append((p_name, p_info))
783             continue  # vcs products have no archive to register here
784         d_archives[p_name] = (archive_path,
785                               os.path.join(ARCHIVE_DIR, archive_name))
786     return d_archives, l_pinfo_vcs
787
788 def add_salomeTools(config, tmp_working_dir):
789     '''Prepare a version of salomeTools that has a specific local.pyconf file 
790        configured for a source package.
791
792     :param config Config: The global configuration.
793     :param tmp_working_dir str: The temporary local directory containing some 
794                                 specific directories or files needed in the 
795                                 source package
796     :return: The path to the local salomeTools directory to add in the package
797     :rtype: str
798     '''
799     # Copy sat in the temporary working directory
800     sat_tmp_path = src.Path(os.path.join(tmp_working_dir, "salomeTools"))
801     sat_running_path = src.Path(config.VARS.salometoolsway)
802     sat_running_path.copy(sat_tmp_path)
803     
804     # Update the local.pyconf file that contains the path to the project
805     local_pyconf_name = "local.pyconf"
806     local_pyconf_dir = os.path.join(tmp_working_dir, "salomeTools", "data")
807     local_pyconf_file = os.path.join(local_pyconf_dir, local_pyconf_name)
808     # Remove the .pyconf file in the root directory of salomeTools if there is
809     # any. (For example when launching jobs, a pyconf file describing the jobs 
810     # can be here and is not useful) 
811     files_or_dir_SAT = os.listdir(os.path.join(tmp_working_dir, "salomeTools"))
812     for file_or_dir in files_or_dir_SAT:
813         if file_or_dir.endswith(".pyconf") or file_or_dir.endswith(".txt"):
814             file_path = os.path.join(tmp_working_dir,
815                                      "salomeTools",
816                                      file_or_dir)
817             os.remove(file_path)
818     
819     ff = open(local_pyconf_file, "w")
820     ff.write(LOCAL_TEMPLATE)
821     ff.close()
822     
823     return sat_tmp_path.path
824
825 def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir):
826     '''For source packages that require that all products be retrieved as
827        archives, one has to create an archive for each vcs product.
828        So this method calls the clean and source commands of sat and then
829        creates the archives.
830
831     :param l_pinfo_vcs List: The list of specific configuration corresponding to
832                              each vcs product
833     :param sat Sat: The Sat instance that can be called to clean and source the
834                     products
835     :param config Config: The global configuration.
836     :param logger Logger: the logging instance
837     :param tmp_working_dir str: The temporary local directory containing some 
838                                 specific directories or files needed in the 
839                                 source package
840     :return: the dictionary that stores all the archives to add in the source 
841              package. {label : (path_on_local_machine, path_in_archive)}
842     :rtype: dict
843     '''
844     # clean the source directory of all the vcs products, then use the source 
845     # command and thus construct an archive that will not contain the patches
846     l_prod_names = [pn for pn, __ in l_pinfo_vcs]
847     if False: # clean is dangerous in user/SOURCES, fixed in tmp_local_working_dir
848       logger.write(_("\nclean sources\n"))
849       args_clean = config.VARS.application
850       args_clean += " --sources --products "
851       args_clean += ",".join(l_prod_names)
852       logger.write("WARNING: get_archives_vcs clean\n         '%s'\n" % args_clean, 1)
853       sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)
854     if True:
855       # source
856       logger.write(_("get sources\n"))
857       args_source = config.VARS.application
858       args_source += " --products "
859       args_source += ",".join(l_prod_names)
860       svgDir = sat.cfg.APPLICATION.workdir
861       tmp_local_working_dir = os.path.join(sat.cfg.APPLICATION.workdir, "tmp_package")  # to avoid too many big files in /tmp
862       sat.cfg.APPLICATION.workdir = tmp_local_working_dir
863       # DBG.write("SSS sat config.APPLICATION.workdir", sat.cfg.APPLICATION, True)
864       # DBG.write("sat config id", id(sat.cfg), True)
865       # note: this config does not have the same id() as the one seen by sat.source()
866       # sat.source(args_source, batch=True, verbose=5, logger_add_link = logger)
867       import source
868       source.run(args_source, sat, logger) #use this mode as runner.cfg reference
869       
870       # make the new archives
871       d_archives_vcs = {}
872       for pn, pinfo in l_pinfo_vcs:
873           path_archive = make_archive(pn, pinfo, tmp_local_working_dir)
874           logger.write("make archive vcs '%s'\n" % path_archive)
875           d_archives_vcs[pn] = (path_archive,
876                                 os.path.join(ARCHIVE_DIR, pn + ".tgz"))
877       sat.cfg.APPLICATION.workdir = svgDir
878       # DBG.write("END sat config", sat.cfg.APPLICATION, True)
879     return d_archives_vcs
880
881 def make_archive(prod_name, prod_info, where):
882     '''Create an archive of a product by searching its source directory.
883
884     :param prod_name str: The name of the product.
885     :param prod_info Config: The specific configuration corresponding to the 
886                              product
887     :param where str: The path of the repository where to put the resulting 
888                       archive
889     :return: The path of the resulting archive
890     :rtype: str
891     '''
892     path_targz_prod = os.path.join(where, prod_name + ".tgz")
893     tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
894     local_path = prod_info.source_dir
895     tar_prod.add(local_path,
896                  arcname=prod_name,
897                  exclude=exclude_VCS_and_extensions)
898     tar_prod.close()
899     return path_targz_prod       
900
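# Small verification sketch (hypothetical archive path): the archive produced
# by make_archive has prod_name as its single top-level entry, with VCS
# directories filtered out by exclude_VCS_and_extensions.
def _sketch_check_archive(path_targz_prod):
    with tarfile.open(path_targz_prod, mode='r:gz') as tar:
        return tar.getnames()   # e.g. ['MYPROD', 'MYPROD/src', ...]
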
901 def create_project_for_src_package(config, tmp_working_dir, with_vcs, with_ftp):
902     '''Create a specific project for a source package.
903
904     :param config Config: The global configuration.
905     :param tmp_working_dir str: The temporary local directory containing some 
906                                 specific directories or files needed in the 
907                                 source package
908     :param with_vcs boolean: True if the package is with vcs products (not 
909                              transformed into archive products)
910     :param with_ftp boolean: True if the package use ftp servers to get archives
911     :return: The dictionary 
912              {"project" : (produced project, project path in the archive)}
913     :rtype: Dict
914     '''
915
916     # Create in the working temporary directory the full project tree
917     project_tmp_dir = os.path.join(tmp_working_dir, PROJECT_DIR)
918     products_pyconf_tmp_dir = os.path.join(project_tmp_dir,
919                                          "products")
920     compil_scripts_tmp_dir = os.path.join(project_tmp_dir,
921                                          "products",
922                                          "compil_scripts")
923     env_scripts_tmp_dir = os.path.join(project_tmp_dir,
924                                          "products",
925                                          "env_scripts")
926     patches_tmp_dir = os.path.join(project_tmp_dir,
927                                          "products",
928                                          "patches")
929     application_tmp_dir = os.path.join(project_tmp_dir,
930                                          "applications")
931     for directory in [project_tmp_dir,
932                       compil_scripts_tmp_dir,
933                       env_scripts_tmp_dir,
934                       patches_tmp_dir,
935                       application_tmp_dir]:
936         src.ensure_path_exists(directory)
937
938     # Create the pyconf that contains the information of the project
939     project_pyconf_name = "project.pyconf"        
940     project_pyconf_file = os.path.join(project_tmp_dir, project_pyconf_name)
941     ff = open(project_pyconf_file, "w")
942     ff.write(PROJECT_TEMPLATE)
943     if with_ftp and len(config.PATHS.ARCHIVEFTP) > 0:
944         ftp_path='ARCHIVEFTP : "'+config.PATHS.ARCHIVEFTP[0]
945         for ftpserver in config.PATHS.ARCHIVEFTP[1:]:
946             ftp_path=ftp_path+":"+ftpserver
947         ftp_path+='"'
948         ff.write("# ftp servers where to search for prerequisite archives\n")
949         ff.write(ftp_path)
950     # add licence paths if any
951     if len(config.PATHS.LICENCEPATH) > 0:  
952         licence_path='LICENCEPATH : "'+config.PATHS.LICENCEPATH[0]
953         for path in config.PATHS.LICENCEPATH[1:]:
954             licence_path=licence_path+":"+path
955         licence_path+='"'
956         ff.write("\n# Where to search for licences\n")
957         ff.write(licence_path)
958         
959
960     ff.close()
961     
962     # Loop over the products to get their pyconf and all the scripts
963     # (compilation, environment, patches)
964     # and create the pyconf file to add to the project
965     lproducts_name = config.APPLICATION.products.keys()
966     l_products = src.product.get_products_infos(lproducts_name, config)
967     for p_name, p_info in l_products:
968         # skip product with property not_in_package set to yes
969         if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
970             continue  
971         find_product_scripts_and_pyconf(p_name,
972                                         p_info,
973                                         config,
974                                         with_vcs,
975                                         compil_scripts_tmp_dir,
976                                         env_scripts_tmp_dir,
977                                         patches_tmp_dir,
978                                         products_pyconf_tmp_dir)
979     
980     find_application_pyconf(config, application_tmp_dir)
981     
982     d_project = {"project" : (project_tmp_dir, PROJECT_DIR )}
983     return d_project
984
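# For a hypothetical configuration with one ftp server and one licence path,
# the project.pyconf written above is PROJECT_TEMPLATE followed by lines of
# this shape:
#   # ftp servers where to search for prerequisite archives
#   ARCHIVEFTP : "ftp://my.server/pub/archives"
#   # Where to search for licences
#   LICENCEPATH : "/path/to/licences"
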
985 def find_product_scripts_and_pyconf(p_name,
986                                     p_info,
987                                     config,
988                                     with_vcs,
989                                     compil_scripts_tmp_dir,
990                                     env_scripts_tmp_dir,
991                                     patches_tmp_dir,
992                                     products_pyconf_tmp_dir):
993     '''Create a specific pyconf file for a given product. Get its environment 
994        script, its compilation script and patches and put it in the temporary
995        working directory. This method is used in the source package in order to
996        construct the specific project.
997
998     :param p_name str: The name of the product.
999     :param p_info Config: The specific configuration corresponding to the 
1000                              product
1001     :param config Config: The global configuration.
1002     :param with_vcs boolean: True if the package is with vcs products (not 
1003                              transformed into archive products)
1004     :param compil_scripts_tmp_dir str: The path to the temporary compilation 
1005                                        scripts directory of the project.
1006     :param env_scripts_tmp_dir str: The path to the temporary environment script 
1007                                     directory of the project.
1008     :param patches_tmp_dir str: The path to the temporary patch scripts 
1009                                 directory of the project.
1010     :param products_pyconf_tmp_dir str: The path to the temporary product 
1011                                         scripts directory of the project.
1012     '''
1013     
1014     # read the pyconf of the product
1015     product_pyconf_path = src.find_file_in_lpath(p_name + ".pyconf",
1016                                            config.PATHS.PRODUCTPATH)
1017     product_pyconf_cfg = src.pyconf.Config(product_pyconf_path)
1018
1019     # find the compilation script if any
1020     if src.product.product_has_script(p_info):
1021         compil_script_path = src.Path(p_info.compil_script)
1022         compil_script_path.copy(compil_scripts_tmp_dir)
1023         product_pyconf_cfg[p_info.section].compil_script = os.path.basename(
1024                                                     p_info.compil_script)
1025     # find the environment script if any
1026     if src.product.product_has_env_script(p_info):
1027         env_script_path = src.Path(p_info.environ.env_script)
1028         env_script_path.copy(env_scripts_tmp_dir)
1029         product_pyconf_cfg[p_info.section].environ.env_script = os.path.basename(
1030                                                 p_info.environ.env_script)
1031     # find the patches if any
1032     if src.product.product_has_patches(p_info):
1033         patches = src.pyconf.Sequence()
1034         for patch_path in p_info.patches:
1035             p_path = src.Path(patch_path)
1036             p_path.copy(patches_tmp_dir)
1037             patches.append(os.path.basename(patch_path), "")
1038
1039         product_pyconf_cfg[p_info.section].patches = patches
1040     
1041     if with_vcs:
1042         # put in the pyconf file the resolved values
1043         for info in ["git_info", "cvs_info", "svn_info"]:
1044             if info in p_info:
1045                 for key in p_info[info]:
1046                     product_pyconf_cfg[p_info.section][info][key] = p_info[
1047                                                                       info][key]
1048     else:
1049         # if the product is not archive, then make it become archive.
1050         if src.product.product_is_vcs(p_info):
1051             product_pyconf_cfg[p_info.section].get_source = "archive"
1052             if not "archive_info" in product_pyconf_cfg[p_info.section]:
1053                 product_pyconf_cfg[p_info.section].addMapping("archive_info",
1054                                         src.pyconf.Mapping(product_pyconf_cfg),
1055                                         "")
1056             product_pyconf_cfg[p_info.section
1057                               ].archive_info.archive_name = p_info.name + ".tgz"
1058     
1059     # write the pyconf file to the temporary project location
1060     product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir,
1061                                            p_name + ".pyconf")
1062     ff = open(product_tmp_pyconf_path, 'w')
1063     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1064     product_pyconf_cfg.__save__(ff, 1)
1065     ff.close()
1066
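# Illustration for a hypothetical git product "MYPROD" when with_vcs is False:
# the pyconf written for the package switches the product to archive mode, so
# its section ends up containing roughly
#   get_source : "archive"
#   archive_info : { archive_name : "MYPROD.tgz" }
# and the corresponding MYPROD.tgz is produced by get_archives_vcs.
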
1067 def find_application_pyconf(config, application_tmp_dir):
1068     '''Find the application pyconf file and put it in the specific temporary 
1069        directory containing the specific project of a source package.
1070
1071     :param config Config: The global configuration.
1072     :param application_tmp_dir str: The path to the temporary application 
1073                                        scripts directory of the project.
1074     '''
1075     # read the pyconf of the application
1076     application_name = config.VARS.application
1077     application_pyconf_path = src.find_file_in_lpath(
1078                                             application_name + ".pyconf",
1079                                             config.PATHS.APPLICATIONPATH)
1080     application_pyconf_cfg = src.pyconf.Config(application_pyconf_path)
1081     
1082     # Change the workdir
1083     application_pyconf_cfg.APPLICATION.workdir = src.pyconf.Reference(
1084                                     application_pyconf_cfg,
1085                                     src.pyconf.DOLLAR,
1086                                     'VARS.salometoolsway + $VARS.sep + ".."')
1087
1088     # Prevent from compilation in base
1089     application_pyconf_cfg.APPLICATION.no_base = "yes"
1090     
1091     #remove products that are not in config (which were filtered by --without_properties)
1092     for product_name in application_pyconf_cfg.APPLICATION.products.keys():
1093         if product_name not in config.APPLICATION.products.keys():
1094             application_pyconf_cfg.APPLICATION.products.__delitem__(product_name)
1095
1096     # write the pyconf file to the temporary application location
1097     application_tmp_pyconf_path = os.path.join(application_tmp_dir,
1098                                                application_name + ".pyconf")
1099
1100     ff = open(application_tmp_pyconf_path, 'w')
1101     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1102     application_pyconf_cfg.__save__(ff, 1)
1103     ff.close()
1104
1105 def sat_package(config, tmp_working_dir, options, logger):
1106     '''Prepare a dictionary that stores all the needed directories and files to
1107        add in a salomeTools package.
1108     
1109     :param tmp_working_dir str: The temporary local working directory 
1110     :param options OptResult: the options of the launched command
1111     :return: the dictionary that stores all the needed directories and files to
1112              add in a salomeTools package.
1113              {label : (path_on_local_machine, path_in_archive)}
1114     :rtype: dict
1115     '''
1116     d_project = {}
1117
1118     # we include sat itself
1119     d_project["all_sat"]=(config.VARS.salometoolsway, "")
1120
1121     # and we overwrite local.pyconf with a clean version.
1122     local_pyconf_tmp_path = os.path.join(tmp_working_dir, "local.pyconf")
1123     local_file_path = os.path.join(config.VARS.datadir, "local.pyconf")
1124     local_cfg = src.pyconf.Config(local_file_path)
1125     local_cfg.PROJECTS.project_file_paths=src.pyconf.Sequence(local_cfg.PROJECTS)
1126     local_cfg.LOCAL["base"] = "default"
1127     local_cfg.LOCAL["workdir"] = "default"
1128     local_cfg.LOCAL["log_dir"] = "default"
1129     local_cfg.LOCAL["archive_dir"] = "default"
1130     local_cfg.LOCAL["VCS"] = "None"
1131     local_cfg.LOCAL["tag"] = src.get_salometool_version(config)
1132
1133     # if the archive contains a project, we write its relative path in local.pyconf
1134     if options.project:
1135         project_arch_path = os.path.join("projects", options.project, 
1136                                          os.path.basename(options.project_file_path))
1137         local_cfg.PROJECTS.project_file_paths.append(project_arch_path, "")
1138
1139     ff = open(local_pyconf_tmp_path, 'w')
1140     local_cfg.__save__(ff, 1)
1141     ff.close()
1142     d_project["local.pyconf"]=(local_pyconf_tmp_path, "data/local.pyconf")
1143     return d_project
1144     
1145
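# The local.pyconf embedded in a sat package therefore carries a LOCAL section
# reset to defaults, roughly of this shape (the tag value is hypothetical):
#   LOCAL :
#   {
#     base : "default"
#     workdir : "default"
#     log_dir : "default"
#     archive_dir : "default"
#     VCS : "None"
#     tag : "5.5.0"
#   }
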
1146 def project_package(config, name_project, project_file_path, ftp_mode, tmp_working_dir, embedded_in_sat, logger):
1147     '''Prepare a dictionary that stores all the needed directories and files to
1148        add in a project package.
1149     
1150     :param project_file_path str: The path to the local project.
1151     :param ftp_mode boolean: Do not embed archives, the archive will rely on ftp mode to retrieve them.
1152     :param tmp_working_dir str: The temporary local directory containing some 
1153                                 specific directories or files needed in the 
1154                                 project package
1155     :param embedded_in_sat boolean : the project package is embedded in a sat package
1156     :return: the dictionary that stores all the needed directories and files to
1157              add in a project package.
1158              {label : (path_on_local_machine, path_in_archive)}
1159     :rtype: dict
1160     '''
1161     d_project = {}
1162     # Read the project file and get the directories to add to the package
1163     
1164     try: 
1165       project_pyconf_cfg = config.PROJECTS.projects.__getattr__(name_project)
1166     except:
1167       logger.write("""
1168 WARNING: nonexistent config.PROJECTS.projects.%s, trying now to read from:\n%s\n""" % (name_project, project_file_path))
1169       project_pyconf_cfg = src.pyconf.Config(project_file_path)
1170       project_pyconf_cfg.PWD = os.path.dirname(project_file_path)
1171     
1172     paths = {"APPLICATIONPATH" : "applications",
1173              "PRODUCTPATH" : "products",
1174              "JOBPATH" : "jobs",
1175              "MACHINEPATH" : "machines"}
1176     if not ftp_mode:
1177         paths["ARCHIVEPATH"] = "archives"
1178
1179     # Loop over the project paths and add it
1180     project_file_name = os.path.basename(project_file_path)
1181     for path in paths:
1182         if path not in project_pyconf_cfg:
1183             continue
1184         if embedded_in_sat:
1185             dest_path = os.path.join("projects", name_project, paths[path])
1186             project_file_dest = os.path.join("projects", name_project, project_file_name)
1187         else:
1188             dest_path = paths[path]
1189             project_file_dest = project_file_name
1190
1191         # Add the directory to the files to add in the package
1192         d_project[path] = (project_pyconf_cfg[path], dest_path)
1193
1194         # Modify the value of the path in the package
1195         project_pyconf_cfg[path] = src.pyconf.Reference(
1196                                     project_pyconf_cfg,
1197                                     src.pyconf.DOLLAR,
1198                                     'project_path + "/' + paths[path] + '"')
1199     
1200     # Modify some values
1201     if "project_path" not in project_pyconf_cfg:
1202         project_pyconf_cfg.addMapping("project_path",
1203                                       src.pyconf.Mapping(project_pyconf_cfg),
1204                                       "")
1205     project_pyconf_cfg.project_path = src.pyconf.Reference(project_pyconf_cfg,
1206                                                            src.pyconf.DOLLAR,
1207                                                            'PWD')
1208     # we don't want to export these two fields
1209     project_pyconf_cfg.__delitem__("file_path")
1210     project_pyconf_cfg.__delitem__("PWD")
1211     if ftp_mode and "ARCHIVEPATH" in project_pyconf_cfg:
1212         project_pyconf_cfg.__delitem__("ARCHIVEPATH")
1213     
1214     # Write the project pyconf file
1215     project_pyconf_tmp_path = os.path.join(tmp_working_dir, project_file_name)
1216     ff = open(project_pyconf_tmp_path, 'w')
1217     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1218     project_pyconf_cfg.__save__(ff, 1)
1219     ff.close()
1220     d_project["Project hat file"] = (project_pyconf_tmp_path, project_file_dest)
1221     
1222     return d_project
1223
1224 def add_readme(config, options, where):
1225     readme_path = os.path.join(where, "README")
1226     with codecs.open(readme_path, "w", 'utf-8') as f:
1227
1228         # templates for building the header
1229         readme_header="""
1230 # This package was generated with sat $version
1231 # Date: $date
1232 # User: $user
1233 # Distribution : $dist
1234
1235 In the following, $$ROOT represents the directory where you have installed 
1236 SALOME (the directory where this file is located).
1237
1238 """
1239         readme_compilation_with_binaries="""
1240
1241 compilation based on the binaries used as prerequisites
1242 =======================================================
1243
1244 If you fail to compile the complete application (for example because
1245 you are not root on your system and cannot install missing packages), you
1246 may try a partial compilation based on the binaries.
1247 For that it is necessary to copy the binaries from BINARIES to INSTALL,
1248 and do some substitutions on cmake and .la files (replace the build directories
1249 with local paths).
1250 The procedure to do it is:
1251  1) Remove or rename INSTALL directory if it exists
1252  2) Execute the shell script install_bin.sh:
1253  > cd $ROOT
1254  > ./install_bin.sh
1255  3) Use salomeTools (as explained in the Sources section) and compile only the 
1256     modules you need (with the -p option)
1257
1258 """
1259         readme_header_tpl=string.Template(readme_header)
1260         readme_template_path_bin = os.path.join(config.VARS.internal_dir,
1261                 "README_BIN.template")
1262         readme_template_path_bin_launcher = os.path.join(config.VARS.internal_dir,
1263                 "README_LAUNCHER.template")
1264         readme_template_path_bin_virtapp = os.path.join(config.VARS.internal_dir,
1265                 "README_BIN_VIRTUAL_APP.template")
1266         readme_template_path_src = os.path.join(config.VARS.internal_dir,
1267                 "README_SRC.template")
1268         readme_template_path_pro = os.path.join(config.VARS.internal_dir,
1269                 "README_PROJECT.template")
1270         readme_template_path_sat = os.path.join(config.VARS.internal_dir,
1271                 "README_SAT.template")
1272
1273         # prepare substitution dictionary
1274         d = dict()
1275         d['user'] = config.VARS.user
1276         d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
1277         d['version'] = src.get_salometool_version(config)
1278         d['dist'] = config.VARS.dist
1279         f.write(readme_header_tpl.substitute(d)) # write the general header (common)
1280
1281         if options.binaries or options.sources:
1282             d['application'] = config.VARS.application
1283             f.write("# Application: " + d['application'] + "\n")
1284             if 'KERNEL' in config.APPLICATION.products:
1285                 VersionSalome = src.get_salome_version(config)
1286                 # Case where SALOME has the launcher that uses the SalomeContext API
1287                 if VersionSalome >= 730:
1288                     d['launcher'] = config.APPLICATION.profile.launcher_name
1289                 else:
1290                     d['virtual_app'] = 'runAppli' # this info is not used for now
1291
1292         # write the specific sections
1293         if options.binaries:
1294             f.write(src.template.substitute(readme_template_path_bin, d))
1295             if "virtual_app" in d:
1296                 f.write(src.template.substitute(readme_template_path_bin_virtapp, d))
1297             if "launcher" in d:
1298                 f.write(src.template.substitute(readme_template_path_bin_launcher, d))
1299
1300         if options.sources:
1301             f.write(src.template.substitute(readme_template_path_src, d))
1302
1303         if options.binaries and options.sources:
1304             f.write(readme_compilation_with_binaries)
1305
1306         if options.project:
1307             f.write(src.template.substitute(readme_template_path_pro, d))
1308
1309         if options.sat:
1310             f.write(src.template.substitute(readme_template_path_sat, d))
1311     
1312     return readme_path
1313
1314 def update_config(config, prop, value):
1315     '''Remove from config.APPLICATION.products the products that have the property given as input.
1316     
1317     :param config Config: The global config.
1318     :param prop str: The property to filter
1319     :param value str: The value of the property to filter
1320     '''
1321     # if there is no APPLICATION (e.g. sat package -t): nothing to do
1322     if "APPLICATION" in config:
1323         l_product_to_remove = []
1324         for product_name in config.APPLICATION.products.keys():
1325             prod_cfg = src.product.get_product_config(config, product_name)
1326             if src.get_property_in_product_cfg(prod_cfg, prop) == value:
1327                 l_product_to_remove.append(product_name)
1328         for product_name in l_product_to_remove:
1329             config.APPLICATION.products.__delitem__(product_name)
1330
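# Hedged usage sketch of update_config; it mirrors the call made in run() below,
# where "config" stands for the global runner.cfg object:
#   update_config(config, "not_in_package", "yes")  # drop products flagged as not to be packaged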
1331 def description():
1332     '''method that is called when salomeTools is called with --help option.
1333     
1334     :return: The text to display for the package command description.
1335     :rtype: str
1336     '''
1337     return _("""
1338 The package command creates a tar file archive of a product.
1339 There are four kinds of archive, which can be mixed:
1340
1341  1 - The binary archive. 
1342      It contains the product installation directories plus a launcher.
1343  2 - The sources archive. 
1344      It contains the product source archives and a project (the application plus salomeTools).
1345  3 - The project archive. 
1346      It contains a project (give the project file path as argument).
1347  4 - The salomeTools archive. 
1348      It contains the salomeTools utility itself.
1349
1350 example:
1351  >> sat package SALOME-master --binaries --sources""")
1352   
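# Hedged command-line sketches built from the options defined above (the application
# and project names are placeholders):
#   sat package SALOME-master --binaries             # binary archive of an application
#   sat package SALOME-master --sources --with_vcs   # source archive relying on VCS checkouts
#   sat package --project MY_PROJECT                 # archive of a project known to sat
#   sat package -t                                   # archive of salomeTools itself (--salometools)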
1353 def run(args, runner, logger):
1354     '''method that is called when salomeTools is called with the package command.
1355     '''
1356     
1357     # Parse the options
1358     (options, args) = parser.parse_args(args)
1359
1360     # Check that at least one type of package has been requested
1361     all_option_types = (options.binaries,
1362                         options.sources,
1363                         options.project not in ["", None],
1364                         options.sat)
1365
1366     # Check if no option for package type
1367     if all_option_types.count(True) == 0:
1368         msg = _("Error: Specify a type for the package\nUse one of the "
1369                 "following options: --binaries, --sources, --project or"
1370                 " --salometools")
1371         logger.write(src.printcolors.printcError(msg), 1)
1372         logger.write("\n", 1)
1373         return 1
1374     
1375     # The default directory where to put the package if it is not binary or source
1376     package_default_path = runner.cfg.LOCAL.workdir
1377     
1378     # if the package contains binaries or sources:
1379     if options.binaries or options.sources:
1380         # Check that the command has been called with an application
1381         src.check_config_has_application(runner.cfg)
1382
1383         # Display information
1384         logger.write(_("Packaging application %s\n") % src.printcolors.printcLabel(
1385                                                     runner.cfg.VARS.application), 1)
1386         
1387         # Get the default directory where to put the packages
1388         package_default_path = os.path.join(runner.cfg.APPLICATION.workdir, "PACKAGE")
1389         src.ensure_path_exists(package_default_path)
1390         
1391     # if the package contains a project:
1392     if options.project:
1393         # check that the project is visible by SAT
1394         projectNameFile = options.project + ".pyconf"
1395         foundProject = None
1396         for i in runner.cfg.PROJECTS.project_file_paths:
1397             baseName = os.path.basename(i)
1398             if baseName == projectNameFile:
1399                 foundProject = i
1400                 break
1401
1402         if foundProject is None:
1403             local_path = os.path.join(runner.cfg.VARS.salometoolsway, "data", "local.pyconf")
1404             msg = _("""ERROR: the project %(1)s is not visible to salomeTools.
1405 Known project files are:
1406 %(2)s
1407
1408 Please add it to the file:
1409 %(3)s""" % \
1410                     {"1": options.project, "2": "\n  ".join(runner.cfg.PROJECTS.project_file_paths), "3": local_path})
1411             logger.write(src.printcolors.printcError(msg), 1)
1412             logger.write("\n", 1)
1413             return 1
1414         else:
1415             options.project_file_path = foundProject
1416             src.printcolors.print_value(logger, "Project path", options.project_file_path, 2)
1417     
1418     # Remove the products that are filtered by the --without_properties option
1419     if options.without_properties:
1420         app = runner.cfg.APPLICATION
1421         logger.trace("without_properties all products:\n %s\n" % PP.pformat(sorted(app.products.keys())))
1422         prop, value = options.without_properties
1423         update_config(runner.cfg, prop, value)
1424         logger.warning("without_properties selected products:\n %s\n" % PP.pformat(sorted(app.products.keys())))
1425
1426     # Remove from config the products that have the not_in_package property
1427     update_config(runner.cfg, "not_in_package", "yes")
1428     
1429     # get the name of the archive or build it
1430     if options.name:
1431         if os.path.basename(options.name) == options.name:
1432             # only a name (not a path)
1433             archive_name = options.name           
1434             dir_name = package_default_path
1435         else:
1436             archive_name = os.path.basename(options.name)
1437             dir_name = os.path.dirname(options.name)
1438         
1439         # strip the .tgz / .tar.gz extension if present
1440         if archive_name[-len(".tgz"):] == ".tgz":
1441             archive_name = archive_name[:-len(".tgz")]
1442         if archive_name[-len(".tar.gz"):] == ".tar.gz":
1443             archive_name = archive_name[:-len(".tar.gz")]
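        # Hedged example (hypothetical value): --name /tmp/MYAPP.tar.gz gives
        # dir_name="/tmp" and archive_name="MYAPP"; the archive is finally written
        # as <dir_name>/MYAPP.tgz (see path_targz below).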
1444         
1445     else:
1446         archive_name=""
1447         dir_name = package_default_path
1448         if options.binaries or options.sources:
1449             archive_name = runner.cfg.APPLICATION.name
1450
1451         if options.binaries:
1452             archive_name += "-"+runner.cfg.VARS.dist
1453             
1454         if options.sources:
1455             archive_name += "-SRC"
1456             if options.with_vcs:
1457                 archive_name += "-VCS"
1458
1459         if options.sat:
1460             archive_name += ("salomeTools_" + src.get_salometool_version(runner.cfg))
1461
1462         if options.project:
1463             if options.sat:
1464                 archive_name += "_" 
1465             project_name = options.project
1466             archive_name += ("satproject_" + project_name)
1467  
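        # Hedged examples of automatically composed names ("FD32" is a placeholder dist):
        #   --binaries for SALOME-master            ->  "SALOME-master-FD32"
        #   --sources --with_vcs for SALOME-master  ->  "SALOME-master-SRC-VCS"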
1468         if len(archive_name)==0: # no option worked 
1469             msg = _("Error: Cannot name the archive\n"
1470                     " check if at least one of the following options was "
1471                     "selected: --binaries, --sources, --project or"
1472                     " --salometools")
1473             logger.write(src.printcolors.printcError(msg), 1)
1474             logger.write("\n", 1)
1475             return 1
1476  
1477     path_targz = os.path.join(dir_name, archive_name + ".tgz")
1478     
1479     src.printcolors.print_value(logger, "Package path", path_targz, 2)
1480
1481     # Create a working directory for all files that are produced during the
1482     # package creation and that will be removed at the end of the command
1483     tmp_working_dir = os.path.join(runner.cfg.VARS.tmp_root, runner.cfg.VARS.datehour)
1484     src.ensure_path_exists(tmp_working_dir)
1485     logger.write("\n", 5)
1486     logger.write(_("The temporary working directory: %s\n" % tmp_working_dir),5)
1487     
1488     logger.write("\n", 3)
1489
1490     msg = _("Preparation of files to add to the archive")
1491     logger.write(src.printcolors.printcLabel(msg), 2)
1492     logger.write("\n", 2)
1493     
1494     d_files_to_add={}  # content of the archive
1495
1496     # a dict to hold the paths that will need to be substituted for user recompilations
1497     d_paths_to_substitute={}  
1498
1499     if options.binaries:
1500         d_bin_files_to_add = binary_package(runner.cfg,
1501                                             logger,
1502                                             options,
1503                                             tmp_working_dir)
1504         # for all binaries directories, store the substitutions that will be 
1505         # required for extra compilations
1506         for key in d_bin_files_to_add:
1507             if key.endswith("(bin)"):
1508                 source_dir = d_bin_files_to_add[key][0]
1509                 path_in_archive = d_bin_files_to_add[key][1].replace("BINARIES-" +\
1510                    runner.cfg.VARS.dist,runner.cfg.INTERNAL.config.install_dir)
1511                 if os.path.basename(source_dir)==os.path.basename(path_in_archive):
1512                     # if basename is the same we will just substitute the dirname 
1513                     d_paths_to_substitute[os.path.dirname(source_dir)]=\
1514                         os.path.dirname(path_in_archive)
1515                 else:
1516                     d_paths_to_substitute[source_dir]=path_in_archive
1517
1518         d_files_to_add.update(d_bin_files_to_add)
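        # Hedged illustration of one d_paths_to_substitute entry (assuming
        # INTERNAL.config.install_dir is "INSTALL", the usual SALOME layout):
        #   "<application workdir>/BINARIES-FD32"  ->  "<archive root>/INSTALL"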
1519
1520     if options.sources:
1521         d_files_to_add.update(source_package(runner,
1522                                         runner.cfg,
1523                                         logger, 
1524                                         options,
1525                                         tmp_working_dir))
1526         if options.binaries:
1527             # for archives with bin and sources we provide a shell script able to 
1528             # install binaries for compilation
1529             file_install_bin=produce_install_bin_file(runner.cfg,logger,
1530                                                       tmp_working_dir,
1531                                                       d_paths_to_substitute,
1532                                                       "install_bin.sh")
1533             d_files_to_add.update({"install_bin" : (file_install_bin, "install_bin.sh")})
1534             logger.write("substitutions that need to be done later : \n", 5)
1535             logger.write(str(d_paths_to_substitute), 5)
1536             logger.write("\n", 5)
1537     else:
1538         # the --salometools option is not considered when --sources is selected,
1539         # as the sources archive already embeds salomeTools
1540         if options.sat:
1541             d_files_to_add.update(sat_package(runner.cfg, tmp_working_dir, 
1542                                   options, logger))
1543         
1544     if options.project:
1545         DBG.write("config for package %s" % options.project, runner.cfg)
1546         d_files_to_add.update(project_package(runner.cfg, options.project, options.project_file_path, options.ftp, tmp_working_dir, options.sat, logger))
1547
1548     if not d_files_to_add:
1549         msg = _("Error: Empty dictionary to build the archive!\n")
1550         logger.write(src.printcolors.printcError(msg), 1)
1551         logger.write("\n", 1)
1552         return 1
1553
1554     # Add the README file in the package
1555     local_readme_tmp_path = add_readme(runner.cfg, options, tmp_working_dir)
1556     d_files_to_add["README"] = (local_readme_tmp_path, "README")
1557
1558     # Add the additional files of option add_files
1559     if options.add_files:
1560         for file_path in options.add_files:
1561             if not os.path.exists(file_path):
1562                 logger.write(_("WARNING: the file %s is not accessible.\n") % file_path, 1)
1563                 continue
1564             file_name = os.path.basename(file_path)
1565             d_files_to_add[file_name] = (file_path, file_name)
1566
1567     logger.write("\n", 2)
1568     logger.write(src.printcolors.printcLabel(_("Actually do the package")), 2)
1569     logger.write("\n", 2)
1570     logger.write("\nfiles and directories to add:\n%s\n\n" % PP.pformat(d_files_to_add), 5)
1571
1572     res = 0
1573     try:
1574         # Create the tarfile object
1575         tar = tarfile.open(path_targz, mode='w:gz')
1576         
1577         # get the filtering function if needed
1578         filter_function = exclude_VCS_and_extensions
1579
1580         # Add the files to the tarfile object
1581         res = add_files(tar, archive_name, d_files_to_add, logger, f_exclude=filter_function)
1582         tar.close()
1583     except KeyboardInterrupt:
1584         logger.write(src.printcolors.printcError("\nERROR: forced interruption\n"), 1)
1585         logger.write(_("Removing the temporary working directory '%s'... ") % tmp_working_dir, 1)
1586         # remove the working directory
1587         shutil.rmtree(tmp_working_dir)
1588         logger.write(_("OK"), 1)
1589         logger.write(_("\n"), 1)
1590         return 1
1591     
1592     # case with no application (e.g. only packaging sat with 'sat package -t')
1593     try:
1594         app = runner.cfg.APPLICATION
1595     except Exception:
1596         app = None
1597
1598     # unconditionally remove the tmp_local_working_dir
1599     if app is not None:
1600         tmp_local_working_dir = os.path.join(app.workdir, "tmp_package")
1601         if os.path.isdir(tmp_local_working_dir):
1602             shutil.rmtree(tmp_local_working_dir)
1603
1604     # remove the tmp directory, unless the user is registered as a developer
1605     if os.path.isdir(tmp_working_dir) and (not DBG.isDeveloper()):
1606         shutil.rmtree(tmp_working_dir)
1607     
1608     # Print again the path of the package
1609     logger.write("\n", 2)
1610     src.printcolors.print_value(logger, "Package path", path_targz, 2)
1611     
1612     return res