commands/package.py (tools/sat.git, commit 52f8c96a9b7738ca0034bc13984e118b5e1c47e4)
1 #!/usr/bin/env python
2 #-*- coding:utf-8 -*-
3 #  Copyright (C) 2010-2012  CEA/DEN
4 #
5 #  This library is free software; you can redistribute it and/or
6 #  modify it under the terms of the GNU Lesser General Public
7 #  License as published by the Free Software Foundation; either
8 #  version 2.1 of the License.
9 #
10 #  This library is distributed in the hope that it will be useful,
11 #  but WITHOUT ANY WARRANTY; without even the implied warranty of
12 #  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13 #  Lesser General Public License for more details.
14 #
15 #  You should have received a copy of the GNU Lesser General Public
16 #  License along with this library; if not, write to the Free Software
17 #  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
18
19 import os
20 import stat
21 import shutil
22 import datetime
23 import tarfile
24 import codecs
25 import string
26 import glob
27 import pprint as PP
28 import sys
29 import src
30
31 from application import get_SALOME_modules
32 from  src.versionMinorMajorPatch import MinorMajorPatch as MMP
33 import src.debug as DBG
34
35 old_python = sys.version_info[0] == 2 and sys.version_info[1] <= 6
36
37 BINARY = "binary"
38 SOURCE = "Source"
39 PROJECT = "Project"
40 SAT = "Sat"
41
42 ARCHIVE_DIR = "ARCHIVES"
43 PROJECT_DIR = "PROJECT"
44
45 IGNORED_DIRS = [".git", ".svn"]
46 IGNORED_EXTENSIONS = []
47
48 PACKAGE_EXT=".tar.gz" # the extension we use for the packages
49
50 if src.architecture.is_windows():
51     PROJECT_TEMPLATE = """#!/usr/bin/env python
52 #-*- coding:utf-8 -*-
53
54 # The path to the archive root directory
55 root_path : $PWD + "/../"
56 # path to the PROJECT
57 project_path : $PWD + "/"
58
59 # Where to search the archives of the products
60 ARCHIVEPATH : $root_path + "ARCHIVES"
61 # Where to search the pyconf of the applications
62 APPLICATIONPATH : $project_path + "applications/"
63 # Where to search the pyconf of the products
64 PRODUCTPATH : $project_path + "products/"
65 # Where to search the pyconf of the jobs of the project
66 JOBPATH : $project_path + "jobs/"
67 # Where to search the pyconf of the machines of the project
68 MACHINEPATH : $project_path + "machines/"
69 """
70 else:
71     PROJECT_TEMPLATE = """#!/usr/bin/env python
72 #-*- coding:utf-8 -*-
73
74 # path to the PROJECT
75 project_path : $PWD + "/"
76
77 # Where to search the archives of the products
78 ARCHIVEPATH : $project_path + "ARCHIVES"
79 # Where to search the pyconf of the applications
80 APPLICATIONPATH : $project_path + "applications/"
81 # Where to search the pyconf of the products
82 PRODUCTPATH : $project_path + "products/"
83 # Where to search the pyconf of the jobs of the project
84 JOBPATH : $project_path + "jobs/"
85 # Where to search the pyconf of the machines of the project
86 MACHINEPATH : $project_path + "machines/"
87 """
88
89
90 LOCAL_TEMPLATE = ("""#!/usr/bin/env python
91 #-*- coding:utf-8 -*-
92
93   LOCAL :
94   {
95     base : 'default'
96     workdir : 'default'
97     log_dir : 'default'
98     archive_dir : 'default'
99     VCS : 'unknown'
100     tag : 'unknown'
101   }
102
103 PROJECTS :
104 {
105   project_file_paths :
106   [
107 $LOCAL.workdir + $VARS.sep + \"""" + PROJECT_DIR + """\" + $VARS.sep + "project.pyconf"
108   ]
109 }
110 """)
111
112 # Define all possible options for the package command: sat package <options>
113 parser = src.options.Options()
114 parser.add_option('b', 'binaries', 'boolean', 'binaries',
115     _('Optional: Produce a binary package.'), False)
116 parser.add_option('f', 'force_creation', 'boolean', 'force_creation',
117     _('Optional: Only binary package: produce the archive even if '
118       'there are some missing products.'), False)
119 parser.add_option('s', 'sources', 'boolean', 'sources',
120     _('Optional: Produce a compilable archive of the sources of the '
121       'application.'), False)
122 parser.add_option('', 'bin_products', 'boolean', 'bin_products',
123     _('Optional: Create binary archives for all products.'), False)
124 parser.add_option('', 'with_vcs', 'boolean', 'with_vcs',
125     _('Optional: Do not make archive for products in VCS mode (git, cvs, svn). '
126       'Sat prepare will use VCS mode instead to retrieve them.'
127       '\n          Also, when combined with "--bin_products" option, restrict the building of product archives to VCS products.'),
128     False)
129 parser.add_option('', 'ftp', 'boolean', 'ftp',
130     _('Optional: Do not embed archives for products in archive mode. '
131       'Sat prepare will use ftp instead to retrieve them.'),
132     False)
133 parser.add_option('e', 'exe', 'string', 'exe',
134     _('Optional: Produce an extra launcher based upon the exe given as argument.'), "")
135 parser.add_option('p', 'project', 'string', 'project',
136     _('Optional: Produce an archive that contains a project.'), "")
137 parser.add_option('t', 'salometools', 'boolean', 'sat',
138     _('Optional: Produce an archive that contains salomeTools.'), False)
139 parser.add_option('n', 'name', 'string', 'name',
140     _('Optional: The name or full path of the archive.'), None)
141 parser.add_option('', 'add_files', 'list2', 'add_files',
142     _('Optional: The list of additional files to add to the archive.'), [])
143 parser.add_option('', 'without_properties', 'properties', 'without_properties',
144     _('Optional: Filter the products by their properties.\n\tSyntax: '
145       '--without_properties <property>:<value>'))
146
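# Hypothetical invocations built from the options above (the application name
# and paths are made up; exact behaviour depends on the sat version):
#   sat package MY_APPLI --binaries --name /tmp/MY_APPLI-bin
#   sat package MY_APPLI --sources --with_vcs
#   sat package MY_APPLI --bin_products --force_creation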
147
148 def add_files(tar, name_archive, d_content, logger, f_exclude=None):
149     '''Create an archive containing all directories and files that are given in
150        the d_content argument.
151
152     :param tar tarfile: The tarfile instance used to make the archive.
153     :param name_archive str: The name of the archive to make.
154     :param d_content dict: The dictionary that contains all directories and files
155                            to add in the archive.
156                            d_content[label] =
157                                         (path_on_local_machine, path_in_archive)
158     :param logger Logger: the logging instance
159     :param f_exclude Function: the function that filters entries out of the archive
160     :return: 0 if success, 1 if not.
161     :rtype: int
162     '''
163     # get the max length of the names in order to align the display
164     max_len = len(max(d_content.keys(), key=len))
165
166     success = 0
167     # loop over each directory or file stored in the d_content dictionary
168     names = sorted(d_content.keys())
169     DBG.write("add tar names", names)
170
171     # used to avoid duplications (for pip install in python, or single_install_dir cases)
172     already_added=set()
173     for name in names:
174         # display information
175         len_points = max_len - len(name) + 3
176         local_path, archive_path = d_content[name]
177         in_archive = os.path.join(name_archive, archive_path)
178         logger.write(name + " " + len_points * "." + " "+ in_archive + " ", 3)
179         # Get the local path and the path in archive
180         # of the directory or file to add
181         # Add it in the archive
182         try:
183             key=local_path+"->"+in_archive
184             if key not in already_added:
185                 if old_python:
186                     tar.add(local_path,
187                                  arcname=in_archive,
188                                  exclude=exclude_VCS_and_extensions_26)
189                 else:
190                     tar.add(local_path,
191                                  arcname=in_archive,
192                                  filter=exclude_VCS_and_extensions)
193                 already_added.add(key)
194             logger.write(src.printcolors.printcSuccess(_("OK")), 3)
195         except Exception as e:
196             logger.write(src.printcolors.printcError(_("KO ")), 3)
197             logger.write(str(e), 3)
198             success = 1
199         logger.write("\n", 3)
200     return success
201
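# Minimal usage sketch for add_files (labels, paths and the logger are
# hypothetical; the real call is made by the package command itself):
#   d_content = {"KERNEL (bin)": ("/tmp/INSTALL/KERNEL", "BINARIES-CO7/KERNEL")}
#   with tarfile.open("/tmp/MY_APPLI.tar.gz", mode="w:gz") as tar:
#       add_files(tar, "MY_APPLI", d_content, logger)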
202
203 def exclude_VCS_and_extensions_26(filename):
204     ''' Exclusion callback used to keep the VCS directories (like .git) and the
205         ignored extensions out of the package (python 2.6 only).
206
207     :param filename Str: The filename to exclude (or not).
208     :return: True if the file has to be excluded
209     :rtype: Boolean
210     '''
211     for dir_name in IGNORED_DIRS:
212         if dir_name in filename:
213             return True
214     for extension in IGNORED_EXTENSIONS:
215         if filename.endswith(extension):
216             return True
217     return False
218
219 def exclude_VCS_and_extensions(tarinfo):
220     ''' Filter callback used to keep the VCS directories (like .git) and the
221         ignored extensions out of the package.
222
223     :param tarinfo TarInfo: The tar entry to exclude (or not).
224     :return: None if the entry has to be excluded, the tarinfo otherwise
225     :rtype: TarInfo or None
226     '''
227     filename = tarinfo.name
228     for dir_name in IGNORED_DIRS:
229         if dir_name in filename:
230             return None
231     for extension in IGNORED_EXTENSIONS:
232         if filename.endswith(extension):
233             return None
234     return tarinfo
235
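# The two callbacks above plug into tarfile.TarFile.add: with python 2.6 the
# legacy "exclude" argument receives a file name and excludes it when True is
# returned, while the "filter" argument (python >= 2.7) receives a TarInfo and
# excludes the member when None is returned.  Illustrative call (path is
# hypothetical):
#   tar.add("/tmp/SOURCES/KERNEL", arcname="KERNEL",
#           filter=exclude_VCS_and_extensions)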
236 def produce_relative_launcher(config,
237                               logger,
238                               file_dir,
239                               file_name,
240                               binaries_dir_name):
241     '''Create a specific SALOME launcher for the binary package. This launcher
242        uses relative paths.
243
244     :param config Config: The global configuration.
245     :param logger Logger: the logging instance
246     :param file_dir str: the directory where to put the launcher
247     :param file_name str: The launcher name
248     :param binaries_dir_name str: the name of the directory where the binaries
249                                   are, in the archive.
250     :return: the path of the produced launcher
251     :rtype: str
252     '''
253
254     # set base mode to "no" for the archive - save current mode to restore it at the end
255     if "base" in config.APPLICATION:
256         base_setting=config.APPLICATION.base
257     else:
258         base_setting="maybe"
259     config.APPLICATION.base="no"
260
261     # get KERNEL installation path
262     kernel_info = src.product.get_product_config(config, "KERNEL")
263     kernel_base_name=os.path.basename(kernel_info.install_dir)
264     if kernel_info.install_mode == "base":
265         # case of kernel installed in base. the kernel install dir name is different in the archive
266         kernel_base_name=os.path.basename(os.path.dirname(kernel_info.install_dir))
267
268     kernel_root_dir = os.path.join(binaries_dir_name, kernel_base_name)
269
270     # set kernel bin dir (considering fhs property)
271     kernel_cfg = src.product.get_product_config(config, "KERNEL")
272     if src.get_property_in_product_cfg(kernel_cfg, "fhs"):
273         bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin")
274     else:
275         bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin","salome")
276
277     # check if the application contains an application module
278     # check also if the application has a distene product,
279     # in this case get its licence file name
280     l_product_info = src.product.get_products_infos(config.APPLICATION.products.keys(), config)
281     salome_application_name="Not defined"
282     distene_licence_file_name=False
283     for prod_name, prod_info in l_product_info:
284         # look for a "salome application" and a distene product
285         if src.get_property_in_product_cfg(prod_info, "is_distene") == "yes":
286             distene_licence_file_name = src.product.product_has_licence(prod_info,
287                                             config.PATHS.LICENCEPATH)
288         if src.get_property_in_product_cfg(prod_info, "is_salome_application") == "yes":
289             salome_application_name=prod_info.name
290
291     # if the application contains an application module, we set ABSOLUTE_APPLI_PATH to it
292     # if not we set it to KERNEL_INSTALL_DIR, which is sufficient, except for salome test
293     if salome_application_name == "Not defined":
294         app_root_dir=kernel_root_dir
295     else:
296         app_root_dir=os.path.join(binaries_dir_name, salome_application_name)
297
298     additional_env={}
299     additional_env['sat_bin_kernel_install_dir'] = "out_dir_Path + " +\
300                                                    config.VARS.sep + bin_kernel_install_dir
301     if "python3" in config.APPLICATION and config.APPLICATION.python3 == "yes":
302         additional_env['sat_python_version'] = 3
303     else:
304         additional_env['sat_python_version'] = 2
305
306     additional_env['ABSOLUTE_APPLI_PATH'] = "out_dir_Path" + config.VARS.sep + app_root_dir
307     launcher_name = src.get_launcher_name(config)
308     additional_env['APPLI'] = "out_dir_Path" + config.VARS.sep + file_name
309
310     # create an environment file writer
311     writer = src.environment.FileEnvWriter(config,
312                                            logger,
313                                            file_dir,
314                                            src_root=None,
315                                            env_info=None)
316
317     filepath = os.path.join(file_dir, file_name)
318     # Write
319     writer.write_env_file(filepath,
320                           False,  # for launch
321                           "cfgForPy",
322                           additional_env=additional_env,
323                           no_path_init=False,
324                           for_package = binaries_dir_name)
325
326     # Little hack to put out_dir_Path outside the strings
327     src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' )
328     src.replace_in_file(filepath, "r'out_dir_Path + ", "out_dir_Path + r'" )
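    # Illustrative effect of the two substitutions above (the path shown is
    # hypothetical): a generated fragment such as
    #     r'out_dir_Path + /BINARIES-CO7/KERNEL/bin/salome'
    # becomes
    #     out_dir_Path + r'/BINARIES-CO7/KERNEL/bin/salome'
    # so that out_dir_Path is evaluated as a python variable when the launcher runs.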
329
330     # A hack to put a call to a file for distene licence.
331     # It does nothing to an application that has no distene product
332     if distene_licence_file_name:
333         logger.write("Application has a distene licence file! We use it in package launcher", 5)
334         hack_for_distene_licence(filepath, distene_licence_file_name)
335
336     # change the rights in order to make the file executable for everybody
337     os.chmod(filepath,
338              stat.S_IRUSR |
339              stat.S_IRGRP |
340              stat.S_IROTH |
341              stat.S_IWUSR |
342              stat.S_IXUSR |
343              stat.S_IXGRP |
344              stat.S_IXOTH)
345
346     # restore modified setting by its initial value
347     config.APPLICATION.base=base_setting
348
349     return filepath
350
351 def hack_for_distene_licence(filepath, licence_file):
352     '''Replace the distene licence env variable by a call to a file.
353
354     :param filepath Str: The path to the launcher to modify.
355     '''
356     shutil.move(filepath, filepath + "_old")
357     fileout= filepath
358     filein = filepath + "_old"
359     fin = open(filein, "r")
360     fout = open(fileout, "w")
361     text = fin.readlines()
362     # Find the Distene section
363     num_line = -1
364     for i,line in enumerate(text):
365         if "# Set DISTENE License" in line:
366             num_line = i
367             break
368     if num_line == -1:
369         # No distene product, there is nothing to do
370         fin.close()
371         for line in text:
372             fout.write(line)
373         fout.close()
374         return
375     del text[num_line +1]
376     del text[num_line +1]
377     text_to_insert ="""    try:
378         distene_licence_file=r"%s"
379         if sys.version_info[0] >= 3 and sys.version_info[1] >= 5:
380             import importlib.util
381             spec_dist = importlib.util.spec_from_file_location("distene_licence", distene_licence_file)
382             distene=importlib.util.module_from_spec(spec_dist)
383             spec_dist.loader.exec_module(distene)
384         else:
385             import imp
386             distene = imp.load_source('distene_licence', distene_licence_file)
387         distene.set_distene_variables(context)
388     except:
389         pass\n"""  % licence_file
390     text.insert(num_line + 1, text_to_insert)
391     for line in text:
392         fout.write(line)
393     fin.close()
394     fout.close()
395     return
396
397 def produce_relative_env_files(config,
398                               logger,
399                               file_dir,
400                               binaries_dir_name,
401                               exe_name=None):
402     '''Create some specific environment files for the binary package. These
403        files use relative paths.
404
405     :param config Config: The global configuration.
406     :param logger Logger: the logging instance
407     :param file_dir str: the directory where to put the files
408     :param binaries_dir_name str: the name of the directory where the binaries
409                                   are, in the archive.
410     :param exe_name str: if given generate a launcher executing exe_name
411     :return: the path of the produced environment file
412     :rtype: str
413     '''
414
415     # set base mode to "no" for the archive - save current mode to restore it at the end
416     if "base" in config.APPLICATION:
417         base_setting=config.APPLICATION.base
418     else:
419         base_setting="maybe"
420     config.APPLICATION.base="no"
421
422     # create an environment file writer
423     writer = src.environment.FileEnvWriter(config,
424                                            logger,
425                                            file_dir,
426                                            src_root=None)
427
428     if src.architecture.is_windows():
429       shell = "bat"
430       filename  = "env_launch.bat"
431     else:
432       shell = "bash"
433       filename  = "env_launch.sh"
434
435     if exe_name:
436         filename=os.path.basename(exe_name)
437
438     # Write
439     filepath = writer.write_env_file(filename,
440                           False, # for launch
441                           shell,
442                           for_package = binaries_dir_name)
443
444     # Little hack to put out_dir_Path as environment variable
445     if src.architecture.is_windows() :
446       src.replace_in_file(filepath, '"out_dir_Path', '"%out_dir_Path%' )
447       src.replace_in_file(filepath, '=out_dir_Path', '=%out_dir_Path%' )
448       src.replace_in_file(filepath, ';out_dir_Path', ';%out_dir_Path%' )
449     else:
450       src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )
451       src.replace_in_file(filepath, ':out_dir_Path', ':${out_dir_Path}' )
452       src.replace_in_file(filepath, ';out_dir_Path', ';${out_dir_Path}' )
453
454     if exe_name:
455         if src.architecture.is_windows():
456             cmd="\n\nrem Launch exe with user arguments\n%s " % exe_name + "%*"
457         else:
458             cmd='\n\n# Launch exe with user arguments\n%s "$*"' % exe_name
459         with open(filepath, "a") as exe_launcher:
460             exe_launcher.write(cmd)
461
462     # change the rights in order to make the file executable for everybody
463     os.chmod(filepath,
464              stat.S_IRUSR |
465              stat.S_IRGRP |
466              stat.S_IROTH |
467              stat.S_IWUSR |
468              stat.S_IXUSR |
469              stat.S_IXGRP |
470              stat.S_IXOTH)
471
472     # restore modified setting by its initial value
473     config.APPLICATION.base=base_setting
474
475     return filepath
476
477 def produce_install_bin_file(config,
478                              logger,
479                              file_dir,
480                              d_sub,
481                              file_name):
482     '''Create a bash shell script which does substitutions in the BINARIES dir
483        in order to use it for extra compilations.
484
485     :param config Config: The global configuration.
486     :param logger Logger: the logging instance
487     :param file_dir str: the directory where to put the files
488     :param d_sub dict: the dictionary that contains the substitutions to be done
489     :param file_name str: the name of the install script file
490     :return: the produced file
491     :rtype: str
492     '''
493     # Write
494     filepath = os.path.join(file_dir, file_name)
495     # open the file and write into it
496     # use codec utf-8 as sat variables are in unicode
497     with codecs.open(filepath, "w", 'utf-8') as installbin_file:
498         installbin_template_path = os.path.join(config.VARS.internal_dir,
499                                         "INSTALL_BIN.template")
500
501         # build the name of the directory that will contain the binaries
502         binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
503         # build the substitution loop
504         loop_cmd = "for f in $(grep -RIl"
505         for key in d_sub:
506             loop_cmd += " -e "+ key
507         loop_cmd += ' ' + config.INTERNAL.config.install_dir +\
508                     '); do\n     sed -i "\n'
509         for key in d_sub:
510             loop_cmd += "        s?" + key + "?$(pwd)/" + d_sub[key] + "?g\n"
511         loop_cmd += '            " $f\ndone'
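        # For a hypothetical d_sub = {"/old/prefix": "BINARIES-CO7"} and an
        # install dir named "INSTALL", the generated snippet looks roughly like:
        #   for f in $(grep -RIl -e /old/prefix INSTALL); do
        #        sed -i "
        #           s?/old/prefix?$(pwd)/BINARIES-CO7?g
        #               " $f
        #   done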
512
513         d={}
514         d["BINARIES_DIR"] = binaries_dir_name
515         d["SUBSTITUTION_LOOP"]=loop_cmd
516         d["INSTALL_DIR"]=config.INTERNAL.config.install_dir
517
518         # substitute the template and write it in file
519         content=src.template.substitute(installbin_template_path, d)
520         installbin_file.write(content)
521         # change the rights in order to make the file executable for everybody
522         os.chmod(filepath,
523                  stat.S_IRUSR |
524                  stat.S_IRGRP |
525                  stat.S_IROTH |
526                  stat.S_IWUSR |
527                  stat.S_IXUSR |
528                  stat.S_IXGRP |
529                  stat.S_IXOTH)
530
531     return filepath
532
533 def product_appli_creation_script(config,
534                                   logger,
535                                   file_dir,
536                                   binaries_dir_name):
537     '''Create a script that can produce an application (EDF style) in the binary
538        package.
539
540     :param config Config: The global configuration.
541     :param logger Logger: the logging instance
542     :param file_dir str: the directory where to put the file
543     :param binaries_dir_name str: the name of the directory where the binaries
544                                   are, in the archive.
545     :return: the path of the produced script file
546     :rtype: Str
547     '''
548     template_name = "create_appli.py.for_bin_packages.template"
549     template_path = os.path.join(config.VARS.internal_dir, template_name)
550     text_to_fill = open(template_path, "r").read()
551     text_to_fill = text_to_fill.replace("TO BE FILLED 1",
552                                         '"' + binaries_dir_name + '"')
553
554     text_to_add = ""
555     for product_name in get_SALOME_modules(config):
556         product_info = src.product.get_product_config(config, product_name)
557
558         if src.product.product_is_smesh_plugin(product_info):
559             continue
560
561         if 'install_dir' in product_info and bool(product_info.install_dir):
562             if src.product.product_is_cpp(product_info):
563                 # cpp module
564                 for cpp_name in src.product.get_product_components(product_info):
565                     line_to_add = ("<module name=\"" +
566                                    cpp_name +
567                                    "\" gui=\"yes\" path=\"''' + "
568                                    "os.path.join(dir_bin_name, \"" +
569                                    cpp_name + "\") + '''\"/>")
570             else:
571                 # regular module
572                 line_to_add = ("<module name=\"" +
573                                product_name +
574                                "\" gui=\"yes\" path=\"''' + "
575                                "os.path.join(dir_bin_name, \"" +
576                                product_name + "\") + '''\"/>")
577             text_to_add += line_to_add + "\n"
578
579     filled_text = text_to_fill.replace("TO BE FILLED 2", text_to_add)
580
581     tmp_file_path = os.path.join(file_dir, "create_appli.py")
582     ff = open(tmp_file_path, "w")
583     ff.write(filled_text)
584     ff.close()
585
586     # change the rights in order to make the file executable for everybody
587     os.chmod(tmp_file_path,
588              stat.S_IRUSR |
589              stat.S_IRGRP |
590              stat.S_IROTH |
591              stat.S_IWUSR |
592              stat.S_IXUSR |
593              stat.S_IXGRP |
594              stat.S_IXOTH)
595
596     return tmp_file_path
597
598 def bin_products_archives(config, logger, only_vcs):
599     '''Prepare binary archives for all the products.
600     :param config Config: The global configuration.
601     :param only_vcs boolean: restrict the archives to VCS products
602     :return: 0 on success
603     '''
604
605     logger.write("Make %s binary archives\n" % config.VARS.dist)
606     # Get the default directory where to put the packages
607     binpackage_path = os.path.join(config.APPLICATION.workdir, "PACKAGE", "products")
608     src.ensure_path_exists(binpackage_path)
609     # Get the list of product installation to add to the archive
610     l_products_name = sorted(config.APPLICATION.products.keys())
611     l_product_info = src.product.get_products_infos(l_products_name,
612                                                     config)
613     # first loop on products : filter products, analyse properties,
614     # and store the information that will be used to create the archive in the second loop
615     l_not_installed=[] # store not installed products for warning at the end
616     for prod_name, prod_info in l_product_info:
617         # ignore the native and fixed products for install directories
618         if (src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes"
619                 or src.product.product_is_native(prod_info)
620                 or src.product.product_is_fixed(prod_info)
621                 or not src.product.product_compiles(prod_info)):
622             continue
623         if only_vcs and not src.product.product_is_vcs(prod_info):
624             continue
625         if not src.product.check_installation(config, prod_info):
626             l_not_installed.append(prod_name)
627             continue  # product is not installed, we skip it
628         # prepare call to make_bin_archive
629         path_targz_prod = os.path.join(binpackage_path, prod_name + '-' + prod_info.version.replace("/", "_") + "-" + config.VARS.dist + PACKAGE_EXT)
630         targz_prod = tarfile.open(path_targz_prod, mode='w:gz')
631         bin_path = prod_info.install_dir
632         targz_prod.add(bin_path)
633         targz_prod.close()
634         # compute the MD5 hash of the archive and write it in a .md5 side file
635         import hashlib
636         with open(path_targz_prod, "rb") as f:
637             data = f.read()  # read file as bytes
638             readable_hash = hashlib.md5(data).hexdigest()
639             with open(path_targz_prod + ".md5", "w") as md5sum:
640                 md5sum.write("%s  %s" % (readable_hash, os.path.basename(path_targz_prod)))
641             logger.write("   archive : %s   (md5sum = %s)\n" % (path_targz_prod, readable_hash))
642
643     return 0
644
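# bin_products_archives typically leaves artefacts such as (names hypothetical):
#   <workdir>/PACKAGE/products/KERNEL-9.7.0-CO7.tar.gz
#   <workdir>/PACKAGE/products/KERNEL-9.7.0-CO7.tar.gz.md5
# The .md5 side file holds "<md5sum>  <archive name>", the layout md5sum -c expects.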
645 def binary_package(config, logger, options, tmp_working_dir):
646     '''Prepare a dictionary that stores all the needed directories and files to
647        add in a binary package.
648
649     :param config Config: The global configuration.
650     :param logger Logger: the logging instance
651     :param options OptResult: the options of the launched command
652     :param tmp_working_dir str: The temporary local directory containing some
653                                 specific directories or files needed in the
654                                 binary package
655     :return: the dictionary that stores all the needed directories and files to
656              add in a binary package.
657              {label : (path_on_local_machine, path_in_archive)}
658     :rtype: dict
659     '''
660
661     # Get the list of product installation to add to the archive
662     l_products_name = sorted(config.APPLICATION.products.keys())
663     l_product_info = src.product.get_products_infos(l_products_name,
664                                                     config)
665
666     # suppress compile time products for binaries-only archives
667     if not options.sources:
668         update_config(config, logger, "compile_time", "yes")
669
670     l_install_dir = []
671     l_source_dir = []
672     l_not_installed = []
673     l_sources_not_present = []
674     generate_mesa_launcher = False  # a flag to know if we generate a mesa launcher
675     if ("APPLICATION" in config  and
676         "properties"  in config.APPLICATION  and
677         "mesa_launcher_in_package"    in config.APPLICATION.properties  and
678         config.APPLICATION.properties.mesa_launcher_in_package == "yes") :
679             generate_mesa_launcher=True
680
681     has_properties  = "APPLICATION" in config and "properties" in config.APPLICATION
682     # first loop on products : filter products, analyse properties,
683     # and store the information that will be used to create the archive in the second loop
684     for prod_name, prod_info in l_product_info:
685         # skip product with property not_in_package set to yes
686         if src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes":
687             continue
688
689         if src.product.product_is_not_opensource(prod_info) and src.check_git_server_has_non_opensource(config, src.get_git_server(config, logger)):
690             continue
691
692         # Add the sources of the products that have the property
693         # sources_in_package : "yes"
694         if src.get_property_in_product_cfg(prod_info,
695                                            "sources_in_package") == "yes":
696             if os.path.exists(prod_info.source_dir):
697                 l_source_dir.append((prod_name, prod_info.source_dir))
698             else:
699                 l_sources_not_present.append(prod_name)
700
701         # ignore the native and fixed products for install directories
702         if (src.product.product_is_native(prod_info)
703                 or src.product.product_is_fixed(prod_info)
704                 or not src.product.product_compiles(prod_info)):
705             continue
706         #
707         # products with single_dir property will be installed in the PRODUCTS directory of the archive
708         is_single_dir=(src.appli_test_property(config,"single_install_dir", "yes") and \
709                        src.product.product_test_property(prod_info,"single_install_dir", "yes"))
710         if src.product.check_installation(config, prod_info):
711             l_install_dir.append((prod_name, prod_info.name, prod_info.install_dir,
712                                   is_single_dir, prod_info.install_mode))
713         else:
714             l_not_installed.append(prod_name)
715
716         # Add also the cpp generated modules (if any)
717         if src.product.product_is_cpp(prod_info):
718             # cpp module
719             for name_cpp in src.product.get_product_components(prod_info):
720                 install_dir = os.path.join(config.APPLICATION.workdir,
721                                            config.INTERNAL.config.install_dir,
722                                            name_cpp)
723                 if os.path.exists(install_dir):
724                     l_install_dir.append((name_cpp, name_cpp, install_dir, False, "value"))
725                 else:
726                     l_not_installed.append(name_cpp)
727
728     # check the name of the directory that could contain the binaries
729     # from previous detar
730     binaries_from_detar = os.path.join(
731                               config.APPLICATION.workdir,
732                               config.INTERNAL.config.binary_dir + config.VARS.dist)
733     if os.path.exists(binaries_from_detar):
734          logger.write("""
735 WARNING: existing binaries directory from previous detar installation:
736          %s
737          To make new package from this, you have to:
738          1) install binaries in INSTALL directory with the script "install_bin.sh"
739             see README file for more details
740          2) or recompile everything in INSTALL with "sat compile" command
741             this step is long, and requires some linux packages to be installed
742             on your system\n
743 """ % binaries_from_detar)
744
745     # Print warning or error if there are some missing products
746     if len(l_not_installed) > 0:
747         text_missing_prods = ""
748         for p_name in l_not_installed:
749             text_missing_prods += " - " + p_name + "\n"
750         if not options.force_creation:
751             msg = _("ERROR: there are missing product installations:")
752             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
753                                      text_missing_prods),
754                          1)
755             raise src.SatException(msg)
756         else:
757             msg = _("WARNING: there are missing products installations:")
758             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
759                                      text_missing_prods),
760                          1)
761
762     # Do the same for sources
763     if len(l_sources_not_present) > 0:
764         text_missing_prods = ""
765         for p_name in l_sources_not_present:
766             text_missing_prods += " - " + p_name + "\n"
767         if not options.force_creation:
768             msg = _("ERROR: there are missing product sources:")
769             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
770                                      text_missing_prods),
771                          1)
772             raise src.SatException(msg)
773         else:
774             msg = _("WARNING: there are missing products sources:")
775             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
776                                      text_missing_prods),
777                          1)
778
779     # construct the name of the directory that will contain the binaries
780     if src.architecture.is_windows():
781         binaries_dir_name = config.INTERNAL.config.binary_dir
782     else:
783         binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
784     # construct the correlation table between the product names, their
785     # actual install directories and their install directory in the archive
786     d_products = {}
787     for prod_name, prod_info_name, install_dir, is_single_dir, install_mode in l_install_dir:
788         prod_base_name=os.path.basename(install_dir)
789         if install_mode == "base":
790             # case of a product installed in base.
791             # because the archive is in base:no mode, the name of the install dir is different inside archive
792             # we set it to the product name or by PRODUCTS if single-dir
793             if is_single_dir:
794                 prod_base_name=config.INTERNAL.config.single_install_dir
795             else:
796                 prod_base_name=prod_info_name
797         path_in_archive = os.path.join(binaries_dir_name, prod_base_name)
798         d_products[prod_name + " (bin)"] = (install_dir, path_in_archive)
799
800     for prod_name, source_dir in l_source_dir:
801         path_in_archive = os.path.join("SOURCES", prod_name)
802         d_products[prod_name + " (sources)"] = (source_dir, path_in_archive)
803
804     # create an archive of the compilation logs, and insert it into the tarball
805     logpath=os.path.join(config.APPLICATION.workdir, "LOGS")
806     path_targz_logs = os.path.join(tmp_working_dir, "logs.tgz")
807     tar_log = tarfile.open(path_targz_logs, mode='w:gz')
808     tar_log.add(logpath, arcname="LOGS")
809     tar_log.close()
810     d_products["LOGS"] = (path_targz_logs, "logs.tgz")
811
812     # for packages of SALOME applications including KERNEL,
813     # we produce a salome launcher or a virtual application (depending on salome version)
814     if 'KERNEL' in config.APPLICATION.products:
815         VersionSalome = src.get_salome_version(config)
816         # Case where SALOME has the launcher that uses the SalomeContext API
817         if VersionSalome >= MMP([7,3,0]):
818             # create the relative launcher and add it to the files to add
819             launcher_name = src.get_launcher_name(config)
820             launcher_package = produce_relative_launcher(config,
821                                                  logger,
822                                                  tmp_working_dir,
823                                                  launcher_name,
824                                                  binaries_dir_name)
825             d_products["launcher"] = (launcher_package, launcher_name)
826
827             # if the application contains mesa products, we generate in addition to the
828             # classical salome launcher a launcher using mesa and called mesa_salome
829             # (the mesa launcher will be used for remote usage through ssh).
830             if generate_mesa_launcher:
831                 #if there is one : store the use_mesa property
832                 restore_use_mesa_option=None
833                 if ('properties' in config.APPLICATION and
834                     'use_mesa' in config.APPLICATION.properties):
835                     restore_use_mesa_option = config.APPLICATION.properties.use_mesa
836
837                 # activate mesa property, and generate a mesa launcher
838                 src.activate_mesa_property(config)  #activate use_mesa property
839                 launcher_mesa_name="mesa_"+launcher_name
840                 launcher_package_mesa = produce_relative_launcher(config,
841                                                      logger,
842                                                      tmp_working_dir,
843                                                      launcher_mesa_name,
844                                                      binaries_dir_name)
845                 d_products["launcher (mesa)"] = (launcher_package_mesa, launcher_mesa_name)
846
847                 # if there was a use_mesa value, we restore it
848                 # else we set it to the default value "no"
849                 if restore_use_mesa_option != None:
850                     config.APPLICATION.properties.use_mesa=restore_use_mesa_option
851                 else:
852                     config.APPLICATION.properties.use_mesa="no"
853
854             if options.sources:
855                 # if we mix binaries and sources, we add a copy of the launcher,
856                 # prefixed with "bin", in order to avoid clashes
857                 launcher_copy_name="bin"+launcher_name
858                 launcher_package_copy = produce_relative_launcher(config,
859                                                      logger,
860                                                      tmp_working_dir,
861                                                      launcher_copy_name,
862                                                      binaries_dir_name)
863                 d_products["launcher (copy)"] = (launcher_package_copy, launcher_copy_name)
864         else:
865             # Provide a script for the creation of an application EDF style
866             appli_script = product_appli_creation_script(config,
867                                                         logger,
868                                                         tmp_working_dir,
869                                                         binaries_dir_name)
870
871             d_products["appli script"] = (appli_script, "create_appli.py")
872
873     # Put also the environment file
874     env_file = produce_relative_env_files(config,
875                                            logger,
876                                            tmp_working_dir,
877                                            binaries_dir_name)
878
879     if src.architecture.is_windows():
880       filename  = "env_launch.bat"
881     else:
882       filename  = "env_launch.sh"
883     d_products["environment file"] = (env_file, filename)
884
885     # If option exe, produce an extra launcher based on specified exe
886     if options.exe:
887         exe_file = produce_relative_env_files(config,
888                                               logger,
889                                               tmp_working_dir,
890                                               binaries_dir_name,
891                                               options.exe)
892
893         if src.architecture.is_windows():
894           filename  = os.path.basename(options.exe) + ".bat"
895         else:
896           filename  = os.path.basename(options.exe) + ".sh"
897         d_products["exe file"] = (exe_file, filename)
898
899
900     return d_products
901
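# Shape of the dictionary returned by binary_package (entries are hypothetical):
#   {"KERNEL (bin)":     ("/path/INSTALL/KERNEL",     "BINARIES-CO7/KERNEL"),
#    "LOGS":             ("/tmp/.../logs.tgz",        "logs.tgz"),
#    "launcher":         ("/tmp/.../salome",          "salome"),
#    "environment file": ("/tmp/.../env_launch.sh",   "env_launch.sh")}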
902 def source_package(sat, config, logger, options, tmp_working_dir):
903     '''Prepare a dictionary that stores all the needed directories and files to
904        add in a source package.
905
906     :param config Config: The global configuration.
907     :param logger Logger: the logging instance
908     :param options OptResult: the options of the launched command
909     :param tmp_working_dir str: The temporary local directory containing some
910                                 specific directories or files needed in the
911                                 binary package
912     :return: the dictionary that stores all the needed directories and files to
913              add in a source package.
914              {label : (path_on_local_machine, path_in_archive)}
915     :rtype: dict
916     '''
917
918     d_archives, l_pinfo_vcs = {}, []  # l_pinfo_vcs stays empty when ftp mode skips get_archives
919     # Get all the products that are prepared using an archive
920     # unless ftp mode is specified (in this case the user of the
921     # archive will get the sources through the ftp mode of sat prepare)
922     if not options.ftp:
923         logger.write("Find archive products ... ")
924         d_archives, l_pinfo_vcs = get_archives(config, logger)
925         logger.write("Done\n")
926
927     d_archives_vcs = {}
928     if not options.with_vcs and len(l_pinfo_vcs) > 0:
929         # Make archives with the products that are not prepared using an archive
930         # (git, cvs, svn, etc)
931         logger.write("Construct archives for vcs products ... ")
932         d_archives_vcs = get_archives_vcs(l_pinfo_vcs,
933                                           sat,
934                                           config,
935                                           logger,
936                                           tmp_working_dir)
937         logger.write("Done\n")
938
939     # Create a project
940     logger.write("Create the project ... ")
941     d_project = create_project_for_src_package(config,
942                                                tmp_working_dir,
943                                                options.with_vcs,
944                                                options.ftp)
945     logger.write("Done\n")
946
947     # Add salomeTools
948     tmp_sat = add_salomeTools(config, tmp_working_dir)
949     d_sat = {"salomeTools" : (tmp_sat, "sat")}
950
951     # Add a sat symbolic link if not win
952     if not src.architecture.is_windows():
953         try:
954             t = os.getcwd()
955         except:
956             # In the jobs, os.getcwd() can fail
957             t = config.LOCAL.workdir
958         os.chdir(tmp_working_dir)
959
960         # create a symlink, to avoid reference with "salomeTool/.."
961         os.chdir("PROJECT")
962         if os.path.lexists("ARCHIVES"):
963             os.remove("ARCHIVES")
964         os.symlink("../ARCHIVES", "ARCHIVES")
965         os.chdir(t)
966
967         d_sat["sat archive link"] = (os.path.join(tmp_working_dir,"PROJECT", "ARCHIVES"),
968                                      os.path.join("PROJECT", "ARCHIVES"))
969
970     d_source = src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
971     return d_source
972
973 def get_archives(config, logger):
974     '''Find all the products that are retrieved using an archive and all the
975        products that are retrieved using a vcs (git, cvs, svn) repository.
976
977     :param config Config: The global configuration.
978     :param logger Logger: the logging instance
979     :return: the dictionary {name_product :
980              (local path of its archive, path in the package of its archive )}
981              and the list of specific configuration corresponding to the vcs
982              products
983     :rtype: (Dict, List)
984     '''
985     # Get the list of product information
986     l_products_name = config.APPLICATION.products.keys()
987     l_product_info = src.product.get_products_infos(l_products_name,
988                                                     config)
989     d_archives = {}
990     l_pinfo_vcs = []
991     for p_name, p_info in l_product_info:
992         # skip product with property not_in_package set to yes
993         if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
994             continue
995         # ignore the native and fixed products
996         if (src.product.product_is_native(p_info)
997                 or src.product.product_is_fixed(p_info)):
998             continue
999
1000         # skip product if git server misses non opensource products
1001         is_not_prod_opensource       = src.product.product_is_not_opensource(p_info)
1002         git_server = src.get_git_server(config,logger)
1003         has_git_server_non_opensource = src.check_git_server_has_non_opensource( config, git_server)
1004         if has_git_server_non_opensource and is_not_prod_opensource:
1005             logger.warning("%s is a closed-source software and is not available on %s" % (p_name, git_server))
1006             logger.flush()
1007             continue
1008
1009         if p_info.get_source == "archive":
1010             archive_path = p_info.archive_info.archive_name
1011             archive_name = os.path.basename(archive_path)
1012             d_archives[p_name] = (archive_path,
1013                                   os.path.join(ARCHIVE_DIR, archive_name))
1014             if (src.appli_test_property(config,"pip", "yes") and
1015                 src.product.product_test_property(p_info,"pip", "yes")):
1016                 # if pip mode is activated, and product is managed by pip
1017                 pip_wheels_dir=os.path.join(config.LOCAL.archive_dir,"wheels")
1018                 if "archive_prefix" in p_info.archive_info and p_info.archive_info.archive_prefix:
1019                     pip_wheel_pattern=os.path.join(pip_wheels_dir,
1020                                                    "%s-%s*" % (p_info.archive_info.archive_prefix, p_info.version))
1021                 else:
1022                     pip_wheel_pattern=os.path.join(pip_wheels_dir,
1023                                                    "%s-%s*" % (p_info.name, p_info.version))
1024                 pip_wheel_path=glob.glob(pip_wheel_pattern)
1025                 msg_pip_not_found="Error in get_archive, pip wheel for "\
1026                                   "product %s-%s was not found in %s directory"
1027                 msg_pip_two_or_more="Error in get_archive, several pip wheels for "\
1028                                   "product %s-%s were found in %s directory"
1029                 if len(pip_wheel_path)==0:
1030                     raise src.SatException(msg_pip_not_found %\
1031                         (p_info.name, p_info.version, pip_wheels_dir))
1032                 if len(pip_wheel_path)>1:
1033                     raise src.SatException(msg_pip_two_or_more %\
1034                         (p_info.name, p_info.version, pip_wheels_dir))
1035
1036                 pip_wheel_name=os.path.basename(pip_wheel_path[0])
1037                 d_archives[p_name+" (pip wheel)"]=(pip_wheel_path[0],
1038                     os.path.join(ARCHIVE_DIR, "wheels", pip_wheel_name))
1039         else:
1040             # this product is not managed by archive,
1041             # an archive of the vcs directory will be created by get_archive_vcs
1042             l_pinfo_vcs.append((p_name, p_info))
1043
1044     return d_archives, l_pinfo_vcs
1045
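# Shape of the values returned by get_archives (contents are hypothetical):
#   d_archives  = {"boost": ("/data/ARCHIVES/boost_1_58_0.tar.gz",
#                            "ARCHIVES/boost_1_58_0.tar.gz")}
#   l_pinfo_vcs = [("KERNEL", <KERNEL product config>)]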
1046 def add_salomeTools(config, tmp_working_dir):
1047     '''Prepare a version of salomeTools that has a specific local.pyconf file
1048        configured for a source package.
1049
1050     :param config Config: The global configuration.
1051     :param tmp_working_dir str: The temporary local directory containing some
1052                                 specific directories or files needed in the
1053                                 source package
1054     :return: The path to the local salomeTools directory to add in the package
1055     :rtype: str
1056     '''
1057     # Copy sat in the temporary working directory
1058     sat_tmp_path = src.Path(os.path.join(tmp_working_dir, "salomeTools"))
1059     sat_running_path = src.Path(config.VARS.salometoolsway)
1060     sat_running_path.copy(sat_tmp_path)
1061
1062     # Update the local.pyconf file that contains the path to the project
1063     local_pyconf_name = "local.pyconf"
1064     local_pyconf_dir = os.path.join(tmp_working_dir, "salomeTools", "data")
1065     local_pyconf_file = os.path.join(local_pyconf_dir, local_pyconf_name)
1066     # Remove the .pyconf file in the root directory of salomeTools if there is
1067     # any. (For example when launching jobs, a pyconf file describing the jobs
1068     # can be here and is not useful)
1069     files_or_dir_SAT = os.listdir(os.path.join(tmp_working_dir, "salomeTools"))
1070     for file_or_dir in files_or_dir_SAT:
1071         if file_or_dir.endswith(".pyconf") or file_or_dir.endswith(".txt"):
1072             file_path = os.path.join(tmp_working_dir,
1073                                      "salomeTools",
1074                                      file_or_dir)
1075             os.remove(file_path)
1076
1077     ff = open(local_pyconf_file, "w")
1078     ff.write(LOCAL_TEMPLATE)
1079     ff.close()
1080
1081     return sat_tmp_path.path
1082
1083 def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir):
1084     '''For a source package that requires all products to be retrieved as
1085        archives, archives have to be created for the vcs products.
1086        So this method calls the clean and source commands of sat and then creates
1087        the archives.
1088
1089     :param l_pinfo_vcs List: The list of specific configuration corresponding to
1090                              each vcs product
1091     :param sat Sat: The Sat instance that can be called to clean and source the
1092                     products
1093     :param config Config: The global configuration.
1094     :param logger Logger: the logging instance
1095     :param tmp_working_dir str: The temporary local directory containing some
1096                                 specific directories or files needed in the
1097                                 source package
1098     :return: the dictionary that stores all the archives to add in the source
1099              package. {label : (path_on_local_machine, path_in_archive)}
1100     :rtype: dict
1101     '''
1102     # clean the source directory of all the vcs products, then use the source
1103     # command and thus construct an archive that will not contain the patches
1104     l_prod_names = [pn for pn, __ in l_pinfo_vcs]
1105     if False: # clean is dangerous in user/SOURCES, fixed in tmp_local_working_dir
1106       logger.write(_("\nclean sources\n"))
1107       args_clean = config.VARS.application
1108       args_clean += " --sources --products "
1109       args_clean += ",".join(l_prod_names)
1110       logger.write("WARNING: get_archives_vcs clean\n         '%s'\n" % args_clean, 1)
1111       sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)
1112     if True:
1113       # source
1114       logger.write(_("get sources\n"))
1115       args_source = config.VARS.application
1116       args_source += " --products "
1117       args_source += ",".join(l_prod_names)
1118       svgDir = sat.cfg.APPLICATION.workdir
1119       tmp_local_working_dir = os.path.join(sat.cfg.APPLICATION.workdir, "tmp_package")  # to avoid too many big files in /tmp
1120       sat.cfg.APPLICATION.workdir = tmp_local_working_dir
1121       # DBG.write("SSS sat config.APPLICATION.workdir", sat.cfg.APPLICATION, True)
1122       # DBG.write("sat config id", id(sat.cfg), True)
1123       # note: config is not the same id() as the one used by sat.source()
1124       # sat.source(args_source, batch=True, verbose=5, logger_add_link = logger)
1125       import source
1126       source.run(args_source, sat, logger) #use this mode as runner.cfg reference
1127
1128       # make the new archives
1129       d_archives_vcs = {}
1130       for pn, pinfo in l_pinfo_vcs:
1131           path_archive = make_archive(pn, pinfo, tmp_local_working_dir)
1132           logger.write("make archive vcs '%s'\n" % path_archive)
1133           d_archives_vcs[pn] = (path_archive,
1134                                 os.path.join(ARCHIVE_DIR, pn + ".tgz"))
1135       sat.cfg.APPLICATION.workdir = svgDir
1136       # DBG.write("END sat config", sat.cfg.APPLICATION, True)
1137     return d_archives_vcs
1138
1139 def make_bin_archive(prod_name, prod_info, where):
1140     '''Create a binary archive of a product from its install directory.
1141
1142     :param prod_name str: The name of the product.
1143     :param prod_info Config: The specific configuration corresponding to the
1144                              product
1145     :param where str: The path of the directory where to put the resulting
1146                       archive
1147     :return: The path of the resulting archive
1148     :rtype: str
1149     '''
1150     path_targz_prod = os.path.join(where, prod_name + PACKAGE_EXT)
1151     tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
1152     bin_path = prod_info.install_dir
1153     tar_prod.add(bin_path, arcname=prod_name)
1154     tar_prod.close()
1155     return path_targz_prod
1156
1157 def make_archive(prod_name, prod_info, where):
1158     '''Create an archive of a product by searching its source directory.
1159
1160     :param prod_name str: The name of the product.
1161     :param prod_info Config: The specific configuration corresponding to the
1162                              product
1163     :param where str: The path of the directory where to put the resulting
1164                       archive
1165     :return: The path of the resulting archive
1166     :rtype: str
1167     '''
1168     path_targz_prod = os.path.join(where, prod_name + PACKAGE_EXT)
1169     tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
1170     local_path = prod_info.source_dir
1171     if old_python:
1172         tar_prod.add(local_path,
1173                      arcname=prod_name,
1174                      exclude=exclude_VCS_and_extensions_26)
1175     else:
1176         tar_prod.add(local_path,
1177                      arcname=prod_name,
1178                      filter=exclude_VCS_and_extensions)
1179     tar_prod.close()
1180     return path_targz_prod
1181
1182 def create_project_for_src_package(config, tmp_working_dir, with_vcs, with_ftp):
1183     '''Create a specific project for a source package.
1184
1185     :param config Config: The global configuration.
1186     :param tmp_working_dir str: The temporary local directory containing some
1187                                 specific directories or files needed in the
1188                                 source package
1189     :param with_vcs boolean: True if the package is with vcs products (not
1190                              transformed into archive products)
1191     :param with_ftp boolean: True if the package uses ftp servers to get archives
1192     :return: The dictionary
1193              {"project" : (produced project, project path in the archive)}
1194     :rtype: Dict
1195     '''
1196
1197     # Create in the working temporary directory the full project tree
1198     project_tmp_dir = os.path.join(tmp_working_dir, PROJECT_DIR)
1199     products_pyconf_tmp_dir = os.path.join(project_tmp_dir,
1200                                          "products")
1201     compil_scripts_tmp_dir = os.path.join(project_tmp_dir,
1202                                          "products",
1203                                          "compil_scripts")
1204     post_scripts_tmp_dir = os.path.join(project_tmp_dir,
1205                                          "products",
1206                                          "post_scripts")
1207     env_scripts_tmp_dir = os.path.join(project_tmp_dir,
1208                                          "products",
1209                                          "env_scripts")
1210     patches_tmp_dir = os.path.join(project_tmp_dir,
1211                                          "products",
1212                                          "patches")
1213     application_tmp_dir = os.path.join(project_tmp_dir,
1214                                          "applications")
1215     for directory in [project_tmp_dir,
1216                       compil_scripts_tmp_dir,
1217                       env_scripts_tmp_dir,
1218                       post_scripts_tmp_dir,
1219                       patches_tmp_dir,
1220                       application_tmp_dir]:
1221         src.ensure_path_exists(directory)
1222
1223     # Create the pyconf that contains the information of the project
1224     project_pyconf_name = "project.pyconf"
1225     project_pyconf_file = os.path.join(project_tmp_dir, project_pyconf_name)
1226     ff = open(project_pyconf_file, "w")
1227     ff.write(PROJECT_TEMPLATE)
1228     if with_ftp and len(config.PATHS.ARCHIVEFTP) > 0:
1229         ftp_path='ARCHIVEFTP : "'+config.PATHS.ARCHIVEFTP[0]
1230         for ftpserver in config.PATHS.ARCHIVEFTP[1:]:
1231             ftp_path=ftp_path+":"+ftpserver
1232         ftp_path+='"'
1233         ff.write("# ftp servers where to search for prerequisite archives\n")
1234         ff.write(ftp_path)
1235     # add licence paths if any
1236     if len(config.PATHS.LICENCEPATH) > 0:
1237         licence_path='LICENCEPATH : "'+config.PATHS.LICENCEPATH[0]
1238         for path in config.PATHS.LICENCEPATH[1:]:
1239             licence_path=licence_path+":"+path
1240         licence_path+='"'
1241         ff.write("\n# Where to search for licences\n")
1242         ff.write(licence_path)
1243
1244
1245     ff.close()
1246
1247     # Loop over the products to get their pyconf and all the scripts
1248     # (compilation, environment, patches)
1249     # and create the pyconf file to add to the project
1250     lproducts_name = config.APPLICATION.products.keys()
1251     l_products = src.product.get_products_infos(lproducts_name, config)
1252     for p_name, p_info in l_products:
1253         # skip product with property not_in_package set to yes
1254         if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
1255             continue
1256         find_product_scripts_and_pyconf(p_name,
1257                                         p_info,
1258                                         config,
1259                                         with_vcs,
1260                                         compil_scripts_tmp_dir,
1261                                         env_scripts_tmp_dir,
1262                                         post_scripts_tmp_dir,
1263                                         patches_tmp_dir,
1264                                         products_pyconf_tmp_dir)
1265
1266     # for the application pyconf, we write the config directly
1267     # (we do not search for the original pyconf file)
1268     # to avoid problems with overwritten sections and the rm_products key
1269     write_application_pyconf(config, application_tmp_dir)
1270
1271     d_project = {"project" : (project_tmp_dir, PROJECT_DIR )}
1272     return d_project
1273
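# The project tree produced in tmp_working_dir by the function above looks
# roughly like this (file names depend on the application and its products):
#
#   PROJECT/
#       project.pyconf
#       applications/<application>.pyconf
#       products/<product>.pyconf ...
#       products/compil_scripts/   products/env_scripts/
#       products/post_scripts/     products/patches/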
1274 def find_product_scripts_and_pyconf(p_name,
1275                                     p_info,
1276                                     config,
1277                                     with_vcs,
1278                                     compil_scripts_tmp_dir,
1279                                     env_scripts_tmp_dir,
1280                                     post_scripts_tmp_dir,
1281                                     patches_tmp_dir,
1282                                     products_pyconf_tmp_dir):
1283     '''Create a specific pyconf file for a given product. Get its environment
1284        script, its compilation script and patches and put it in the temporary
1285        working directory. This method is used in the source package in order to
1286        construct the specific project.
1287
1288     :param p_name str: The name of the product.
1289     :param p_info Config: The specific configuration corresponding to the
1290                              product
1291     :param config Config: The global configuration.
1292     :param with_vcs boolean: True if the package is with vcs products (not
1293                              transformed into archive products)
1294     :param compil_scripts_tmp_dir str: The path to the temporary compilation
1295                                        scripts directory of the project.
1296     :param env_scripts_tmp_dir str: The path to the temporary environment script
1297                                     directory of the project.
1298     :param post_scripts_tmp_dir str: The path to the temporary post-processing script
1299                                     directory of the project.
1300     :param patches_tmp_dir str: The path to the temporary patch scripts
1301                                 directory of the project.
1302     :param products_pyconf_tmp_dir str: The path to the temporary directory
1303                                         where the product pyconf files are written.
1304     '''
1305
1306     # read the pyconf of the product
1307     product_pyconf_cfg = src.pyconf.Config(p_info.from_file)
1308
1309     # find the compilation script if any
1310     if src.product.product_has_script(p_info):
1311         compil_script_path = src.Path(p_info.compil_script)
1312         compil_script_path.copy(compil_scripts_tmp_dir)
1313
1314     # find the environment script if any
1315     if src.product.product_has_env_script(p_info):
1316         env_script_path = src.Path(p_info.environ.env_script)
1317         env_script_path.copy(env_scripts_tmp_dir)
1318
1319     # find the post script if any
1320     if src.product.product_has_post_script(p_info):
1321         post_script_path = src.Path(p_info.post_script)
1322         post_script_path.copy(post_scripts_tmp_dir)
1323
1324     # find the patches if any
1325     if src.product.product_has_patches(p_info):
1326         patches = src.pyconf.Sequence()
1327         for patch_path in p_info.patches:
1328             p_path = src.Path(patch_path)
1329             p_path.copy(patches_tmp_dir)
1330             patches.append(os.path.basename(patch_path), "")
1331
1332     if (not with_vcs) and src.product.product_is_vcs(p_info):
1333         # in non-vcs mode, if the product is not an archive product, turn it into one.
1334
1335         # depending upon the incremental mode, select impacted sections
1336         if "properties" in p_info and "incremental" in p_info.properties and\
1337             p_info.properties.incremental == "yes":
1338             sections = ["default", "default_win", p_info.section, p_info.section+"_win"]
1339         else:
1340             sections = [p_info.section]
1341         for section in sections:
1342             if section in product_pyconf_cfg and "get_source" in product_pyconf_cfg[section]:
1343                 DBG.write("sat package set archive mode to archive for product %s and section %s" %\
1344                           (p_name,section))
1345                 product_pyconf_cfg[section].get_source = "archive"
1346                 if "archive_info" not in product_pyconf_cfg[section]:
1347                     product_pyconf_cfg[section].addMapping("archive_info",
1348                                         src.pyconf.Mapping(product_pyconf_cfg),
1349                                         "")
1350                     product_pyconf_cfg[section].archive_info.archive_name =\
1351                         p_info.name + ".tgz"
1352
1353     # save git repositories for vcs products, even if the archive is not in VCS mode;
1354     # in this case the user will still be able to change the get_source flag and work with git
1355     if src.product.product_is_vcs(p_info):
1356         # in vcs mode we must explicitly replace the git server url
1357         # (otherwise it will not be found later, because project files are not exported in archives)
1358         for section in product_pyconf_cfg:
1359             # replace, in all sections of the product pyconf, the git repo definition by its substituted value (found in p_info)
1360             if "git_info" in product_pyconf_cfg[section]:
1361                 for repo in product_pyconf_cfg[section].git_info:
1362                     if repo in p_info.git_info:
1363                         product_pyconf_cfg[section].git_info[repo] =  p_info.git_info[repo]
1364
1365     # write the pyconf file to the temporary project location
1366     product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir,
1367                                            p_name + ".pyconf")
1368     ff = open(product_tmp_pyconf_path, 'w')
1369     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1370     product_pyconf_cfg.__save__(ff, 1)
1371     ff.close()
1372
1373
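# When with_vcs is False, the impacted section of the product pyconf written
# above ends up looking roughly like this (hypothetical product "MYPRODUCT"):
#
#   default :
#   {
#     get_source : "archive"
#     archive_info : { archive_name : "MYPRODUCT.tgz" }
#     ...
#   }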
1374 def write_application_pyconf(config, application_tmp_dir):
1375     '''Write the application pyconf file in the specific temporary
1376        directory containing the specific project of a source package.
1377
1378     :param config Config: The global configuration.
1379     :param application_tmp_dir str: The path to the temporary application
1380                                     scripts directory of the project.
1381     '''
1382     application_name = config.VARS.application
1383     # write the pyconf file to the temporary application location
1384     application_tmp_pyconf_path = os.path.join(application_tmp_dir,
1385                                                application_name + ".pyconf")
1386     with open(application_tmp_pyconf_path, 'w') as f:
1387         f.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1388         res = src.pyconf.Config()
1389         app = src.pyconf.deepCopyMapping(config.APPLICATION)
1390
1391         # set base mode to "no" for the archive
1392         app.base = "no"
1393
1394         # Change the workdir
1395         app.workdir = src.pyconf.Reference(
1396                                  app,
1397                                  src.pyconf.DOLLAR,
1398                                  'LOCAL.workdir')
1399         res.addMapping("APPLICATION", app, "")
1400         res.__save__(f, evaluated=False)
1401
1402
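# The application pyconf written above contains a single APPLICATION mapping,
# with base forced to "no" and workdir redirected to the local workdir of the
# user who unpacks the archive, roughly:
#
#   APPLICATION :
#   {
#     workdir : $LOCAL.workdir
#     base : "no"
#     ...
#   }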
1403 def sat_package(config, tmp_working_dir, options, logger):
1404     '''Prepare a dictionary that stores all the needed directories and files to
1405        add in a salomeTool package.
1406
     :param config Config: The global configuration.
1407     :param tmp_working_dir str: The temporary local working directory
1408     :param options OptResult: the options of the launched command
1409     :return: the dictionary that stores all the needed directories and files to
1410              add in a salomeTool package.
1411              {label : (path_on_local_machine, path_in_archive)}
1412     :rtype: dict
1413     '''
1414     d_project = {}
1415
1416     # we include sat itself
1417     d_project["all_sat"]=(config.VARS.salometoolsway, "")
1418
1419     # and we overwrite local.pyconf with a clean version.
1420     local_pyconf_tmp_path = os.path.join(tmp_working_dir, "local.pyconf")
1421     local_file_path = os.path.join(config.VARS.datadir, "local.pyconf")
1422     local_cfg = src.pyconf.Config(local_file_path)
1423     local_cfg.PROJECTS.project_file_paths=src.pyconf.Sequence(local_cfg.PROJECTS)
1424     local_cfg.LOCAL["base"] = "default"
1425     local_cfg.LOCAL["workdir"] = "default"
1426     local_cfg.LOCAL["log_dir"] = "default"
1427     local_cfg.LOCAL["archive_dir"] = "default"
1428     local_cfg.LOCAL["VCS"] = "None"
1429     local_cfg.LOCAL["tag"] = src.get_salometool_version(config)
1430
1431     # if the archive contains a project, we write its relative path in local.pyconf
1432     if options.project:
1433         project_arch_path = os.path.join("projects", options.project,
1434                                          os.path.basename(options.project_file_path))
1435         local_cfg.PROJECTS.project_file_paths.append(project_arch_path, "")
1436
1437     ff = open(local_pyconf_tmp_path, 'w')
1438     local_cfg.__save__(ff, 1)
1439     ff.close()
1440     d_project["local.pyconf"]=(local_pyconf_tmp_path, "data/local.pyconf")
1441     return d_project
1442
1443
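# Hedged sketch of the dictionary returned by sat_package() (local paths are
# hypothetical):
#
#   {
#     "all_sat"      : ("/path/to/salomeTools", ""),
#     "local.pyconf" : ("<tmp_working_dir>/local.pyconf", "data/local.pyconf"),
#   }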
1444 def project_package(config, name_project, project_file_path, ftp_mode, tmp_working_dir, embedded_in_sat, logger):
1445     '''Prepare a dictionary that stores all the needed directories and files to
1446        add in a project package.
1447
1448     :param project_file_path str: The path to the local project.
1449     :param ftp_mode boolean: Do not embed archives, the archive will rely on ftp mode to retrieve them.
1450     :param tmp_working_dir str: The temporary local directory containing some
1451                                 specific directories or files needed in the
1452                                 project package
1453     :param embedded_in_sat boolean : the project package is embedded in a sat package
1454     :return: the dictionary that stores all the needed directories and files to
1455              add in a project package.
1456              {label : (path_on_local_machine, path_in_archive)}
1457     :rtype: dict
1458     '''
1459     d_project = {}
1460     # Read the project file and get the directories to add to the package
1461
1462     try:
1463       project_pyconf_cfg = config.PROJECTS.projects.__getattr__(name_project)
1464     except:
1465       logger.write("""
1466 WARNING: config.PROJECTS.projects.%s does not exist, trying to read it now from:\n%s\n""" % (name_project, project_file_path))
1467       project_pyconf_cfg = src.pyconf.Config(project_file_path)
1468       project_pyconf_cfg.PWD = os.path.dirname(project_file_path)
1469
1470     paths = {"APPLICATIONPATH" : "applications",
1471              "PRODUCTPATH" : "products",
1472              "JOBPATH" : "jobs",
1473              "MACHINEPATH" : "machines"}
1474     if not ftp_mode:
1475         paths["ARCHIVEPATH"] = "archives"
1476
1477     # Loop over the project paths and add them
1478     project_file_name = os.path.basename(project_file_path)
         # compute the destination of the project file once, so that it is defined
         # even if none of the paths below is present in the project configuration
         if embedded_in_sat:
             project_file_dest = os.path.join("projects", name_project, project_file_name)
         else:
             project_file_dest = project_file_name
1479     for path in paths:
1480         if path not in project_pyconf_cfg:
1481             continue
1482         if embedded_in_sat:
1483             dest_path = os.path.join("projects", name_project, paths[path])
1485         else:
1486             dest_path = paths[path]
1488
1489         # Add the directory to the files to add in the package
1490         d_project[path] = (project_pyconf_cfg[path], dest_path)
1491
1492         # Modify the value of the path in the package
1493         project_pyconf_cfg[path] = src.pyconf.Reference(
1494                                     project_pyconf_cfg,
1495                                     src.pyconf.DOLLAR,
1496                                     'project_path + "/' + paths[path] + '"')
1497
1498     # Modify some values
1499     if "project_path" not in project_pyconf_cfg:
1500         project_pyconf_cfg.addMapping("project_path",
1501                                       src.pyconf.Mapping(project_pyconf_cfg),
1502                                       "")
1503     project_pyconf_cfg.project_path = src.pyconf.Reference(project_pyconf_cfg,
1504                                                            src.pyconf.DOLLAR,
1505                                                            'PWD')
1506     # we don't want to export these two fields
1507     project_pyconf_cfg.__delitem__("file_path")
1508     project_pyconf_cfg.__delitem__("PWD")
1509     if ftp_mode:
1510         project_pyconf_cfg.__delitem__("ARCHIVEPATH")
1511
1512     # Write the project pyconf file
1513     project_pyconf_tmp_path = os.path.join(tmp_working_dir, project_file_name)
1514     ff = open(project_pyconf_tmp_path, 'w')
1515     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1516     project_pyconf_cfg.__save__(ff, 1)
1517     ff.close()
1518     d_project["Project hat file"] = (project_pyconf_tmp_path, project_file_dest)
1519
1520     return d_project
1521
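# Hedged sketch of the result for a hypothetical project "myproject" embedded in
# a sat package (second members are the paths inside the archive):
#
#   {
#     "APPLICATIONPATH"  : (<local applications dir>, "projects/myproject/applications"),
#     "PRODUCTPATH"      : (<local products dir>,     "projects/myproject/products"),
#     ...
#     "Project hat file" : (<tmp pyconf path>,        "projects/myproject/myproject.pyconf"),
#   }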
1522 def add_readme(config, options, where):
1523     readme_path = os.path.join(where, "README")
1524     with codecs.open(readme_path, "w", 'utf-8') as f:
1525
1526         # templates for building the header
1527         readme_header="""
1528 # This package was generated with sat $version
1529 # Date: $date
1530 # User: $user
1531 # Distribution : $dist
1532
1533 In the following, $$ROOT represents the directory where you have installed
1534 SALOME (the directory where this file is located).
1535
1536 """
1537         if src.architecture.is_windows():
1538             readme_header = readme_header.replace('$$ROOT','%ROOT%')
1539         readme_compilation_with_binaries="""
1540
1541 compilation based on the binaries used as prerequisites
1542 =======================================================
1543
1544 If you fail to compile the complete application (for example because
1545 you are not root on your system and cannot install missing packages), you
1546 may try a partial compilation based on the binaries.
1547 For that it is necessary to copy the binaries from BINARIES to INSTALL,
1548 and do some substitutions on cmake and .la files (replace the build directories
1549 with local paths).
1550 The procedure to do it is:
1551  1) Remove or rename INSTALL directory if it exists
1552  2) Execute the shell script install_bin.sh:
1553  > cd $ROOT
1554  > ./install_bin.sh
1555  3) Use salomeTools (as explained in the Sources section) and compile only the
1556     modules you need (with the -p option)
1557
1558 """
1559         readme_header_tpl=string.Template(readme_header)
1560         readme_template_path_bin = os.path.join(config.VARS.internal_dir,
1561                 "README_BIN.template")
1562         readme_template_path_bin_launcher = os.path.join(config.VARS.internal_dir,
1563                 "README_LAUNCHER.template")
1564         readme_template_path_bin_virtapp = os.path.join(config.VARS.internal_dir,
1565                 "README_BIN_VIRTUAL_APP.template")
1566         readme_template_path_src = os.path.join(config.VARS.internal_dir,
1567                 "README_SRC.template")
1568         readme_template_path_pro = os.path.join(config.VARS.internal_dir,
1569                 "README_PROJECT.template")
1570         readme_template_path_sat = os.path.join(config.VARS.internal_dir,
1571                 "README_SAT.template")
1572
1573         # prepare substitution dictionary
1574         d = dict()
1575         d['user'] = config.VARS.user
1576         d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
1577         d['version'] = src.get_salometool_version(config)
1578         d['dist'] = config.VARS.dist
1579         f.write(readme_header_tpl.substitute(d)) # write the general header (common)
1580
1581         if options.binaries or options.sources:
1582             d['application'] = config.VARS.application
1583             d['BINARIES']    = config.INTERNAL.config.binary_dir
1584             d['SEPARATOR'] = config.VARS.sep
1585             if src.architecture.is_windows():
1586                 d['operatingSystem'] = 'Windows'
1587                 d['PYTHON3'] = 'python3'
1588                 d['ROOT']    = '%ROOT%'
1589             else:
1590                 d['operatingSystem'] = 'Linux'
1591                 d['PYTHON3'] = ''
1592                 d['ROOT']    = '$ROOT'
1593             f.write("# Application: " + d['application'] + "\n")
1594             if 'KERNEL' in config.APPLICATION.products:
1595                 VersionSalome = src.get_salome_version(config)
1596                 # Case where SALOME has the launcher that uses the SalomeContext API
1597                 if VersionSalome >= MMP([7,3,0]):
1598                     d['launcher'] = config.APPLICATION.profile.launcher_name
1599                 else:
1600                     d['virtual_app'] = 'runAppli'  # this info is not used for now
1601
1602         # write the specific sections
1603         if options.binaries:
1604             f.write(src.template.substitute(readme_template_path_bin, d))
1605             if "virtual_app" in d:
1606                 f.write(src.template.substitute(readme_template_path_bin_virtapp, d))
1607             if "launcher" in d:
1608                 f.write(src.template.substitute(readme_template_path_bin_launcher, d))
1609
1610         if options.sources:
1611             f.write(src.template.substitute(readme_template_path_src, d))
1612
1613         if options.binaries and options.sources and not src.architecture.is_windows():
1614             f.write(readme_compilation_with_binaries)
1615
1616         if options.project:
1617             f.write(src.template.substitute(readme_template_path_pro, d))
1618
1619         if options.sat:
1620             f.write(src.template.substitute(readme_template_path_sat, d))
1621
1622     return readme_path
1623
1624 def update_config(config, logger,  prop, value):
1625     '''Remove from config.APPLICATION.products the products that have the property given as input.
1626
1627     :param config Config: The global config.
1628     :param prop str: The property to filter
1629     :param value str: The value of the property to filter
1630     '''
1631     # if there is no APPLICATION (e.g. sat package -t): nothing to do
1632     if "APPLICATION" in config:
1633         l_product_to_remove = []
1634         for product_name in config.APPLICATION.products.keys():
1635             prod_cfg = src.product.get_product_config(config, product_name)
1636             if src.get_property_in_product_cfg(prod_cfg, prop) == value:
1637                 l_product_to_remove.append(product_name)
1638         for product_name in l_product_to_remove:
1639             config.APPLICATION.products.__delitem__(product_name)
1640             logger.write("Remove product %s with property %s\n" % (product_name, prop), 5)
1641
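# Typical usage, as done in run() below: drop from the configuration all the
# products flagged with the not_in_package property before building the archive.
#
#   update_config(runner.cfg, logger, "not_in_package", "yes")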
1642 def description():
1643     '''method that is called when salomeTools is called with --help option.
1644
1645     :return: The text to display for the package command description.
1646     :rtype: str
1647     '''
1648     return _("""
1649 The package command creates a tar file archive of a product.
1650 There are four kinds of archive, which can be mixed:
1651
1652  1 - The binary archive.
1653      It contains the product installation directories plus a launcher.
1654  2 - The sources archive.
1655      It contains the product archives and a project (the application plus salomeTools).
1656  3 - The project archive.
1657      It contains a project (give the project file path as argument).
1658  4 - The salomeTools archive.
1659      It contains the salomeTools utility code.
1660
1661 example:
1662  >> sat package SALOME-master --binaries --sources""")
1663
1664 def run(args, runner, logger):
1665     '''method that is called when salomeTools is called with package parameter.
1666     '''
1667
1668     # Parse the options
1669     (options, args) = parser.parse_args(args)
1670
1671
1672     # Check that a type of package is called, and only one
1673     all_option_types = (options.binaries,
1674                         options.sources,
1675                         options.project not in ["", None],
1676                         options.sat,
1677                         options.bin_products)
1678
1679     # Check if no option for package type
1680     if all_option_types.count(True) == 0:
1681         msg = _("Error: Specify a type for the package\nUse at least one of the "
1682                 "following options: --binaries, --sources, --project,"
1683                 " --salometools or --bin_products")
1684         logger.write(src.printcolors.printcError(msg), 1)
1685         logger.write("\n", 1)
1686         return 1
1687     do_create_package = options.binaries or options.sources or options.project or options.sat
1688
1689     if options.bin_products:
1690         ret = bin_products_archives(runner.cfg, logger, options.with_vcs)
1691         if ret!=0:
1692             return ret
1693     if not do_create_package:
1694         return 0
1695
1696     # continue to create a tar.gz package
1697
1698     # The default directory where to put the package if it contains neither binaries nor sources
1699     package_default_path = runner.cfg.LOCAL.workdir
1700     # if the package contains binaries or sources:
1701     if options.binaries or options.sources or options.bin_products:
1702         # Check that the command has been called with an application
1703         src.check_config_has_application(runner.cfg)
1704
1705         # Display information
1706         logger.write(_("Packaging application %s\n") % src.printcolors.printcLabel(
1707                                                     runner.cfg.VARS.application), 1)
1708
1709         # Get the default directory where to put the packages
1710         package_default_path = os.path.join(runner.cfg.APPLICATION.workdir, "PACKAGE")
1711         src.ensure_path_exists(package_default_path)
1712
1713     # if the package contains a project:
1714     if options.project:
1715         # check that the project is visible by SAT
1716         projectNameFile = options.project + ".pyconf"
1717         foundProject = None
1718         for i in runner.cfg.PROJECTS.project_file_paths:
1719             baseName = os.path.basename(i)
1720             if baseName == projectNameFile:
1721                 foundProject = i
1722                 break
1723
1724         if foundProject is None:
1725             local_path = os.path.join(runner.cfg.VARS.salometoolsway, "data", "local.pyconf")
1726             msg = _("""ERROR: the project %(1)s is not visible by salomeTools.
1727 known projects are:
1728 %(2)s
1729
1730 Please add it in file:
1731 %(3)s""" % \
1732                     {"1": options.project, "2": "\n  ".join(runner.cfg.PROJECTS.project_file_paths), "3": local_path})
1733             logger.write(src.printcolors.printcError(msg), 1)
1734             logger.write("\n", 1)
1735             return 1
1736         else:
1737             options.project_file_path = foundProject
1738             src.printcolors.print_value(logger, "Project path", options.project_file_path, 2)
1739
1740     # Remove the products that are filtered by the --without_properties option
1741     if options.without_properties:
1742         prop, value = options.without_properties
1743         update_config(runner.cfg, logger, prop, value)
1744
1745     # Remove from config the products that have the not_in_package property
1746     update_config(runner.cfg, logger, "not_in_package", "yes")
1747
1748     # get the name of the archive or build it
1749     if options.name:
1750         if os.path.basename(options.name) == options.name:
1751             # only a name (not a path)
1752             archive_name = options.name
1753             dir_name = package_default_path
1754         else:
1755             archive_name = os.path.basename(options.name)
1756             dir_name = os.path.dirname(options.name)
1757
1758         # suppress extension
1759         if archive_name[-len(".tgz"):] == ".tgz":
1760             archive_name = archive_name[:-len(".tgz")]
1761         if archive_name[-len(".tar.gz"):] == ".tar.gz":
1762             archive_name = archive_name[:-len(".tar.gz")]
1763
1764     else:
1765         archive_name=""
1766         dir_name = package_default_path
1767         if options.binaries or options.sources:
1768             archive_name = runner.cfg.APPLICATION.name
1769
1770         if options.binaries:
1771             archive_name += "-"+runner.cfg.VARS.dist
1772
1773         if options.sources:
1774             archive_name += "-SRC"
1775             if options.with_vcs:
1776                 archive_name += "-VCS"
1777
1778         if options.sat:
1779             archive_name += ("salomeTools_" + src.get_salometool_version(runner.cfg))
1780
1781         if options.project:
1782             if options.sat:
1783                 archive_name += "_"
1784             archive_name += ("satproject_" + options.project)
1785
1786         if len(archive_name)==0: # no option worked
1787             msg = _("Error: Cannot name the archive\n"
1788                     " check if at least one of the following options was "
1789                     "selected: --binaries, --sources, --project or"
1790                     " --salometools")
1791             logger.write(src.printcolors.printcError(msg), 1)
1792             logger.write("\n", 1)
1793             return 1
1794
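    # For example, "sat package SALOME-master --binaries --sources" typically leads
    # to an archive name like "SALOME-master-<dist>-SRC" (hence a file
    # "SALOME-master-<dist>-SRC.tar.gz" below); the actual <dist> value comes from
    # the platform configuration.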
1795     path_targz = os.path.join(dir_name, archive_name + PACKAGE_EXT)
1796
1797     src.printcolors.print_value(logger, "Package path", path_targz, 2)
1798
1799     # Create a working directory for all files that are produced during the
1800     # package creation and that will be removed at the end of the command
1801     tmp_working_dir = os.path.join(runner.cfg.VARS.tmp_root, runner.cfg.VARS.datehour)
1802     src.ensure_path_exists(tmp_working_dir)
1803     logger.write("\n", 5)
1804     logger.write(_("The temporary working directory: %s\n" % tmp_working_dir),5)
1805
1806     logger.write("\n", 3)
1807
1808     msg = _("Preparation of files to add to the archive")
1809     logger.write(src.printcolors.printcLabel(msg), 2)
1810     logger.write("\n", 2)
1811
1812     d_files_to_add={}  # content of the archive
1813
1814     # a dict to hold paths that will need to be substituted for user recompilations
1815     d_paths_to_substitute={}
1816
1817     if options.binaries:
1818         d_bin_files_to_add = binary_package(runner.cfg,
1819                                             logger,
1820                                             options,
1821                                             tmp_working_dir)
1822         # for all binary dirs, store the substitutions that will be required
1823         # for extra compilations
1824         for key in d_bin_files_to_add:
1825             if key.endswith("(bin)"):
1826                 source_dir = d_bin_files_to_add[key][0]
1827                 path_in_archive = d_bin_files_to_add[key][1].replace(
1828                    runner.cfg.INTERNAL.config.binary_dir + runner.cfg.VARS.dist,
1829                    runner.cfg.INTERNAL.config.install_dir)
1830                 if os.path.basename(source_dir)==os.path.basename(path_in_archive):
1831                     # if basename is the same we will just substitute the dirname
1832                     d_paths_to_substitute[os.path.dirname(source_dir)]=\
1833                         os.path.dirname(path_in_archive)
1834                 else:
1835                     d_paths_to_substitute[source_dir]=path_in_archive
1836
1837         d_files_to_add.update(d_bin_files_to_add)
1838     if options.sources:
1839         d_files_to_add.update(source_package(runner,
1840                                         runner.cfg,
1841                                         logger,
1842                                         options,
1843                                         tmp_working_dir))
1844         if options.binaries:
1845             # for archives with bin and sources we provide a shell script able to
1846             # install binaries for compilation
1847             file_install_bin=produce_install_bin_file(runner.cfg,logger,
1848                                                       tmp_working_dir,
1849                                                       d_paths_to_substitute,
1850                                                       "install_bin.sh")
1851             d_files_to_add.update({"install_bin" : (file_install_bin, "install_bin.sh")})
1852             logger.write("substitutions that need to be done later : \n", 5)
1853             logger.write(str(d_paths_to_substitute), 5)
1854             logger.write("\n", 5)
1855     else:
1856         # the --salometools option is not considered when --sources is selected, as this
1857         # option already brings salomeTools!
1858         if options.sat:
1859             d_files_to_add.update(sat_package(runner.cfg, tmp_working_dir,
1860                                   options, logger))
1861
1862     if options.project:
1863         DBG.write("config for package %s" % options.project, runner.cfg)
1864         d_files_to_add.update(project_package(runner.cfg, options.project, options.project_file_path, options.ftp, tmp_working_dir, options.sat, logger))
1865
1866     if not(d_files_to_add):
1867         msg = _("Error: Empty dictionary to build the archive!\n")
1868         logger.write(src.printcolors.printcError(msg), 1)
1869         logger.write("\n", 1)
1870         return 1
1871
1872     # Add the README file in the package
1873     local_readme_tmp_path = add_readme(runner.cfg, options, tmp_working_dir)
1874     d_files_to_add["README"] = (local_readme_tmp_path, "README")
1875
1876     # Add the additional files of option add_files
1877     if options.add_files:
1878         for file_path in options.add_files:
1879             if not os.path.exists(file_path):
1880                 logger.write(_("WARNING: the file %s is not accessible.\n" % file_path), 1)
1881                 continue
1882             file_name = os.path.basename(file_path)
1883             d_files_to_add[file_name] = (file_path, file_name)
1884
1885     logger.write("\n", 2)
1886     logger.write(src.printcolors.printcLabel(_("Actually do the package")), 2)
1887     logger.write("\n", 2)
1888     logger.write("\nfiles and directories to add:\n%s\n\n" % PP.pformat(d_files_to_add), 5)
1889
1890     res = 0
1891     try:
1892         # Creating the object tarfile
1893         tar = tarfile.open(path_targz, mode='w:gz')
1894
1895         # get the filtering function if needed
1896         if old_python:
1897             filter_function = exclude_VCS_and_extensions_26
1898         else:
1899             filter_function = exclude_VCS_and_extensions
1900
1901         # Add the files to the tarfile object
1902         res = add_files(tar, archive_name, d_files_to_add, logger, f_exclude=filter_function)
1903         tar.close()
1904     except KeyboardInterrupt:
1905         logger.write(src.printcolors.printcError("\nERROR: forced interruption\n"), 1)
1906         logger.write(_("Removing the temporary working directory '%s'... ") % tmp_working_dir, 1)
1907         # remove the working directory
1908         shutil.rmtree(tmp_working_dir)
1909         logger.write(_("OK"), 1)
1910         logger.write(_("\n"), 1)
1911         return 1
1912
1913     # case with no application: only sat is packaged, as with 'sat package -t'
1914     try:
1915         app = runner.cfg.APPLICATION
1916     except:
1917         app = None
1918
1919     # unconditionally remove the tmp_local_working_dir
1920     if app is not None:
1921         tmp_local_working_dir = os.path.join(app.workdir, "tmp_package")
1922         if os.path.isdir(tmp_local_working_dir):
1923             shutil.rmtree(tmp_local_working_dir)
1924
1925     # remove the tmp directory, unless user has registered as developer
1926     if os.path.isdir(tmp_working_dir) and (not DBG.isDeveloper()):
1927         shutil.rmtree(tmp_working_dir)
1928
1929     # Print again the path of the package
1930     logger.write("\n", 2)
1931     src.printcolors.print_value(logger, "Package path", path_targz, 2)
1932
1933     return res