3 # Copyright (C) 2010-2012 CEA/DEN
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 # Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
31 from application import get_SALOME_modules
32 import src.debug as DBG
# True on Python <= 2.6, where tarfile.add() only supports the exclude=
# callback instead of filter= (see add_files below)
34 old_python = sys.version_info[0] == 2 and sys.version_info[1] <= 6
# names of the archive / project sub-directories inside a source package
41 ARCHIVE_DIR = "ARCHIVES"
42 PROJECT_DIR = "PROJECT"
# VCS bookkeeping directories and file extensions excluded from packages
44 IGNORED_DIRS = [".git", ".svn"]
45 IGNORED_EXTENSIONS = []
47 PACKAGE_EXT=".tar.gz" # the extension we use for the packages
# Config-file templates used when generating a stand-alone project / local
# configuration inside a source package.  NOTE(review): several lines
# (string delimiters, the else: branch header) are elided in this view;
# the Windows and non-Windows variants differ only in that the Windows
# one resolves ARCHIVEPATH from $root_path instead of $project_path.
49 if src.architecture.is_windows():
50 PROJECT_TEMPLATE = """#!/usr/bin/env python
53 # The path to the archive root directory
54 root_path : $PWD + "/../"
56 project_path : $PWD + "/"
58 # Where to search the archives of the products
59 ARCHIVEPATH : $root_path + "ARCHIVES"
60 # Where to search the pyconf of the applications
61 APPLICATIONPATH : $project_path + "applications/"
62 # Where to search the pyconf of the products
63 PRODUCTPATH : $project_path + "products/"
64 # Where to search the pyconf of the jobs of the project
65 JOBPATH : $project_path + "jobs/"
66 # Where to search the pyconf of the machines of the project
67 MACHINEPATH : $project_path + "machines/"
70 PROJECT_TEMPLATE = """#!/usr/bin/env python
74 project_path : $PWD + "/"
76 # Where to search the archives of the products
77 ARCHIVEPATH : $project_path + "ARCHIVES"
78 # Where to search the pyconf of the applications
79 APPLICATIONPATH : $project_path + "applications/"
80 # Where to search the pyconf of the products
81 PRODUCTPATH : $project_path + "products/"
82 # Where to search the pyconf of the jobs of the project
83 JOBPATH : $project_path + "jobs/"
84 # Where to search the pyconf of the machines of the project
85 MACHINEPATH : $project_path + "machines/"
89 LOCAL_TEMPLATE = ("""#!/usr/bin/env python
97 archive_dir : 'default'
# Command-line options of "sat package".  NOTE(review): the add_option
# signature appears to be (short, long, type, destination, help[, default])
# — confirm against src.options.Options; some default arguments are on
# elided lines below.
110 # Define all possible option for the package command : sat package <options>
111 parser = src.options.Options()
112 parser.add_option('b', 'binaries', 'boolean', 'binaries',
113 _('Optional: Produce a binary package.'), False)
114 parser.add_option('f', 'force_creation', 'boolean', 'force_creation',
115 _('Optional: Only binary package: produce the archive even if '
116 'there are some missing products.'), False)
117 parser.add_option('s', 'sources', 'boolean', 'sources',
118 _('Optional: Produce a compilable archive of the sources of the '
119 'application.'), False)
120 parser.add_option('', 'bin_products', 'boolean', 'bin_products',
121 _('Optional: Create binary archives for all products.'), False)
122 parser.add_option('', 'with_vcs', 'boolean', 'with_vcs',
123 _('Optional: Do not make archive for products in VCS mode (git, cvs, svn). '
124 'Sat prepare will use VCS mode instead to retrieve them'),
126 parser.add_option('', 'ftp', 'boolean', 'ftp',
127 _('Optional: Do not embed archives for products in archive mode.'
128 'Sat prepare will use ftp instead to retrieve them'),
130 parser.add_option('e', 'exe', 'string', 'exe',
131 _('Optional: Produce an extra launcher based upon the exe given as argument.'), "")
132 parser.add_option('p', 'project', 'string', 'project',
133 _('Optional: Produce an archive that contains a project.'), "")
134 parser.add_option('t', 'salometools', 'boolean', 'sat',
135 _('Optional: Produce an archive that contains salomeTools.'), False)
136 parser.add_option('n', 'name', 'string', 'name',
137 _('Optional: The name or full path of the archive.'), None)
138 parser.add_option('', 'add_files', 'list2', 'add_files',
139 _('Optional: The list of additional files to add to the archive.'), [])
140 parser.add_option('', 'without_properties', 'properties', 'without_properties',
141 _('Optional: Filter the products by their properties.\n\tSyntax: '
142 '--without_properties <property>:<value>'))
145 def add_files(tar, name_archive, d_content, logger, f_exclude=None):
146 '''Create an archive containing all directories and files that are given in
147 the d_content argument.
149 :param tar tarfile: The tarfile instance used to make the archive.
150 :param name_archive str: The name of the archive to make.
151 :param d_content dict: The dictionary that contain all directories and files
152 to add in the archive.
154 (path_on_local_machine, path_in_archive)
155 :param logger Logger: the logging instance
156 :param f_exclude Function: the function that filters
157 :return: 0 if success, 1 if not.
160 # get the max length of the messages in order to make the display
161 max_len = len(max(d_content.keys(), key=len))
164 # loop over each directory or file stored in the d_content dictionary
165 names = sorted(d_content.keys())
166 DBG.write("add tar names", names)
# NOTE(review): the "for name in names:" loop header and the
# already_added = set() initialisation are on elided lines here
168 # used to avoid duplications (for pip install in python, or single_install_dir cases)
171 # display information
172 len_points = max_len - len(name) + 3
173 local_path, archive_path = d_content[name]
174 in_archive = os.path.join(name_archive, archive_path)
175 logger.write(name + " " + len_points * "." + " "+ in_archive + " ", 3)
176 # Get the local path and the path in archive
177 # of the directory or file to add
178 # Add it in the archive
# one entry per (source path -> archive path) pair; skip exact duplicates
180 key=local_path+"->"+in_archive
181 if key not in already_added:
# Python <= 2.6: tarfile.add() only supports the exclude= callback
185 exclude=exclude_VCS_and_extensions_26)
# Python >= 2.7: use the filter= callback instead
189 filter=exclude_VCS_and_extensions)
190 already_added.add(key)
191 logger.write(src.printcolors.printcSuccess(_("OK")), 3)
192 except Exception as e:
# best-effort: a failed entry is reported (KO + message) but does not abort
193 logger.write(src.printcolors.printcError(_("KO ")), 3)
194 logger.write(str(e), 3)
196 logger.write("\n", 3)
def exclude_VCS_and_extensions_26(filename):
    '''The function that is used to exclude from package the links to the
    VCS repositories (like .git) (only for python 2.6).

    Used as the ``exclude=`` callback of ``tarfile.TarFile.add`` on
    Python <= 2.6, which does not support the ``filter=`` keyword.

    :param filename Str: The filename to exclude (or not).
    :return: True if the file has to be excluded, False otherwise.
    :rtype: bool
    '''
    # exclude anything whose path mentions a VCS directory (.git, .svn, ...)
    for dir_name in IGNORED_DIRS:
        if dir_name in filename:
            return True
    # exclude files carrying one of the ignored extensions
    for extension in IGNORED_EXTENSIONS:
        if filename.endswith(extension):
            return True
    return False
def exclude_VCS_and_extensions(tarinfo):
    '''The function that is used to exclude from package the links to the
    VCS repositories (like .git).

    Used as the ``filter=`` callback of ``tarfile.TarFile.add``
    (Python >= 2.7): returning None drops the member from the archive.

    :param tarinfo TarInfo: The archive member candidate.
    :return: None if the member has to be excluded, else the member itself.
    :rtype: tarinfo or None
    '''
    filename = tarinfo.name
    # exclude anything whose path mentions a VCS directory (.git, .svn, ...)
    for dir_name in IGNORED_DIRS:
        if dir_name in filename:
            return None
    # exclude files carrying one of the ignored extensions
    for extension in IGNORED_EXTENSIONS:
        if filename.endswith(extension):
            return None
    return tarinfo
233 def produce_relative_launcher(config,
238 '''Create a specific SALOME launcher for the binary package. This launcher
241 :param config Config: The global configuration.
242 :param logger Logger: the logging instance
243 :param file_dir str: the directory where to put the launcher
244 :param file_name str: The launcher name
245 :param binaries_dir_name str: the name of the repository where the binaries
247 :return: the path of the produced launcher
251 # set base mode to "no" for the archive - save current mode to restore it at the end
252 if "base" in config.APPLICATION:
253 base_setting=config.APPLICATION.base
256 config.APPLICATION.base="no"
258 # get KERNEL installation path
259 kernel_info = src.product.get_product_config(config, "KERNEL")
260 kernel_base_name=os.path.basename(kernel_info.install_dir)
261 if kernel_info.install_mode == "base":
262 # case of kernel installed in base. the kernel install dir name is different in the archive
263 kernel_base_name=os.path.basename(os.path.dirname(kernel_info.install_dir))
265 kernel_root_dir = os.path.join(binaries_dir_name, kernel_base_name)
267 # set kernel bin dir (considering fhs property)
268 kernel_cfg = src.product.get_product_config(config, "KERNEL")
269 if src.get_property_in_product_cfg(kernel_cfg, "fhs"):
270 bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin")
272 bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin","salome")
274 # check if the application contains an application module
275 # check also if the application has a distene product,
276 # in this case get its licence file name
277 l_product_info = src.product.get_products_infos(config.APPLICATION.products.keys(), config)
# "Not defined" is used as a sentinel meaning "no salome application module found"
278 salome_application_name="Not defined"
279 distene_licence_file_name=False
280 for prod_name, prod_info in l_product_info:
281 # look for a "salome application" and a distene product
282 if src.get_property_in_product_cfg(prod_info, "is_distene") == "yes":
283 distene_licence_file_name = src.product.product_has_licence(prod_info,
284 config.PATHS.LICENCEPATH)
285 if src.get_property_in_product_cfg(prod_info, "is_salome_application") == "yes":
286 salome_application_name=prod_info.name
288 # if the application contains an application module, we set ABSOLUTE_APPLI_PATH to it
289 # if not we set it to KERNEL_INSTALL_DIR, which is sufficient, except for salome test
290 if salome_application_name == "Not defined":
291 app_root_dir=kernel_root_dir
293 app_root_dir=os.path.join(binaries_dir_name, salome_application_name)
# NOTE(review): the additional_env dict initialisation is on an elided line;
# "out_dir_Path" is written as a literal token and resolved at launch time
# by the generated file (see the replace_in_file hack below)
296 additional_env['sat_bin_kernel_install_dir'] = "out_dir_Path + " +\
297 config.VARS.sep + bin_kernel_install_dir
298 if "python3" in config.APPLICATION and config.APPLICATION.python3 == "yes":
299 additional_env['sat_python_version'] = 3
301 additional_env['sat_python_version'] = 2
303 additional_env['ABSOLUTE_APPLI_PATH'] = "out_dir_Path" + config.VARS.sep + app_root_dir
304 launcher_name = src.get_launcher_name(config)
305 additional_env['APPLI'] = "out_dir_Path" + config.VARS.sep + file_name
307 # create an environment file writer
308 writer = src.environment.FileEnvWriter(config,
314 filepath = os.path.join(file_dir, file_name)
316 writer.write_env_file(filepath,
319 additional_env=additional_env,
# NOTE(review): no_path_init is passed as the string "False" (which is
# truthy), not the boolean False — confirm this is what FileEnvWriter expects
320 no_path_init="False",
321 for_package = binaries_dir_name)
323 # Little hack to put out_dir_Path outside the strings
324 src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' )
325 src.replace_in_file(filepath, "r'out_dir_Path + ", "out_dir_Path + r'" )
327 # A hack to put a call to a file for distene licence.
328 # It does nothing to an application that has no distene product
329 if distene_licence_file_name:
330 logger.write("Application has a distene licence file! We use it in package launcher", 5)
331 hack_for_distene_licence(filepath, distene_licence_file_name)
333 # change the rights in order to make the file executable for everybody
343 # restore modified setting by its initial value
344 config.APPLICATION.base=base_setting
348 def hack_for_distene_licence(filepath, licence_file):
349 '''Replace the distene licence env variable by a call to a file.
351 :param filepath Str: The path to the launcher to modify.
353 shutil.move(filepath, filepath + "_old")
# the original launcher is kept aside as "<filepath>_old" and re-read line
# by line; NOTE(review): "fileout" is assigned on an elided line (presumably
# back to filepath itself), and fin/fout are apparently never closed — confirm
355 filein = filepath + "_old"
356 fin = open(filein, "r")
357 fout = open(fileout, "w")
358 text = fin.readlines()
359 # Find the Distene section
361 for i,line in enumerate(text):
362 if "# Set DISTENE License" in line:
366 # No distene product, there is nothing to do
# drop the two lines that followed the marker (presumably the ones that set
# the licence variables inline) before inserting the replacement code
372 del text[num_line +1]
373 del text[num_line +1]
# code injected into the launcher: load the licence file as a module and let
# it set the DISTENE variables (importlib for py >= 3.5, imp otherwise).
# NOTE(review): the injected version test "major >= 3 and minor >= 5" is the
# usual (slightly fragile) idiom — it misclassifies hypothetical x.<5 majors
374 text_to_insert =""" try:
375 distene_licence_file=r"%s"
376 if sys.version_info[0] >= 3 and sys.version_info[1] >= 5:
377 import importlib.util
378 spec_dist = importlib.util.spec_from_file_location("distene_licence", distene_licence_file)
379 distene=importlib.util.module_from_spec(spec_dist)
380 spec_dist.loader.exec_module(distene)
383 distene = imp.load_source('distene_licence', distene_licence_file)
384 distene.set_distene_variables(context)
386 pass\n""" % licence_file
387 text.insert(num_line + 1, text_to_insert)
394 def produce_relative_env_files(config,
399 '''Create some specific environment files for the binary package. These
400 files use relative paths.
402 :param config Config: The global configuration.
403 :param logger Logger: the logging instance
404 :param file_dir str: the directory where to put the files
405 :param binaries_dir_name str: the name of the repository where the binaries
407 :param exe_name str: if given generate a launcher executing exe_name
408 :return: the list of path of the produced environment files
412 # set base mode to "no" for the archive - save current mode to restore it at the end
413 if "base" in config.APPLICATION:
414 base_setting=config.APPLICATION.base
417 config.APPLICATION.base="no"
419 # create an environment file writer
420 writer = src.environment.FileEnvWriter(config,
# choose the generated file name: platform default (env_launch.bat/.sh),
# or the exe's basename when exe_name is given
425 if src.architecture.is_windows():
427 filename = "env_launch.bat"
430 filename = "env_launch.sh"
433 filename=os.path.basename(exe_name)
436 filepath = writer.write_env_file(filename,
439 for_package = binaries_dir_name)
441 # Little hack to put out_dir_Path as environment variable
442 if src.architecture.is_windows() :
443 src.replace_in_file(filepath, '"out_dir_Path', '"%out_dir_Path%' )
444 src.replace_in_file(filepath, '=out_dir_Path', '=%out_dir_Path%' )
445 src.replace_in_file(filepath, ';out_dir_Path', ';%out_dir_Path%' )
447 src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )
448 src.replace_in_file(filepath, ':out_dir_Path', ':${out_dir_Path}' )
# append the actual exe invocation at the end of the generated env file
451 if src.architecture.is_windows():
452 cmd="\n\nrem Launch exe with user arguments\n%s " % exe_name + "%*"
# NOTE(review): "$*" collapses all user arguments into a single word;
# "$@" would forward them separately — confirm the collapse is intended
454 cmd='\n\n# Launch exe with user arguments\n%s "$*"' % exe_name
455 with open(filepath, "a") as exe_launcher:
456 exe_launcher.write(cmd)
458 # change the rights in order to make the file executable for everybody
468 # restore modified setting by its initial value
469 config.APPLICATION.base=base_setting
473 def produce_install_bin_file(config,
478 '''Create a bash shell script which do substitutions in BIRARIES dir
479 in order to use it for extra compilations.
481 :param config Config: The global configuration.
482 :param logger Logger: the logging instance
483 :param file_dir str: the directory where to put the files
484 :param d_sub, dict: the dictionnary that contains the substitutions to be done
485 :param file_name str: the name of the install script file
486 :return: the produced file
490 filepath = os.path.join(file_dir, file_name)
491 # open the file and write into it
492 # use codec utf-8 as sat variables are in unicode
493 with codecs.open(filepath, "w", 'utf-8') as installbin_file:
494 installbin_template_path = os.path.join(config.VARS.internal_dir,
495 "INSTALL_BIN.template")
497 # build the name of the directory that will contain the binaries
498 binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
499 # build the substitution loop
# shell snippet: grep the install dir for each key, then sed-substitute the
# key with a $(pwd)-relative value; the "for key in d_sub" loop header is on
# an elided line
500 loop_cmd = "for f in $(grep -RIl"
502 loop_cmd += " -e "+ key
503 loop_cmd += ' ' + config.INTERNAL.config.install_dir +\
506 loop_cmd += " s?" + key + "?$(pwd)/" + d_sub[key] + "?g\n"
507 loop_cmd += ' " $f\ndone'
# NOTE(review): the template dictionary "d" is initialised on an elided line
510 d["BINARIES_DIR"] = binaries_dir_name
511 d["SUBSTITUTION_LOOP"]=loop_cmd
512 d["INSTALL_DIR"]=config.INTERNAL.config.install_dir
514 # substitute the template and write it in file
515 content=src.template.substitute(installbin_template_path, d)
516 installbin_file.write(content)
517 # change the rights in order to make the file executable for everybody
529 def product_appli_creation_script(config,
533 '''Create a script that can produce an application (EDF style) in the binary
536 :param config Config: The global configuration.
537 :param logger Logger: the logging instance
538 :param file_dir str: the directory where to put the file
539 :param binaries_dir_name str: the name of the repository where the binaries
541 :return: the path of the produced script file
544 template_name = "create_appli.py.for_bin_packages.template"
545 template_path = os.path.join(config.VARS.internal_dir, template_name)
# NOTE(review): template file handle is never closed — consider a with-block
546 text_to_fill = open(template_path, "r").read()
547 text_to_fill = text_to_fill.replace("TO BE FILLED 1",
548 '"' + binaries_dir_name + '"')
# build one <module .../> XML line per SALOME module; cpp products contribute
# one line per generated component instead of one for the product itself
551 for product_name in get_SALOME_modules(config):
552 product_info = src.product.get_product_config(config, product_name)
554 if src.product.product_is_smesh_plugin(product_info):
557 if 'install_dir' in product_info and bool(product_info.install_dir):
558 if src.product.product_is_cpp(product_info):
560 for cpp_name in src.product.get_product_components(product_info):
561 line_to_add = ("<module name=\"" +
563 "\" gui=\"yes\" path=\"''' + "
564 "os.path.join(dir_bin_name, \"" +
565 cpp_name + "\") + '''\"/>")
568 line_to_add = ("<module name=\"" +
570 "\" gui=\"yes\" path=\"''' + "
571 "os.path.join(dir_bin_name, \"" +
572 product_name + "\") + '''\"/>")
573 text_to_add += line_to_add + "\n"
575 filled_text = text_to_fill.replace("TO BE FILLED 2", text_to_add)
577 tmp_file_path = os.path.join(file_dir, "create_appli.py")
578 ff = open(tmp_file_path, "w")
579 ff.write(filled_text)
582 # change the rights in order to make the file executable for everybody
583 os.chmod(tmp_file_path,
594 def bin_products_archives(config, logger, only_vcs):
595 '''Prepare binary packages for all products
596 :param config Config: The global configuration.
597 :return: the error status
601 logger.write("Make %s binary archives\n" % config.VARS.dist)
602 # Get the default directory where to put the packages
603 binpackage_path = os.path.join(config.APPLICATION.workdir, "PACKAGE", "products")
604 src.ensure_path_exists(binpackage_path)
605 # Get the list of product installation to add to the archive
606 l_products_name = sorted(config.APPLICATION.products.keys())
607 l_product_info = src.product.get_products_infos(l_products_name,
609 # first loop on products : filter products, analyse properties,
610 # and store the information that will be used to create the archive in the second loop
611 l_not_installed=[] # store not installed products for warning at the end
612 for prod_name, prod_info in l_product_info:
613 # ignore the native and fixed products for install directories
# NOTE(review): the "continue" statements ending these filter branches are
# on elided lines
614 if (src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes"
615 or src.product.product_is_native(prod_info)
616 or src.product.product_is_fixed(prod_info)
617 or not src.product.product_compiles(prod_info)):
619 if only_vcs and not src.product.product_is_vcs(prod_info):
621 if not src.product.check_installation(config, prod_info):
622 l_not_installed.append(prod_name)
623 continue # product is not installed, we skip it
624 # prepare call to make_bin_archive
# archive name: <product>-<version>-<dist>.tar.gz, one per product
625 path_targz_prod = os.path.join(binpackage_path, prod_name + '-' + prod_info.version + "-" + config.VARS.dist + PACKAGE_EXT)
626 targz_prod = tarfile.open(path_targz_prod, mode='w:gz')
627 bin_path = prod_info.install_dir
628 targz_prod.add(bin_path)
# write a companion .md5 checksum file next to each archive.
# NOTE(review): "bytes" shadows the builtin, and the whole archive is read
# into memory to hash it — fine for moderate sizes, confirm for huge products
630 # Python program to find MD5 hash value of a file
632 with open(path_targz_prod,"rb") as f:
633 bytes = f.read() # read file as bytes
634 readable_hash = hashlib.md5(bytes).hexdigest();
635 with open(path_targz_prod+".md5", "w") as md5sum:
636 md5sum.write("%s %s" % (readable_hash, os.path.basename(path_targz_prod)))
637 logger.write("  archive : %s (md5sum = %s)\n" % (path_targz_prod, readable_hash))
641 def binary_package(config, logger, options, tmp_working_dir):
642 '''Prepare a dictionary that stores all the needed directories and files to
643 add in a binary package.
645 :param config Config: The global configuration.
646 :param logger Logger: the logging instance
647 :param options OptResult: the options of the launched command
648 :param tmp_working_dir str: The temporary local directory containing some
649 specific directories or files needed in the
651 :return: the dictionary that stores all the needed directories and files to
652 add in a binary package.
653 {label : (path_on_local_machine, path_in_archive)}
657 # Get the list of product installation to add to the archive
658 l_products_name = sorted(config.APPLICATION.products.keys())
659 l_product_info = src.product.get_products_infos(l_products_name,
662 # suppress compile time products for binaries-only archives
663 if not options.sources:
664 update_config(config, logger, "compile_time", "yes")
# NOTE(review): the initialisations of the accumulators used below
# (l_install_dir, l_not_installed, l_source_dir, d_products, ...) are on
# elided lines in this view
669 l_sources_not_present = []
670 generate_mesa_launcher = False # a flag to know if we generate a mesa launcher
671 if ("APPLICATION" in config and
672 "properties" in config.APPLICATION and
673 "mesa_launcher_in_package" in config.APPLICATION.properties and
674 config.APPLICATION.properties.mesa_launcher_in_package == "yes") :
675 generate_mesa_launcher=True
677 # first loop on products : filter products, analyse properties,
678 # and store the information that will be used to create the archive in the second loop
679 for prod_name, prod_info in l_product_info:
680 # skip product with property not_in_package set to yes
681 if src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes":
684 # Add the sources of the products that have the property
685 # sources_in_package : "yes"
686 if src.get_property_in_product_cfg(prod_info,
687 "sources_in_package") == "yes":
688 if os.path.exists(prod_info.source_dir):
689 l_source_dir.append((prod_name, prod_info.source_dir))
691 l_sources_not_present.append(prod_name)
693 # ignore the native and fixed products for install directories
694 if (src.product.product_is_native(prod_info)
695 or src.product.product_is_fixed(prod_info)
696 or not src.product.product_compiles(prod_info)):
699 # products with single_dir property will be installed in the PRODUCTS directory of the archive
700 is_single_dir=(src.appli_test_property(config,"single_install_dir", "yes") and \
701 src.product.product_test_property(prod_info,"single_install_dir", "yes"))
702 if src.product.check_installation(config, prod_info):
703 l_install_dir.append((prod_name, prod_info.name, prod_info.install_dir,
704 is_single_dir, prod_info.install_mode))
706 l_not_installed.append(prod_name)
708 # Add also the cpp generated modules (if any)
709 if src.product.product_is_cpp(prod_info):
711 for name_cpp in src.product.get_product_components(prod_info):
712 install_dir = os.path.join(config.APPLICATION.workdir,
713 config.INTERNAL.config.install_dir,
715 if os.path.exists(install_dir):
716 l_install_dir.append((name_cpp, name_cpp, install_dir, False, "value"))
718 l_not_installed.append(name_cpp)
720 # check the name of the directory that (could) contains the binaries
721 # from previous detar
722 binaries_from_detar = os.path.join(
723 config.APPLICATION.workdir,
724 config.INTERNAL.config.binary_dir + config.VARS.dist)
725 if os.path.exists(binaries_from_detar):
727 WARNING: existing binaries directory from previous detar installation:
729 To make new package from this, you have to:
730 1) install binaries in INSTALL directory with the script "install_bin.sh"
731 see README file for more details
732 2) or recompile everything in INSTALL with "sat compile" command
733 this step is long, and requires some linux packages to be installed
735 """ % binaries_from_detar)
737 # Print warning or error if there are some missing products
738 if len(l_not_installed) > 0:
739 text_missing_prods = ""
740 for p_name in l_not_installed:
741 text_missing_prods += " - " + p_name + "\n"
# missing installations are fatal unless --force_creation was given
742 if not options.force_creation:
743 msg = _("ERROR: there are missing product installations:")
744 logger.write("%s\n%s" % (src.printcolors.printcError(msg),
747 raise src.SatException(msg)
749 msg = _("WARNING: there are missing products installations:")
750 logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
754 # Do the same for sources
755 if len(l_sources_not_present) > 0:
756 text_missing_prods = ""
757 for p_name in l_sources_not_present:
758 text_missing_prods += "-" + p_name + "\n"
759 if not options.force_creation:
760 msg = _("ERROR: there are missing product sources:")
761 logger.write("%s\n%s" % (src.printcolors.printcError(msg),
764 raise src.SatException(msg)
766 msg = _("WARNING: there are missing products sources:")
767 logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
771 # construct the name of the directory that will contain the binaries
772 if src.architecture.is_windows():
773 binaries_dir_name = config.INTERNAL.config.binary_dir
775 binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
776 # construct the correlation table between the product names, their
777 # actual install directories and their install directory in archive
779 for prod_name, prod_info_name, install_dir, is_single_dir, install_mode in l_install_dir:
780 prod_base_name=os.path.basename(install_dir)
781 if install_mode == "base":
782 # case of a products installed in base.
783 # because the archive is in base:no mode, the name of the install dir is different inside archive
784 # we set it to the product name or by PRODUCTS if single-dir
786 prod_base_name=config.INTERNAL.config.single_install_dir
788 prod_base_name=prod_info_name
789 path_in_archive = os.path.join(binaries_dir_name, prod_base_name)
790 d_products[prod_name + " (bin)"] = (install_dir, path_in_archive)
# sources requested via sources_in_package go under SOURCES/<product>
792 for prod_name, source_dir in l_source_dir:
793 path_in_archive = os.path.join("SOURCES", prod_name)
794 d_products[prod_name + " (sources)"] = (source_dir, path_in_archive)
796 # create an archives of compilation logs, and insert it into the tarball
797 logpath=os.path.join(config.APPLICATION.workdir, "LOGS")
798 path_targz_logs = os.path.join(tmp_working_dir, "logs.tgz")
799 tar_log = tarfile.open(path_targz_logs, mode='w:gz')
800 tar_log.add(logpath, arcname="LOGS")
802 d_products["LOGS"] = (path_targz_logs, "logs.tgz")
804 # for packages of SALOME applications including KERNEL,
805 # we produce a salome launcher or a virtual application (depending on salome version)
806 if 'KERNEL' in config.APPLICATION.products:
807 VersionSalome = src.get_salome_version(config)
808 # Case where SALOME has the launcher that uses the SalomeContext API
809 if VersionSalome >= 730:
810 # create the relative launcher and add it to the files to add
811 launcher_name = src.get_launcher_name(config)
812 launcher_package = produce_relative_launcher(config,
817 d_products["launcher"] = (launcher_package, launcher_name)
819 # if the application contains mesa products, we generate in addition to the
820 # classical salome launcher a launcher using mesa and called mesa_salome
821 # (the mesa launcher will be used for remote usage through ssh).
822 if generate_mesa_launcher:
823 #if there is one : store the use_mesa property
824 restore_use_mesa_option=None
825 if ('properties' in config.APPLICATION and
826 'use_mesa' in config.APPLICATION.properties):
827 restore_use_mesa_option = config.APPLICATION.properties.use_mesa
829 # activate mesa property, and generate a mesa launcher
830 src.activate_mesa_property(config)  #activate use_mesa property
831 launcher_mesa_name="mesa_"+launcher_name
832 launcher_package_mesa = produce_relative_launcher(config,
837 d_products["launcher (mesa)"] = (launcher_package_mesa, launcher_mesa_name)
839 # if there was a use_mesa value, we restore it
840 # else we set it to the default value "no"
841 if restore_use_mesa_option != None:
842 config.APPLICATION.properties.use_mesa=restore_use_mesa_option
844 config.APPLICATION.properties.use_mesa="no"
847 # if we mix binaries and sources, we add a copy of the launcher,
848 # prefixed with "bin",in order to avoid clashes
849 launcher_copy_name="bin"+launcher_name
850 launcher_package_copy = produce_relative_launcher(config,
855 d_products["launcher (copy)"] = (launcher_package_copy, launcher_copy_name)
857 # Provide a script for the creation of an application EDF style
858 appli_script = product_appli_creation_script(config,
863 d_products["appli script"] = (appli_script, "create_appli.py")
865 # Put also the environment file
866 env_file = produce_relative_env_files(config,
871 if src.architecture.is_windows():
872 filename = "env_launch.bat"
874 filename = "env_launch.sh"
875 d_products["environment file"] = (env_file, filename)
877 # If option exe, produce an extra launcher based on specified exe
879 exe_file = produce_relative_env_files(config,
885 if src.architecture.is_windows():
886 filename = os.path.basename(options.exe) + ".bat"
888 filename = os.path.basename(options.exe) + ".sh"
889 d_products["exe file"] = (exe_file, filename)
894 def source_package(sat, config, logger, options, tmp_working_dir):
895 '''Prepare a dictionary that stores all the needed directories and files to
896 add in a source package.
898 :param config Config: The global configuration.
899 :param logger Logger: the logging instance
900 :param options OptResult: the options of the launched command
901 :param tmp_working_dir str: The temporary local directory containing some
902 specific directories or files needed in the
904 :return: the dictionary that stores all the needed directories and files to
905 add in a source package.
906 {label : (path_on_local_machine, path_in_archive)}
911 # Get all the products that are prepared using an archive
912 # unless ftp mode is specified (in this case the user of the
913 # archive will get the sources through the ftp mode of sat prepare
915 logger.write("Find archive products ... ")
916 d_archives, l_pinfo_vcs = get_archives(config, logger)
917 logger.write("Done\n")
# products under VCS get a dedicated archive each, unless --with_vcs asked
# to leave them out (they will then be fetched by "sat prepare")
920 if not options.with_vcs and len(l_pinfo_vcs) > 0:
921 # Make archives with the products that are not prepared using an archive
922 # (git, cvs, svn, etc)
923 logger.write("Construct archives for vcs products ... ")
924 d_archives_vcs = get_archives_vcs(l_pinfo_vcs,
929 logger.write("Done\n")
932 logger.write("Create the project ... ")
933 d_project = create_project_for_src_package(config,
937 logger.write("Done\n")
# embed a self-contained copy of salomeTools in the package, under "sat"
940 tmp_sat = add_salomeTools(config, tmp_working_dir)
941 d_sat = {"salomeTools" : (tmp_sat, "sat")}
943 # Add a sat symbolic link if not win
944 if not src.architecture.is_windows():
948 # In the jobs, os.getcwd() can fail
# NOTE(review): the working directory is remembered here; the matching
# os.chdir back appears to be on an elided line — confirm it is restored
949 t = config.LOCAL.workdir
950 os.chdir(tmp_working_dir)
952 # create a symlink, to avoid reference with "salomeTool/.."
954 if os.path.lexists("ARCHIVES"):
955 os.remove("ARCHIVES")
956 os.symlink("../ARCHIVES", "ARCHIVES")
959 d_sat["sat archive link"] = (os.path.join(tmp_working_dir,"PROJECT", "ARCHIVES"),
960 os.path.join("PROJECT", "ARCHIVES"))
# merge all contributions; default initialisations of d_archives_vcs and
# d_project (for the branches not taken) are on elided lines above
962 d_source = src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
965 def get_archives(config, logger):
966 '''Find all the products that are get using an archive and all the products
967 that are get using a vcs (git, cvs, svn) repository.
969 :param config Config: The global configuration.
970 :param logger Logger: the logging instance
971 :return: the dictionary {name_product :
972 (local path of its archive, path in the package of its archive )}
973 and the list of specific configuration corresponding to the vcs
977 # Get the list of product informations
978 l_products_name = config.APPLICATION.products.keys()
979 l_product_info = src.product.get_products_infos(l_products_name,
# NOTE(review): the d_archives / l_pinfo_vcs initialisations and the
# "continue" statements closing the filter branches are on elided lines
983 for p_name, p_info in l_product_info:
984 # skip product with property not_in_package set to yes
985 if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
987 # ignore the native and fixed products
988 if (src.product.product_is_native(p_info)
989 or src.product.product_is_fixed(p_info)):
# products fetched as archives go straight into the package's ARCHIVES dir
991 if p_info.get_source == "archive":
992 archive_path = p_info.archive_info.archive_name
993 archive_name = os.path.basename(archive_path)
994 d_archives[p_name] = (archive_path,
995 os.path.join(ARCHIVE_DIR, archive_name))
996 if (src.appli_test_property(config,"pip", "yes") and
997 src.product.product_test_property(p_info,"pip", "yes")):
998 # if pip mode is activated, and product is managed by pip
999 pip_wheels_dir=os.path.join(config.LOCAL.archive_dir,"wheels")
1000 pip_wheel_pattern=os.path.join(pip_wheels_dir,
1001 "%s-%s*" % (p_info.name, p_info.version))
1002 pip_wheel_path=glob.glob(pip_wheel_pattern)
1003 msg_pip_not_found="Error in get_archive, pip wheel for "\
1004 "product %s-%s was not found in %s directory"
1005 msg_pip_two_or_more="Error in get_archive, several pip wheels for "\
1006 "product %s-%s were found in %s directory"
# exactly one wheel must match <name>-<version>*; zero or several is fatal
1007 if len(pip_wheel_path)==0:
1008 raise src.SatException(msg_pip_not_found %\
1009 (p_info.name, p_info.version, pip_wheels_dir))
1010 if len(pip_wheel_path)>1:
1011 raise src.SatException(msg_pip_two_or_more %\
1012 (p_info.name, p_info.version, pip_wheels_dir))
1014 pip_wheel_name=os.path.basename(pip_wheel_path[0])
1015 d_archives[p_name+" (pip wheel)"]=(pip_wheel_path[0],
1016 os.path.join(ARCHIVE_DIR, "wheels", pip_wheel_name))
1018 # this product is not managed by archive,
1019 # an archive of the vcs directory will be created by get_archive_vcs
1020 l_pinfo_vcs.append((p_name, p_info))
1022 return d_archives, l_pinfo_vcs
# Copies the running salomeTools tree into tmp_working_dir/salomeTools,
# prunes stray *.pyconf / *.txt files from its root, and rewrites
# data/local.pyconf from the LOCAL_TEMPLATE so the packaged sat points at the
# packaged project instead of the user's local configuration.
# NOTE(review): elided listing (1050->1053, 1056->1059); the remaining
# os.path.join arguments at 1051-1052 and an ff.close() around 1057-1058 are
# presumably among the missing lines -- confirm against the full file.
1024 def add_salomeTools(config, tmp_working_dir):
1025 '''Prepare a version of salomeTools that has a specific local.pyconf file
1026 configured for a source package.
1028 :param config Config: The global configuration.
1029 :param tmp_working_dir str: The temporary local directory containing some
1030 specific directories or files needed in the
1032 :return: The path to the local salomeTools directory to add in the package
1035 # Copy sat in the temporary working directory
1036 sat_tmp_path = src.Path(os.path.join(tmp_working_dir, "salomeTools"))
1037 sat_running_path = src.Path(config.VARS.salometoolsway)
1038 sat_running_path.copy(sat_tmp_path)
1040 # Update the local.pyconf file that contains the path to the project
1041 local_pyconf_name = "local.pyconf"
1042 local_pyconf_dir = os.path.join(tmp_working_dir, "salomeTools", "data")
1043 local_pyconf_file = os.path.join(local_pyconf_dir, local_pyconf_name)
1044 # Remove the .pyconf file in the root directory of salomeTools if there is
1045 # any. (For example when launching jobs, a pyconf file describing the jobs
1046 # can be here and is not useful)
1047 files_or_dir_SAT = os.listdir(os.path.join(tmp_working_dir, "salomeTools"))
1048 for file_or_dir in files_or_dir_SAT:
1049 if file_or_dir.endswith(".pyconf") or file_or_dir.endswith(".txt"):
1050 file_path = os.path.join(tmp_working_dir,
1053 os.remove(file_path)
# overwrite the copied local.pyconf with the packaging template
1055 ff = open(local_pyconf_file, "w")
1056 ff.write(LOCAL_TEMPLATE)
1059 return sat_tmp_path.path
# For each vcs-managed product: fetch its sources (via source.run) into a
# throw-away <workdir>/tmp_package tree, tar each source dir with
# make_archive, and return {product_name: (local_tgz_path, ARCHIVES/<pn>.tgz)}.
# It temporarily redirects sat.cfg.APPLICATION.workdir to tmp_package and
# restores it (svgDir) afterwards.
# NOTE(review): the swap/restore of APPLICATION.workdir is not wrapped in
# try/finally -- an exception in source.run or make_archive leaves the
# runner's config pointing at tmp_package. Elided listing (1106->1108): the
# d_archives_vcs = {} initialization is presumably among the missing lines.
1061 def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir):
1062 '''For sources package that require that all products are get using an
1063 archive, one has to create some archive for the vcs products.
1064 So this method calls the clean and source command of sat and then create
1067 :param l_pinfo_vcs List: The list of specific configuration corresponding to
1069 :param sat Sat: The Sat instance that can be called to clean and source the
1071 :param config Config: The global configuration.
1072 :param logger Logger: the logging instance
1073 :param tmp_working_dir str: The temporary local directory containing some
1074 specific directories or files needed in the
1076 :return: the dictionary that stores all the archives to add in the source
1077 package. {label : (path_on_local_machine, path_in_archive)}
1080 # clean the source directory of all the vcs products, then use the source
1081 # command and thus construct an archive that will not contain the patches
1082 l_prod_names = [pn for pn, __ in l_pinfo_vcs]
# dead branch kept deliberately disabled (see inline comment)
1083 if False: # clean is dangerous in user/SOURCES, fixed in tmp_local_working_dir
1084 logger.write(_("\nclean sources\n"))
1085 args_clean = config.VARS.application
1086 args_clean += " --sources --products "
1087 args_clean += ",".join(l_prod_names)
1088 logger.write("WARNING: get_archives_vcs clean\n '%s'\n" % args_clean, 1)
1089 sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)
1092 logger.write(_("get sources\n"))
1093 args_source = config.VARS.application
1094 args_source += " --products "
1095 args_source += ",".join(l_prod_names)
# save the real workdir so it can be restored after the fetch
1096 svgDir = sat.cfg.APPLICATION.workdir
1097 tmp_local_working_dir = os.path.join(sat.cfg.APPLICATION.workdir, "tmp_package") # to avoid too much big files in /tmp
1098 sat.cfg.APPLICATION.workdir = tmp_local_working_dir
1099 # DBG.write("SSS sat config.APPLICATION.workdir", sat.cfg.APPLICATION, True)
1100 # DBG.write("sat config id", id(sat.cfg), True)
1101 # shit as config is not same id() as for sat.source()
1102 # sat.source(args_source, batch=True, verbose=5, logger_add_link = logger)
1104 source.run(args_source, sat, logger) #use this mode as runner.cfg reference
1106 # make the new archives
1108 for pn, pinfo in l_pinfo_vcs:
1109 path_archive = make_archive(pn, pinfo, tmp_local_working_dir)
1110 logger.write("make archive vcs '%s'\n" % path_archive)
1111 d_archives_vcs[pn] = (path_archive,
1112 os.path.join(ARCHIVE_DIR, pn + ".tgz"))
# restore the caller's workdir before returning
1113 sat.cfg.APPLICATION.workdir = svgDir
1114 # DBG.write("END sat config", sat.cfg.APPLICATION, True)
1115 return d_archives_vcs
def make_bin_archive(prod_name, prod_info, where):
    '''Create an archive of a product from its binary installation directory.

    :param prod_name str: The name of the product.
    :param prod_info Config: The specific configuration corresponding to the
                             product (its install_dir attribute is read).
    :param where str: The path of the directory where to put the resulting
                      archive.
    :return: The path of the resulting archive.
    :rtype: str
    '''
    path_targz_prod = os.path.join(where, prod_name + PACKAGE_EXT)
    tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
    try:
        bin_path = prod_info.install_dir
        # Store the installation tree under the product name inside the
        # archive.  The previous arcname (path_targz_prod, the archive's own
        # output path) leaked a local absolute path into the member names.
        tar_prod.add(bin_path, arcname=prod_name)
    finally:
        # always flush/close the archive, even if add() fails
        tar_prod.close()
    return path_targz_prod
# Tars a product's source directory (prod_info.source_dir) into
# <where>/<prod_name><PACKAGE_EXT>, filtering out VCS metadata and ignored
# extensions. Two add() forms are visible: one using the Python 2.6 'exclude='
# callback (exclude_VCS_and_extensions_26) and one using the 2.7+ 'filter='
# callback (exclude_VCS_and_extensions).
# NOTE(review): elided listing (1148->1150, 1152->1154, 1156->1158); the
# 'if old_python:' / 'else:' lines selecting between the two add() calls, the
# arcname= arguments and a tar_prod.close() are presumably among the missing
# lines -- confirm against the full file before editing.
1135 def make_archive(prod_name, prod_info, where):
1136 '''Create an archive of a product by searching its source directory.
1138 :param prod_name str: The name of the product.
1139 :param prod_info Config: The specific configuration corresponding to the
1141 :param where str: The path of the repository where to put the resulting
1143 :return: The path of the resulting archive
1146 path_targz_prod = os.path.join(where, prod_name + PACKAGE_EXT)
1147 tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
1148 local_path = prod_info.source_dir
1150 tar_prod.add(local_path,
1152 exclude=exclude_VCS_and_extensions_26)
1154 tar_prod.add(local_path,
1156 filter=exclude_VCS_and_extensions)
1158 return path_targz_prod
# Builds a self-contained sat project tree under tmp_working_dir/PROJECT:
# writes project.pyconf from PROJECT_TEMPLATE (optionally appending
# ARCHIVEFTP and LICENCEPATH entries), copies each product's scripts/patches
# and a rewritten pyconf via find_product_scripts_and_pyconf, and dumps the
# application pyconf via write_application_pyconf.
# NOTE(review): elided listing throughout (e.g. 1177->1179, 1192->1194,
# 1216->1221, 1228->1230, 1244->end); the sub-directory name arguments, the
# 'continue' after the not_in_package test, ff.close(), and the final
# 'return d_project' are presumably among the missing lines -- also confirm
# whether products_pyconf_tmp_dir / patches_tmp_dir appear in the
# ensure_path_exists list in the full file.
1160 def create_project_for_src_package(config, tmp_working_dir, with_vcs, with_ftp):
1161 '''Create a specific project for a source package.
1163 :param config Config: The global configuration.
1164 :param tmp_working_dir str: The temporary local directory containing some
1165 specific directories or files needed in the
1167 :param with_vcs boolean: True if the package is with vcs products (not
1168 transformed into archive products)
1169 :param with_ftp boolean: True if the package use ftp servers to get archives
1170 :return: The dictionary
1171 {"project" : (produced project, project path in the archive)}
1175 # Create in the working temporary directory the full project tree
1176 project_tmp_dir = os.path.join(tmp_working_dir, PROJECT_DIR)
1177 products_pyconf_tmp_dir = os.path.join(project_tmp_dir,
1179 compil_scripts_tmp_dir = os.path.join(project_tmp_dir,
1182 env_scripts_tmp_dir = os.path.join(project_tmp_dir,
1185 patches_tmp_dir = os.path.join(project_tmp_dir,
1188 application_tmp_dir = os.path.join(project_tmp_dir,
1190 for directory in [project_tmp_dir,
1191 compil_scripts_tmp_dir,
1192 env_scripts_tmp_dir,
1194 application_tmp_dir]:
1195 src.ensure_path_exists(directory)
1197 # Create the pyconf that contains the information of the project
1198 project_pyconf_name = "project.pyconf"
1199 project_pyconf_file = os.path.join(project_tmp_dir, project_pyconf_name)
1200 ff = open(project_pyconf_file, "w")
1201 ff.write(PROJECT_TEMPLATE)
1202 if with_ftp and len(config.PATHS.ARCHIVEFTP) > 0:
# ARCHIVEFTP entries are joined with ':' into a single pyconf string
1203 ftp_path='ARCHIVEFTP : "'+config.PATHS.ARCHIVEFTP[0]
1204 for ftpserver in config.PATHS.ARCHIVEFTP[1:]:
1205 ftp_path=ftp_path+":"+ftpserver
1207 ff.write("# ftp servers where to search for prerequisite archives\n")
1209 # add licence paths if any
1210 if len(config.PATHS.LICENCEPATH) > 0:
1211 licence_path='LICENCEPATH : "'+config.PATHS.LICENCEPATH[0]
1212 for path in config.PATHS.LICENCEPATH[1:]:
1213 licence_path=licence_path+":"+path
1215 ff.write("\n# Where to search for licences\n")
1216 ff.write(licence_path)
1221 # Loop over the products to get there pyconf and all the scripts
1222 # (compilation, environment, patches)
1223 # and create the pyconf file to add to the project
1224 lproducts_name = config.APPLICATION.products.keys()
1225 l_products = src.product.get_products_infos(lproducts_name, config)
1226 for p_name, p_info in l_products:
1227 # skip product with property not_in_package set to yes
1228 if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
1230 find_product_scripts_and_pyconf(p_name,
1234 compil_scripts_tmp_dir,
1235 env_scripts_tmp_dir,
1237 products_pyconf_tmp_dir)
1239 # for the application pyconf, we write directly the config
1240 # don't search for the original pyconf file
1241 # to avoid problems with overwrite sections and rm_products key
1242 write_application_pyconf(config, application_tmp_dir)
1244 d_project = {"project" : (project_tmp_dir, PROJECT_DIR )}
# For one product: copies its compilation script, environment script and
# patches into the temporary project, then writes a rewritten copy of its
# pyconf there. In non-vcs mode the relevant sections are switched to
# get_source="archive" pointing at <name>.tgz; in vcs mode the git_info
# entries are replaced by their substituted values from p_info so the
# packaged project does not depend on un-exported project files.
# NOTE(review): elided listing (e.g. 1247->1251 parameter lines, 1303->1305
# 'else:', 1308->1310, 1313->1315, 1329->1331, 1333->end); missing lines
# presumably include the p_info/config/with_vcs/patches_tmp_dir parameters,
# the addMapping trailing argument, and ff.close() -- confirm against the
# full file before editing.
1247 def find_product_scripts_and_pyconf(p_name,
1251 compil_scripts_tmp_dir,
1252 env_scripts_tmp_dir,
1254 products_pyconf_tmp_dir):
1255 '''Create a specific pyconf file for a given product. Get its environment
1256 script, its compilation script and patches and put it in the temporary
1257 working directory. This method is used in the source package in order to
1258 construct the specific project.
1260 :param p_name str: The name of the product.
1261 :param p_info Config: The specific configuration corresponding to the
1263 :param config Config: The global configuration.
1264 :param with_vcs boolean: True if the package is with vcs products (not
1265 transformed into archive products)
1266 :param compil_scripts_tmp_dir str: The path to the temporary compilation
1267 scripts directory of the project.
1268 :param env_scripts_tmp_dir str: The path to the temporary environment script
1269 directory of the project.
1270 :param patches_tmp_dir str: The path to the temporary patch scripts
1271 directory of the project.
1272 :param products_pyconf_tmp_dir str: The path to the temporary product
1273 scripts directory of the project.
1276 # read the pyconf of the product
1277 product_pyconf_cfg = src.pyconf.Config(p_info.from_file)
1279 # find the compilation script if any
1280 if src.product.product_has_script(p_info):
1281 compil_script_path = src.Path(p_info.compil_script)
1282 compil_script_path.copy(compil_scripts_tmp_dir)
1284 # find the environment script if any
1285 if src.product.product_has_env_script(p_info):
1286 env_script_path = src.Path(p_info.environ.env_script)
1287 env_script_path.copy(env_scripts_tmp_dir)
1289 # find the patches if any
1290 if src.product.product_has_patches(p_info):
1291 patches = src.pyconf.Sequence()
1292 for patch_path in p_info.patches:
1293 p_path = src.Path(patch_path)
1294 p_path.copy(patches_tmp_dir)
1295 patches.append(os.path.basename(patch_path), "")
1297 if (not with_vcs) and src.product.product_is_vcs(p_info):
1298 # in non vcs mode, if the product is not archive, then make it become archive.
1300 # depending upon the incremental mode, select impacted sections
1301 if "properties" in p_info and "incremental" in p_info.properties and\
1302 p_info.properties.incremental == "yes":
1303 sections = ["default", "default_win", p_info.section, p_info.section+"_win"]
1305 sections = [p_info.section]
1306 for section in sections:
1307 if section in product_pyconf_cfg and "get_source" in product_pyconf_cfg[section]:
1308 DBG.write("sat package set archive mode to archive for product %s and section %s" %\
1310 product_pyconf_cfg[section].get_source = "archive"
1311 if not "archive_info" in product_pyconf_cfg[section]:
1312 product_pyconf_cfg[section].addMapping("archive_info",
1313 src.pyconf.Mapping(product_pyconf_cfg),
1315 product_pyconf_cfg[section].archive_info.archive_name =\
1316 p_info.name + ".tgz"
1318 if (with_vcs) and src.product.product_is_vcs(p_info):
1319 # in vcs mode we must replace explicitely the git server url
1320 # (or it will not be found later because project files are not exported in archives)
1321 for section in product_pyconf_cfg:
1322 # replace in all sections of the product pyconf the git repo definition by its substitued value (found in p_info)
1323 if "git_info" in product_pyconf_cfg[section]:
1324 for repo in product_pyconf_cfg[section].git_info:
1325 if repo in p_info.git_info:
1326 product_pyconf_cfg[section].git_info[repo] = p_info.git_info[repo]
1328 # write the pyconf file to the temporary project location
1329 product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir,
1331 ff = open(product_tmp_pyconf_path, 'w')
1332 ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1333 product_pyconf_cfg.__save__(ff, 1)
# Serializes a deep copy of config.APPLICATION into
# <application_tmp_dir>/<application>.pyconf, with workdir rewritten as the
# relative reference $VARS.salometoolsway/.. so the packaged application is
# relocatable. Saved un-evaluated so pyconf references survive.
# NOTE(review): elided listing (1352->1354->1357, 1358->1361, 1363->end);
# missing lines presumably set the application 'base' mode for the archive
# and hold the first arguments of the src.pyconf.Reference call -- confirm
# against the full file.
1337 def write_application_pyconf(config, application_tmp_dir):
1338 '''Write the application pyconf file in the specific temporary
1339 directory containing the specific project of a source package.
1341 :param config Config: The global configuration.
1342 :param application_tmp_dir str: The path to the temporary application
1343 scripts directory of the project.
1345 application_name = config.VARS.application
1346 # write the pyconf file to the temporary application location
1347 application_tmp_pyconf_path = os.path.join(application_tmp_dir,
1348 application_name + ".pyconf")
1349 with open(application_tmp_pyconf_path, 'w') as f:
1350 f.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1351 res = src.pyconf.Config()
1352 app = src.pyconf.deepCopyMapping(config.APPLICATION)
1354 # set base mode to "no" for the archive
1357 # Change the workdir
1358 app.workdir = src.pyconf.Reference(
1361 'VARS.salometoolsway + $VARS.sep + ".."')
1362 res.addMapping("APPLICATION", app, "")
1363 res.__save__(f, evaluated=False)
# Prepares the content of a standalone salomeTools package: the whole sat
# tree ("all_sat" entry) plus a sanitized local.pyconf (LOCAL.* reset to
# defaults, tag pinned to the current sat version, and -- when a project is
# packaged too -- the project's relative path appended to
# PROJECTS.project_file_paths).
# NOTE(review): elided listing (1374->1379, 1394->1396, 1398->1400,
# 1401->1403, 1403->end); missing lines presumably include 'd_project = {}',
# the 'if options.project:' guard before the project_arch_path block,
# ff.close() and 'return d_project' -- confirm against the full file.
1366 def sat_package(config, tmp_working_dir, options, logger):
1367 '''Prepare a dictionary that stores all the needed directories and files to
1368 add in a salomeTool package.
1370 :param tmp_working_dir str: The temporary local working directory
1371 :param options OptResult: the options of the launched command
1372 :return: the dictionary that stores all the needed directories and files to
1373 add in a salomeTool package.
1374 {label : (path_on_local_machine, path_in_archive)}
1379 # we include sat himself
1380 d_project["all_sat"]=(config.VARS.salometoolsway, "")
1382 # and we overwrite local.pyconf with a clean wersion.
1383 local_pyconf_tmp_path = os.path.join(tmp_working_dir, "local.pyconf")
1384 local_file_path = os.path.join(config.VARS.datadir, "local.pyconf")
1385 local_cfg = src.pyconf.Config(local_file_path)
# reset user-specific settings so the packaged sat starts from defaults
1386 local_cfg.PROJECTS.project_file_paths=src.pyconf.Sequence(local_cfg.PROJECTS)
1387 local_cfg.LOCAL["base"] = "default"
1388 local_cfg.LOCAL["workdir"] = "default"
1389 local_cfg.LOCAL["log_dir"] = "default"
1390 local_cfg.LOCAL["archive_dir"] = "default"
1391 local_cfg.LOCAL["VCS"] = "None"
1392 local_cfg.LOCAL["tag"] = src.get_salometool_version(config)
1394 # if the archive contains a project, we write its relative path in local.pyconf
1396 project_arch_path = os.path.join("projects", options.project,
1397 os.path.basename(options.project_file_path))
1398 local_cfg.PROJECTS.project_file_paths.append(project_arch_path, "")
1400 ff = open(local_pyconf_tmp_path, 'w')
1401 local_cfg.__save__(ff, 1)
1403 d_project["local.pyconf"]=(local_pyconf_tmp_path, "data/local.pyconf")
# Prepares the content of a project package: each project sub-directory
# (applications/products/jobs/machines, plus archives unless ftp_mode) is
# mapped to its destination in the archive, the project pyconf is rewritten
# with relative Reference paths and a project_path entry, internal fields
# (file_path, PWD, ARCHIVEPATH in ftp mode) are stripped, and the rewritten
# "hat" pyconf is written to tmp_working_dir.
# NOTE(review): elided listing throughout (1423->1426->1429: the try/except
# around the PROJECTS lookup; 1434->1436: the JOBPATH entry; 1436->1438:
# 'if not ftp_mode:'; 1441->1443: 'for path in paths:'; 1443->1446/1447->1449:
# the embedded_in_sat branches; 1456->1459 / 1466->1469: Reference arguments;
# 1471->1473: 'if ftp_mode:'; ff.close(); 'return d_project') -- confirm
# against the full file before editing.
1407 def project_package(config, name_project, project_file_path, ftp_mode, tmp_working_dir, embedded_in_sat, logger):
1408 '''Prepare a dictionary that stores all the needed directories and files to
1409 add in a project package.
1411 :param project_file_path str: The path to the local project.
1412 :param ftp_mode boolean: Do not embed archives, the archive will rely on ftp mode to retrieve them.
1413 :param tmp_working_dir str: The temporary local directory containing some
1414 specific directories or files needed in the
1416 :param embedded_in_sat boolean : the project package is embedded in a sat package
1417 :return: the dictionary that stores all the needed directories and files to
1418 add in a project package.
1419 {label : (path_on_local_machine, path_in_archive)}
1423 # Read the project file and get the directories to add to the package
1426 project_pyconf_cfg = config.PROJECTS.projects.__getattr__(name_project)
1429 WARNING: inexisting config.PROJECTS.projects.%s, try to read now from:\n%s\n""" % (name_project, project_file_path))
1430 project_pyconf_cfg = src.pyconf.Config(project_file_path)
1431 project_pyconf_cfg.PWD = os.path.dirname(project_file_path)
1433 paths = {"APPLICATIONPATH" : "applications",
1434 "PRODUCTPATH" : "products",
1436 "MACHINEPATH" : "machines"}
1438 paths["ARCHIVEPATH"] = "archives"
1440 # Loop over the project paths and add it
1441 project_file_name = os.path.basename(project_file_path)
1443 if path not in project_pyconf_cfg:
1446 dest_path = os.path.join("projects", name_project, paths[path])
1447 project_file_dest = os.path.join("projects", name_project, project_file_name)
1449 dest_path = paths[path]
1450 project_file_dest = project_file_name
1452 # Add the directory to the files to add in the package
1453 d_project[path] = (project_pyconf_cfg[path], dest_path)
1455 # Modify the value of the path in the package
1456 project_pyconf_cfg[path] = src.pyconf.Reference(
1459 'project_path + "/' + paths[path] + '"')
1461 # Modify some values
1462 if "project_path" not in project_pyconf_cfg:
1463 project_pyconf_cfg.addMapping("project_path",
1464 src.pyconf.Mapping(project_pyconf_cfg),
1466 project_pyconf_cfg.project_path = src.pyconf.Reference(project_pyconf_cfg,
1469 # we don't want to export these two fields
1470 project_pyconf_cfg.__delitem__("file_path")
1471 project_pyconf_cfg.__delitem__("PWD")
1473 project_pyconf_cfg.__delitem__("ARCHIVEPATH")
1475 # Write the project pyconf file
1476 project_pyconf_tmp_path = os.path.join(tmp_working_dir, project_file_name)
1477 ff = open(project_pyconf_tmp_path, 'w')
1478 ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1479 project_pyconf_cfg.__save__(ff, 1)
1481 d_project["Project hat file"] = (project_pyconf_tmp_path, project_file_dest)
# Writes <where>/README (UTF-8): a templated header (user/date/sat version/
# distribution), then per-option sections rendered from the
# README_*.template files in VARS.internal_dir (binaries, launcher or
# virtual-app, sources, project, sat), plus an inline "compilation based on
# the binaries" section for Linux bin+src packages.
# NOTE(review): heavily elided listing -- the readme_header = """...""" 
# assignment and large parts of the template literals (e.g. lines 1488-1499,
# 1516-1517, 1535-1537 with 'd = dict()'), the 'else:' branches, the
# 'if options.sources/project/sat:' guards and the final 'return readme_path'
# are presumably among the missing lines. No comments are inserted below the
# def to avoid corrupting the string-literal regions; confirm against the
# full file before editing.
1485 def add_readme(config, options, where):
1486 readme_path = os.path.join(where, "README")
1487 with codecs.open(readme_path, "w", 'utf-8') as f:
1489 # templates for building the header
1491 # This package was generated with sat $version
1494 # Distribution : $dist
1496 In the following, $$ROOT represents the directory where you have installed
1497 SALOME (the directory where this file is located).
1500 if src.architecture.is_windows():
1501 readme_header = readme_header.replace('$$ROOT','%ROOT%')
1502 readme_compilation_with_binaries="""
1504 compilation based on the binaries used as prerequisites
1505 =======================================================
1507 If you fail to compile the complete application (for example because
1508 you are not root on your system and cannot install missing packages), you
1509 may try a partial compilation based on the binaries.
1510 For that it is necessary to copy the binaries from BINARIES to INSTALL,
1511 and do some substitutions on cmake and .la files (replace the build directories
1513 The procedure to do it is:
1514 1) Remove or rename INSTALL directory if it exists
1515 2) Execute the shell script install_bin.sh:
1518 3) Use SalomeTool (as explained in Sources section) and compile only the
1519 modules you need to (with -p option)
1522 readme_header_tpl=string.Template(readme_header)
1523 readme_template_path_bin = os.path.join(config.VARS.internal_dir,
1524 "README_BIN.template")
1525 readme_template_path_bin_launcher = os.path.join(config.VARS.internal_dir,
1526 "README_LAUNCHER.template")
1527 readme_template_path_bin_virtapp = os.path.join(config.VARS.internal_dir,
1528 "README_BIN_VIRTUAL_APP.template")
1529 readme_template_path_src = os.path.join(config.VARS.internal_dir,
1530 "README_SRC.template")
1531 readme_template_path_pro = os.path.join(config.VARS.internal_dir,
1532 "README_PROJECT.template")
1533 readme_template_path_sat = os.path.join(config.VARS.internal_dir,
1534 "README_SAT.template")
1536 # prepare substitution dictionary
1538 d['user'] = config.VARS.user
1539 d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
1540 d['version'] = src.get_salometool_version(config)
1541 d['dist'] = config.VARS.dist
1542 f.write(readme_header_tpl.substitute(d)) # write the general header (common)
1544 if options.binaries or options.sources:
1545 d['application'] = config.VARS.application
1546 d['BINARIES'] = config.INTERNAL.config.binary_dir
1547 d['SEPARATOR'] = config.VARS.sep
1548 if src.architecture.is_windows():
1549 d['operatingSystem'] = 'Windows'
1550 d['PYTHON3'] = 'python3'
1551 d['ROOT'] = '%ROOT%'
1553 d['operatingSystem'] = 'Linux'
1556 f.write("# Application: " + d['application'] + "\n")
1557 if 'KERNEL' in config.APPLICATION.products:
1558 VersionSalome = src.get_salome_version(config)
1559 # Case where SALOME has the launcher that uses the SalomeContext API
1560 if VersionSalome >= 730:
1561 d['launcher'] = config.APPLICATION.profile.launcher_name
1563 d['virtual_app'] = 'runAppli' # this info is not used now)
1565 # write the specific sections
1566 if options.binaries:
1567 f.write(src.template.substitute(readme_template_path_bin, d))
1568 if "virtual_app" in d:
1569 f.write(src.template.substitute(readme_template_path_bin_virtapp, d))
1571 f.write(src.template.substitute(readme_template_path_bin_launcher, d))
1574 f.write(src.template.substitute(readme_template_path_src, d))
1576 if options.binaries and options.sources and not src.architecture.is_windows():
1577 f.write(readme_compilation_with_binaries)
1580 f.write(src.template.substitute(readme_template_path_pro, d))
1583 f.write(src.template.substitute(readme_template_path_sat, d))
def update_config(config, logger, prop, value):
    '''Remove from config.APPLICATION.products every product whose property
    *prop* has the value *value*.

    :param config Config: The global config.
    :param logger Logger: The logging instance.
    :param prop str: The property to filter.
    :param value str: The value of the property to filter.
    '''
    # Without an APPLICATION section (e.g. 'sat package -t') there is
    # nothing to filter.
    if "APPLICATION" not in config:
        return
    # Collect the matching names first: the products mapping must not be
    # mutated while it is being iterated.
    to_remove = [name
                 for name in config.APPLICATION.products.keys()
                 if src.get_property_in_product_cfg(
                        src.product.get_product_config(config, name),
                        prop) == value]
    for name in to_remove:
        del config.APPLICATION.products[name]
        logger.write("Remove product %s with property %s\n" % (name, prop), 5)
1606 '''method that is called when salomeTools is called with --help option.
1608 :return: The text to display for the package command description.
1612 The package command creates a tar file archive of a product.
1613 There are four kinds of archive, which can be mixed:
1615 1 - The binary archive.
1616 It contains the product installation directories plus a launcher.
1617 2 - The sources archive.
1618 It contains the product archives, a project (the application plus salomeTools).
1619 3 - The project archive.
1620 It contains a project (give the project file path as argument).
1621 4 - The salomeTools archive.
1622 It contains code utility salomeTools.
1625 >> sat package SALOME-master --binaries --sources""")
1627 def run(args, runner, logger):
1628 '''method that is called when salomeTools is called with package parameter.
1632 (options, args) = parser.parse_args(args)
1635 # Check that a type of package is called, and only one
1636 all_option_types = (options.binaries,
1638 options.project not in ["", None],
1640 options.bin_products)
1642 # Check if no option for package type
1643 if all_option_types.count(True) == 0:
1644 msg = _("Error: Precise a type for the package\nUse one of the "
1645 "following options: --binaries, --sources, --project or"
1646 " --salometools, --bin_products")
1647 logger.write(src.printcolors.printcError(msg), 1)
1648 logger.write("\n", 1)
1650 do_create_package = options.binaries or options.sources or options.project or options.sat
1652 if options.bin_products:
1653 ret = bin_products_archives(runner.cfg, logger, options.with_vcs)
1656 if not do_create_package:
1659 # continue to create a tar.gz package
1661 # The repository where to put the package if not Binary or Source
1662 package_default_path = runner.cfg.LOCAL.workdir
1663 # if the package contains binaries or sources:
1664 if options.binaries or options.sources or options.bin_products:
1665 # Check that the command has been called with an application
1666 src.check_config_has_application(runner.cfg)
1668 # Display information
1669 logger.write(_("Packaging application %s\n") % src.printcolors.printcLabel(
1670 runner.cfg.VARS.application), 1)
1672 # Get the default directory where to put the packages
1673 package_default_path = os.path.join(runner.cfg.APPLICATION.workdir, "PACKAGE")
1674 src.ensure_path_exists(package_default_path)
1676 # if the package contains a project:
1678 # check that the project is visible by SAT
1679 projectNameFile = options.project + ".pyconf"
1681 for i in runner.cfg.PROJECTS.project_file_paths:
1682 baseName = os.path.basename(i)
1683 if baseName == projectNameFile:
1687 if foundProject is None:
1688 local_path = os.path.join(runner.cfg.VARS.salometoolsway, "data", "local.pyconf")
1689 msg = _("""ERROR: the project %(1)s is not visible by salomeTools.
1693 Please add it in file:
1695 {"1": options.project, "2": "\n ".join(runner.cfg.PROJECTS.project_file_paths), "3": local_path})
1696 logger.write(src.printcolors.printcError(msg), 1)
1697 logger.write("\n", 1)
1700 options.project_file_path = foundProject
1701 src.printcolors.print_value(logger, "Project path", options.project_file_path, 2)
1703 # Remove the products that are filtered by the --without_properties option
1704 if options.without_properties:
1705 prop, value = options.without_properties
1706 update_config(runner.cfg, logger, prop, value)
1708 # Remove from config the products that have the not_in_package property
1709 update_config(runner.cfg, logger, "not_in_package", "yes")
1711 # get the name of the archive or build it
1713 if os.path.basename(options.name) == options.name:
1714 # only a name (not a path)
1715 archive_name = options.name
1716 dir_name = package_default_path
1718 archive_name = os.path.basename(options.name)
1719 dir_name = os.path.dirname(options.name)
1721 # suppress extension
1722 if archive_name[-len(".tgz"):] == ".tgz":
1723 archive_name = archive_name[:-len(".tgz")]
1724 if archive_name[-len(".tar.gz"):] == ".tar.gz":
1725 archive_name = archive_name[:-len(".tar.gz")]
1729 dir_name = package_default_path
1730 if options.binaries or options.sources:
1731 archive_name = runner.cfg.APPLICATION.name
1733 if options.binaries:
1734 archive_name += "-"+runner.cfg.VARS.dist
1737 archive_name += "-SRC"
1738 if options.with_vcs:
1739 archive_name += "-VCS"
1742 archive_name += ("salomeTools_" + src.get_salometool_version(runner.cfg))
1747 archive_name += ("satproject_" + options.project)
1749 if len(archive_name)==0: # no option worked
1750 msg = _("Error: Cannot name the archive\n"
1751 " check if at least one of the following options was "
1752 "selected : --binaries, --sources, --project or"
1754 logger.write(src.printcolors.printcError(msg), 1)
1755 logger.write("\n", 1)
1758 path_targz = os.path.join(dir_name, archive_name + PACKAGE_EXT)
1760 src.printcolors.print_value(logger, "Package path", path_targz, 2)
1762 # Create a working directory for all files that are produced during the
1763 # package creation and that will be removed at the end of the command
1764 tmp_working_dir = os.path.join(runner.cfg.VARS.tmp_root, runner.cfg.VARS.datehour)
1765 src.ensure_path_exists(tmp_working_dir)
1766 logger.write("\n", 5)
1767 logger.write(_("The temporary working directory: %s\n" % tmp_working_dir),5)
1769 logger.write("\n", 3)
1771 msg = _("Preparation of files to add to the archive")
1772 logger.write(src.printcolors.printcLabel(msg), 2)
1773 logger.write("\n", 2)
1775 d_files_to_add={} # content of the archive
1777 # a dict to hold paths that will need to be substitute for users recompilations
1778 d_paths_to_substitute={}
1780 if options.binaries:
1781 d_bin_files_to_add = binary_package(runner.cfg,
1785 # for all binaries dir, store the substitution that will be required
1786 # for extra compilations
1787 for key in d_bin_files_to_add:
1788 if key.endswith("(bin)"):
1789 source_dir = d_bin_files_to_add[key][0]
1790 path_in_archive = d_bin_files_to_add[key][1].replace(
1791 runner.cfg.INTERNAL.config.binary_dir + runner.cfg.VARS.dist,
1792 runner.cfg.INTERNAL.config.install_dir)
1793 if os.path.basename(source_dir)==os.path.basename(path_in_archive):
1794 # if basename is the same we will just substitute the dirname
1795 d_paths_to_substitute[os.path.dirname(source_dir)]=\
1796 os.path.dirname(path_in_archive)
1798 d_paths_to_substitute[source_dir]=path_in_archive
1800 d_files_to_add.update(d_bin_files_to_add)
1802 d_files_to_add.update(source_package(runner,
1807 if options.binaries:
1808 # for archives with bin and sources we provide a shell script able to
1809 # install binaries for compilation
1810 file_install_bin=produce_install_bin_file(runner.cfg,logger,
1812 d_paths_to_substitute,
1814 d_files_to_add.update({"install_bin" : (file_install_bin, "install_bin.sh")})
1815 logger.write("substitutions that need to be done later : \n", 5)
1816 logger.write(str(d_paths_to_substitute), 5)
1817 logger.write("\n", 5)
1819 # --salomeTool option is not considered when --sources is selected, as this option
1820 # already brings salomeTool!
1822 d_files_to_add.update(sat_package(runner.cfg, tmp_working_dir,
1826 DBG.write("config for package %s" % options.project, runner.cfg)
1827 d_files_to_add.update(project_package(runner.cfg, options.project, options.project_file_path, options.ftp, tmp_working_dir, options.sat, logger))
1829 if not(d_files_to_add):
1830 msg = _("Error: Empty dictionnary to build the archive!\n")
1831 logger.write(src.printcolors.printcError(msg), 1)
1832 logger.write("\n", 1)
1835 # Add the README file in the package
1836 local_readme_tmp_path = add_readme(runner.cfg, options, tmp_working_dir)
1837 d_files_to_add["README"] = (local_readme_tmp_path, "README")
1839 # Add the additional files of option add_files
1840 if options.add_files:
1841 for file_path in options.add_files:
1842 if not os.path.exists(file_path):
1843 msg = _("WARNING: the file %s is not accessible.\n" % file_path)
1845 file_name = os.path.basename(file_path)
1846 d_files_to_add[file_name] = (file_path, file_name)
1848 logger.write("\n", 2)
1849 logger.write(src.printcolors.printcLabel(_("Actually do the package")), 2)
1850 logger.write("\n", 2)
1851 logger.write("\nfiles and directories to add:\n%s\n\n" % PP.pformat(d_files_to_add), 5)
1855 # Creating the object tarfile
1856 tar = tarfile.open(path_targz, mode='w:gz')
1858 # get the filtering function if needed
1860 filter_function = exclude_VCS_and_extensions_26
1862 filter_function = exclude_VCS_and_extensions
1864 # Add the files to the tarfile object
1865 res = add_files(tar, archive_name, d_files_to_add, logger, f_exclude=filter_function)
1867 except KeyboardInterrupt:
1868 logger.write(src.printcolors.printcError("\nERROR: forced interruption\n"), 1)
1869 logger.write(_("Removing the temporary working directory '%s'... ") % tmp_working_dir, 1)
1870 # remove the working directory
1871 shutil.rmtree(tmp_working_dir)
1872 logger.write(_("OK"), 1)
1873 logger.write(_("\n"), 1)
1876 # case if no application, only package sat as 'sat package -t'
1878 app = runner.cfg.APPLICATION
1882 # unconditionaly remove the tmp_local_working_dir
1884 tmp_local_working_dir = os.path.join(app.workdir, "tmp_package")
1885 if os.path.isdir(tmp_local_working_dir):
1886 shutil.rmtree(tmp_local_working_dir)
1888 # remove the tmp directory, unless user has registered as developer
1889 if os.path.isdir(tmp_working_dir) and (not DBG.isDeveloper()):
1890 shutil.rmtree(tmp_working_dir)
1892 # Print again the path of the package
1893 logger.write("\n", 2)
1894 src.printcolors.print_value(logger, "Package path", path_targz, 2)