3 # Copyright (C) 2010-2012 CEA/DEN
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 # Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
31 from application import get_SALOME_modules
32 from src.versionMinorMajorPatch import MinorMajorPatch as MMP
33 import src.debug as DBG
# True when running under Python <= 2.6; used to pick the legacy tarfile API
# ('exclude=' callback) instead of the modern 'filter=' callback (see add_files).
35 old_python = sys.version_info[0] == 2 and sys.version_info[1] <= 6

# Names of the sub-directories placed inside a produced source package.
42 ARCHIVE_DIR = "ARCHIVES"
43 PROJECT_DIR = "PROJECT"

# VCS bookkeeping directories that are always excluded from produced archives.
45 IGNORED_DIRS = [".git", ".svn"]
# File extensions excluded from produced archives (empty by default).
46 IGNORED_EXTENSIONS = []

48 PACKAGE_EXT=".tar.gz" # the extension we use for the packages
# Pyconf templates embedded in generated source packages.
# The Windows variant stores ARCHIVES beside the project ($root_path + "ARCHIVES");
# the POSIX variant stores it inside the project ($project_path + "ARCHIVES").
# NOTE(review): the '#' lines inside the triple-quoted strings below are part of
# the generated pyconf text, not Python comments.  The closing quotes of these
# templates are not visible in this excerpt — do not edit inside them.
50 if src.architecture.is_windows():
51 PROJECT_TEMPLATE = """#!/usr/bin/env python
54 # The path to the archive root directory
55 root_path : $PWD + "/../"
57 project_path : $PWD + "/"
59 # Where to search the archives of the products
60 ARCHIVEPATH : $root_path + "ARCHIVES"
61 # Where to search the pyconf of the applications
62 APPLICATIONPATH : $project_path + "applications/"
63 # Where to search the pyconf of the products
64 PRODUCTPATH : $project_path + "products/"
65 # Where to search the pyconf of the jobs of the project
66 JOBPATH : $project_path + "jobs/"
67 # Where to search the pyconf of the machines of the project
68 MACHINEPATH : $project_path + "machines/"
71 PROJECT_TEMPLATE = """#!/usr/bin/env python
75 project_path : $PWD + "/"
77 # Where to search the archives of the products
78 ARCHIVEPATH : $project_path + "ARCHIVES"
79 # Where to search the pyconf of the applications
80 APPLICATIONPATH : $project_path + "applications/"
81 # Where to search the pyconf of the products
82 PRODUCTPATH : $project_path + "products/"
83 # Where to search the pyconf of the jobs of the project
84 JOBPATH : $project_path + "jobs/"
85 # Where to search the pyconf of the machines of the project
86 MACHINEPATH : $project_path + "machines/"
90 LOCAL_TEMPLATE = ("""#!/usr/bin/env python
98 archive_dir : 'default'
107 $LOCAL.workdir + $VARS.sep + \"""" + PROJECT_DIR + """\" + $VARS.sep + "project.pyconf"
112 # Define all possible option for the package command : sat package <options>
# NOTE(review): the default-value lines for the 'with_vcs' and 'ftp' options are
# not visible in this excerpt (presumably False) — confirm against the full file.
113 parser = src.options.Options()
114 parser.add_option('b', 'binaries', 'boolean', 'binaries',
115 _('Optional: Produce a binary package.'), False)
116 parser.add_option('f', 'force_creation', 'boolean', 'force_creation',
117 _('Optional: Only binary package: produce the archive even if '
118 'there are some missing products.'), False)
119 parser.add_option('s', 'sources', 'boolean', 'sources',
120 _('Optional: Produce a compilable archive of the sources of the '
121 'application.'), False)
122 parser.add_option('', 'bin_products', 'boolean', 'bin_products',
123 _('Optional: Create binary archives for all products.'), False)
124 parser.add_option('', 'with_vcs', 'boolean', 'with_vcs',
125 _('Optional: Do not make archive for products in VCS mode (git, cvs, svn). '
126 'Sat prepare will use VCS mode instead to retrieve them.'
127 '\n Also, when combined with "--bin_products" option, restrict the building of product archives to VCS products.'),
129 parser.add_option('', 'ftp', 'boolean', 'ftp',
130 _('Optional: Do not embed archives for products in archive mode.'
131 'Sat prepare will use ftp instead to retrieve them'),
133 parser.add_option('e', 'exe', 'string', 'exe',
134 _('Optional: Produce an extra launcher based upon the exe given as argument.'), "")
135 parser.add_option('p', 'project', 'string', 'project',
136 _('Optional: Produce an archive that contains a project.'), "")
137 parser.add_option('t', 'salometools', 'boolean', 'sat',
138 _('Optional: Produce an archive that contains salomeTools.'), False)
139 parser.add_option('n', 'name', 'string', 'name',
140 _('Optional: The name or full path of the archive.'), None)
141 parser.add_option('', 'add_files', 'list2', 'add_files',
142 _('Optional: The list of additional files to add to the archive.'), [])
143 parser.add_option('', 'without_properties', 'properties', 'without_properties',
144 _('Optional: Filter the products by their properties.\n\tSyntax: '
145 '--without_properties <property>:<value>'))
# NOTE(review): this excerpt is missing lines — the per-name 'for' loop header,
# the 'already_added = set()' initialisation, the 'try:' opening the guarded
# tar.add call, and the old_python if/else around lines 188/192 are not visible.
148 def add_files(tar, name_archive, d_content, logger, f_exclude=None):
149 '''Create an archive containing all directories and files that are given in
150 the d_content argument.
152 :param tar tarfile: The tarfile instance used to make the archive.
153 :param name_archive str: The name of the archive to make.
154 :param d_content dict: The dictionary that contains all directories and files
155 to add in the archive.
157 (path_on_local_machine, path_in_archive)
158 :param logger Logger: the logging instance
159 :param f_exclude Function: the function that filters entries out of the archive
160 :return: 0 if success, 1 if not.
163 # get the max length of the messages in order to make the display
164 max_len = len(max(d_content.keys(), key=len))
167 # loop over each directory or file stored in the d_content dictionary
168 names = sorted(d_content.keys())
169 DBG.write("add tar names", names)
171 # used to avoid duplications (for pip install in python, or single_install_dir cases)
174 # display information
175 len_points = max_len - len(name) + 3
176 local_path, archive_path = d_content[name]
177 in_archive = os.path.join(name_archive, archive_path)
178 logger.write(name + " " + len_points * "." + " "+ in_archive + " ", 3)
179 # Get the local path and the path in archive
180 # of the directory or file to add
181 # Add it in the archive
# Deduplicate on (local path -> archive path) so the same install dir is only
# stored once (pip / single_install_dir products can map several names to it).
183 key=local_path+"->"+in_archive
184 if key not in already_added:
# Python <= 2.6 tarfile only supports the 'exclude=' callback...
188 exclude=exclude_VCS_and_extensions_26)
# ...modern tarfile uses the 'filter=' callback instead.
192 filter=exclude_VCS_and_extensions)
193 already_added.add(key)
194 logger.write(src.printcolors.printcSuccess(_("OK")), 3)
195 except Exception as e:
# Best-effort: a failed add is reported (KO + message) but does not abort the loop.
196 logger.write(src.printcolors.printcError(_("KO ")), 3)
197 logger.write(str(e), 3)
199 logger.write("\n", 3)
203 def exclude_VCS_and_extensions_26(filename):
204 ''' The function that is used to exclude from package the link to the
205 VCS repositories (like .git) (only for python 2.6)
207 :param filename Str: The filename to exclude (or not).
208 :return: True if the file has to be excluded
# Substring match: any path containing an ignored dir name (.git, .svn) is excluded.
211 for dir_name in IGNORED_DIRS:
212 if dir_name in filename:
214 for extension in IGNORED_EXTENSIONS:
215 if filename.endswith(extension):
# NOTE(review): the 'return True' / final 'return False' lines are not visible
# in this excerpt — confirm against the full file.
219 def exclude_VCS_and_extensions(tarinfo):
220 ''' The function that is used to exclude from package the link to the
221 VCS repositories (like .git)
223 :param tarinfo TarInfo: The tar entry to exclude (or not).
224 :return: None if the file has to be excluded, the tarinfo otherwise.
225 :rtype: tarinfo or None
# This is a tarfile 'filter=' callback: returning None drops the entry.
227 filename = tarinfo.name
228 for dir_name in IGNORED_DIRS:
229 if dir_name in filename:
231 for extension in IGNORED_EXTENSIONS:
232 if filename.endswith(extension):
# NOTE(review): the 'return None' / final 'return tarinfo' lines are not
# visible in this excerpt — confirm against the full file.
# NOTE(review): excerpt — the remaining signature parameters, several 'else:'
# branches, the os.chmod call and the final 'return' are not visible here.
236 def produce_relative_launcher(config,
241 '''Create a specific SALOME launcher for the binary package. This launcher
244 :param config Config: The global configuration.
245 :param logger Logger: the logging instance
246 :param file_dir str: the directory where to put the launcher
247 :param file_name str: The launcher name
248 :param binaries_dir_name str: the name of the repository where the binaries
250 :return: the path of the produced launcher
254 # set base mode to "no" for the archive - save current mode to restore it at the end
255 if "base" in config.APPLICATION:
256 base_setting=config.APPLICATION.base
259 config.APPLICATION.base="no"
261 # get KERNEL installation path
262 kernel_info = src.product.get_product_config(config, "KERNEL")
263 kernel_base_name=os.path.basename(kernel_info.install_dir)
264 if kernel_info.install_mode == "base":
265 # case of kernel installed in base. the kernel install dir name is different in the archive
266 kernel_base_name=os.path.basename(os.path.dirname(kernel_info.install_dir))
268 kernel_root_dir = os.path.join(binaries_dir_name, kernel_base_name)
270 # set kernel bin dir (considering fhs property)
271 kernel_cfg = src.product.get_product_config(config, "KERNEL")
272 if src.get_property_in_product_cfg(kernel_cfg, "fhs"):
273 bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin")
275 bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin","salome")
277 # check if the application contains an application module
278 # check also if the application has a distene product,
279 # in this case get its licence file name
280 l_product_info = src.product.get_products_infos(config.APPLICATION.products.keys(), config)
281 salome_application_name="Not defined"
282 distene_licence_file_name=False
283 for prod_name, prod_info in l_product_info:
284 # look for a "salome application" and a distene product
285 if src.get_property_in_product_cfg(prod_info, "is_distene") == "yes":
286 distene_licence_file_name = src.product.product_has_licence(prod_info,
287 config.PATHS.LICENCEPATH)
288 if src.get_property_in_product_cfg(prod_info, "is_salome_application") == "yes":
289 salome_application_name=prod_info.name
291 # if the application contains an application module, we set ABSOLUTE_APPLI_PATH to it
292 # if not we set it to KERNEL_INSTALL_DIR, which is sufficient, except for salome test
293 if salome_application_name == "Not defined":
294 app_root_dir=kernel_root_dir
296 app_root_dir=os.path.join(binaries_dir_name, salome_application_name)
# Extra variables injected into the generated launcher environment.  The
# "out_dir_Path" token is a placeholder that is rewritten below so paths stay
# relative to wherever the archive is extracted.
299 additional_env['sat_bin_kernel_install_dir'] = "out_dir_Path + " +\
300 config.VARS.sep + bin_kernel_install_dir
301 if "python3" in config.APPLICATION and config.APPLICATION.python3 == "yes":
302 additional_env['sat_python_version'] = 3
304 additional_env['sat_python_version'] = 2
306 additional_env['ABSOLUTE_APPLI_PATH'] = "out_dir_Path" + config.VARS.sep + app_root_dir
307 launcher_name = src.get_launcher_name(config)
308 additional_env['APPLI'] = "out_dir_Path" + config.VARS.sep + file_name
310 # create an environment file writer
311 writer = src.environment.FileEnvWriter(config,
317 filepath = os.path.join(file_dir, file_name)
319 writer.write_env_file(filepath,
322 additional_env=additional_env,
324 for_package = binaries_dir_name)
326 # Little hack to put out_dir_Path outside the strings
327 src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' )
328 src.replace_in_file(filepath, "r'out_dir_Path + ", "out_dir_Path + r'" )
330 # A hack to put a call to a file for distene licence.
331 # It does nothing to an application that has no distene product
332 if distene_licence_file_name:
333 logger.write("Application has a distene licence file! We use it in package launcher", 5)
334 hack_for_distene_licence(filepath, distene_licence_file_name)
336 # change the rights in order to make the file executable for everybody
346 # restore modified setting by its initial value
347 config.APPLICATION.base=base_setting
351 def hack_for_distene_licence(filepath, licence_file):
352 '''Replace the distene licence env variable by a call to a file.
354 :param filepath Str: The path to the launcher to modify.
356 shutil.move(filepath, filepath + "_old")
358 filein = filepath + "_old"
359 fin = open(filein, "r")
# NOTE(review): 'fileout' is not assigned in the visible lines — presumably set
# to 'filepath' in an omitted line; confirm against the full file.  The close
# calls for fin/fout are also not visible here.
360 fout = open(fileout, "w")
361 text = fin.readlines()
362 # Find the Distene section
364 for i,line in enumerate(text):
365 if "# Set DISTENE License" in line:
369 # No distene product, there is nothing to do
# Drop the two original lines after the marker, then insert the replacement
# snippet that loads the licence module at launcher runtime.
375 del text[num_line +1]
376 del text[num_line +1]
377 text_to_insert =""" try:
378 distene_licence_file=r"%s"
379 if sys.version_info[0] >= 3 and sys.version_info[1] >= 5:
380 import importlib.util
381 spec_dist = importlib.util.spec_from_file_location("distene_licence", distene_licence_file)
382 distene=importlib.util.module_from_spec(spec_dist)
383 spec_dist.loader.exec_module(distene)
386 distene = imp.load_source('distene_licence', distene_licence_file)
387 distene.set_distene_variables(context)
389 pass\n""" % licence_file
390 text.insert(num_line + 1, text_to_insert)
# NOTE(review): excerpt — the remaining signature parameters, several 'else:' /
# 'if exe_name' headers, the chmod call and the final 'return' are not visible.
397 def produce_relative_env_files(config,
402 '''Create some specific environment files for the binary package. These
403 files use relative paths.
405 :param config Config: The global configuration.
406 :param logger Logger: the logging instance
407 :param file_dir str: the directory where to put the files
408 :param binaries_dir_name str: the name of the repository where the binaries
410 :param exe_name str: if given generate a launcher executing exe_name
411 :return: the list of path of the produced environment files
415 # set base mode to "no" for the archive - save current mode to restore it at the end
416 if "base" in config.APPLICATION:
417 base_setting=config.APPLICATION.base
420 config.APPLICATION.base="no"
422 # create an environment file writer
423 writer = src.environment.FileEnvWriter(config,
428 if src.architecture.is_windows():
430 filename = "env_launch.bat"
433 filename = "env_launch.sh"
436 filename=os.path.basename(exe_name)
439 filepath = writer.write_env_file(filename,
442 for_package = binaries_dir_name)
444 # Little hack to put out_dir_Path as environment variable
# Rewrites the out_dir_Path placeholder into the platform's variable-expansion
# syntax: %out_dir_Path% on Windows, ${out_dir_Path} on POSIX shells.
445 if src.architecture.is_windows() :
446 src.replace_in_file(filepath, '"out_dir_Path', '"%out_dir_Path%' )
447 src.replace_in_file(filepath, '=out_dir_Path', '=%out_dir_Path%' )
448 src.replace_in_file(filepath, ';out_dir_Path', ';%out_dir_Path%' )
450 src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )
451 src.replace_in_file(filepath, ':out_dir_Path', ':${out_dir_Path}' )
452 src.replace_in_file(filepath, ';out_dir_Path', ';${out_dir_Path}' )
# When an exe_name is given, append a line that forwards user arguments to it.
455 if src.architecture.is_windows():
456 cmd="\n\nrem Launch exe with user arguments\n%s " % exe_name + "%*"
458 cmd='\n\n# Launch exe with user arguments\n%s "$*"' % exe_name
459 with open(filepath, "a") as exe_launcher:
460 exe_launcher.write(cmd)
462 # change the rights in order to make the file executable for everybody
472 # restore modified setting by its initial value
473 config.APPLICATION.base=base_setting
477 def produce_install_bin_file(config,
482 '''Create a bash shell script which does substitutions in BINARIES dir
483 in order to use it for extra compilations.
485 :param config Config: The global configuration.
486 :param logger Logger: the logging instance
487 :param file_dir str: the directory where to put the files
488 :param d_sub, dict: the dictionary that contains the substitutions to be done
489 :param file_name str: the name of the install script file
490 :return: the produced file
494 filepath = os.path.join(file_dir, file_name)
495 # open the file and write into it
496 # use codec utf-8 as sat variables are in unicode
497 with codecs.open(filepath, "w", 'utf-8') as installbin_file:
498 installbin_template_path = os.path.join(config.VARS.internal_dir,
499 "INSTALL_BIN.template")
501 # build the name of the directory that will contain the binaries
502 binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
503 # build the substitution loop
# Generates a shell 'for f in $(grep ...)' loop that seds every key of d_sub
# to a path rooted at $(pwd) — NOTE(review): the enclosing Python loop over
# d_sub keys and the chmod at the end are not visible in this excerpt.
504 loop_cmd = "for f in $(grep -RIl"
506 loop_cmd += " -e "+ key
507 loop_cmd += ' ' + config.INTERNAL.config.install_dir +\
510 loop_cmd += " s?" + key + "?$(pwd)/" + d_sub[key] + "?g\n"
511 loop_cmd += ' " $f\ndone'
514 d["BINARIES_DIR"] = binaries_dir_name
515 d["SUBSTITUTION_LOOP"]=loop_cmd
516 d["INSTALL_DIR"]=config.INTERNAL.config.install_dir
518 # substitute the template and write it in file
519 content=src.template.substitute(installbin_template_path, d)
520 installbin_file.write(content)
521 # change the rights in order to make the file executable for everybody
533 def product_appli_creation_script(config,
537 '''Create a script that can produce an application (EDF style) in the binary
540 :param config Config: The global configuration.
541 :param logger Logger: the logging instance
542 :param file_dir str: the directory where to put the file
543 :param binaries_dir_name str: the name of the repository where the binaries
545 :return: the path of the produced script file
548 template_name = "create_appli.py.for_bin_packages.template"
549 template_path = os.path.join(config.VARS.internal_dir, template_name)
# NOTE(review): file handle opened here is never closed in the visible lines;
# prefer a 'with' block — confirm against the full file before changing.
550 text_to_fill = open(template_path, "r").read()
551 text_to_fill = text_to_fill.replace("TO BE FILLED 1",
552 '"' + binaries_dir_name + '"')
# Build the <module .../> XML lines injected into the template, one per SALOME
# module (cpp products contribute one line per generated component).
555 for product_name in get_SALOME_modules(config):
556 product_info = src.product.get_product_config(config, product_name)
558 if src.product.product_is_smesh_plugin(product_info):
561 if 'install_dir' in product_info and bool(product_info.install_dir):
562 if src.product.product_is_cpp(product_info):
564 for cpp_name in src.product.get_product_components(product_info):
565 line_to_add = ("<module name=\"" +
567 "\" gui=\"yes\" path=\"''' + "
568 "os.path.join(dir_bin_name, \"" +
569 cpp_name + "\") + '''\"/>")
572 line_to_add = ("<module name=\"" +
574 "\" gui=\"yes\" path=\"''' + "
575 "os.path.join(dir_bin_name, \"" +
576 product_name + "\") + '''\"/>")
577 text_to_add += line_to_add + "\n"
579 filled_text = text_to_fill.replace("TO BE FILLED 2", text_to_add)
581 tmp_file_path = os.path.join(file_dir, "create_appli.py")
582 ff = open(tmp_file_path, "w")
583 ff.write(filled_text)
586 # change the rights in order to make the file executable for everybody
587 os.chmod(tmp_file_path,
598 def bin_products_archives(config, logger, only_vcs):
599 '''Prepare binary packages for all products
600 :param config Config: The global configuration.
601 :param logger Logger: the logging instance
602 :param only_vcs bool: when True, restrict archive creation to VCS products
601 :return: the error status
605 logger.write("Make %s binary archives\n" % config.VARS.dist)
606 # Get the default directory where to put the packages
607 binpackage_path = os.path.join(config.APPLICATION.workdir, "PACKAGE", "products")
608 src.ensure_path_exists(binpackage_path)
609 # Get the list of product installation to add to the archive
610 l_products_name = sorted(config.APPLICATION.products.keys())
611 l_product_info = src.product.get_products_infos(l_products_name,
613 # first loop on products : filter products, analyse properties,
614 # and store the information that will be used to create the archive in the second loop
615 l_not_installed=[] # store not installed products for warning at the end
616 for prod_name, prod_info in l_product_info:
617 # ignore the native and fixed products for install directories
618 if (src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes"
619 or src.product.product_is_native(prod_info)
620 or src.product.product_is_fixed(prod_info)
621 or not src.product.product_compiles(prod_info)):
623 if only_vcs and not src.product.product_is_vcs(prod_info):
625 if not src.product.check_installation(config, prod_info):
626 l_not_installed.append(prod_name)
627 continue # product is not installed, we skip it
628 # prepare call to make_bin_archive
# Archive name: <product>-<version>-<dist>.tar.gz ('/' in version replaced by '_').
629 path_targz_prod = os.path.join(binpackage_path, prod_name + '-' + prod_info.version.replace("/", "_") + "-" + config.VARS.dist + PACKAGE_EXT)
630 targz_prod = tarfile.open(path_targz_prod, mode='w:gz')
631 bin_path = prod_info.install_dir
# NOTE(review): targz_prod.close() is not visible in this excerpt — confirm
# the archive is closed in an omitted line.
632 targz_prod.add(bin_path)
634 # Python program to find MD5 hash value of a file
# Write a companion .md5 file so users can verify the downloaded archive.
636 with open(path_targz_prod,"rb") as f:
637 bytes = f.read() # read file as bytes
638 readable_hash = hashlib.md5(bytes).hexdigest();
639 with open(path_targz_prod+".md5", "w") as md5sum:
640 md5sum.write("%s %s" % (readable_hash, os.path.basename(path_targz_prod)))
641 logger.write(" archive : %s (md5sum = %s)\n" % (path_targz_prod, readable_hash))
# NOTE(review): long excerpt with many omitted lines ('continue' statements,
# 'else:' branches, several call-argument lines, the final 'return').
645 def binary_package(config, logger, options, tmp_working_dir):
646 '''Prepare a dictionary that stores all the needed directories and files to
647 add in a binary package.
649 :param config Config: The global configuration.
650 :param logger Logger: the logging instance
651 :param options OptResult: the options of the launched command
652 :param tmp_working_dir str: The temporary local directory containing some
653 specific directories or files needed in the
655 :return: the dictionary that stores all the needed directories and files to
656 add in a binary package.
657 {label : (path_on_local_machine, path_in_archive)}
661 # Get the list of product installation to add to the archive
662 l_products_name = sorted(config.APPLICATION.products.keys())
663 l_product_info = src.product.get_products_infos(l_products_name,
666 # suppress compile time products for binaries-only archives
667 if not options.sources:
668 update_config(config, logger, "compile_time", "yes")
673 l_sources_not_present = []
674 generate_mesa_launcher = False # a flag to know if we generate a mesa launcher
675 if ("APPLICATION" in config and
676 "properties" in config.APPLICATION and
677 "mesa_launcher_in_package" in config.APPLICATION.properties and
678 config.APPLICATION.properties.mesa_launcher_in_package == "yes") :
679 generate_mesa_launcher=True
681 has_properties = "APPLICATION" in config and "properties" in config.APPLICATION
682 # first loop on products : filter products, analyse properties,
683 # and store the information that will be used to create the archive in the second loop
684 for prod_name, prod_info in l_product_info:
685 # skip product with property not_in_package set to yes
686 if src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes":
# NOTE(review): 'cfg' and 'git_server' appear undefined in this scope (elsewhere
# in the file 'config' and src.get_git_server(...) are used) — likely a bug; confirm.
689 if src.product.product_is_not_opensource(prod_info) and src.check_git_repository_has_non_opensource( cfg, git_server):
692 # Add the sources of the products that have the property
693 # sources_in_package : "yes"
694 if src.get_property_in_product_cfg(prod_info,
695 "sources_in_package") == "yes":
696 if os.path.exists(prod_info.source_dir):
697 l_source_dir.append((prod_name, prod_info.source_dir))
699 l_sources_not_present.append(prod_name)
701 # ignore the native and fixed products for install directories
702 if (src.product.product_is_native(prod_info)
703 or src.product.product_is_fixed(prod_info)
704 or not src.product.product_compiles(prod_info)):
707 # products with single_dir property will be installed in the PRODUCTS directory of the archive
708 is_single_dir=(src.appli_test_property(config,"single_install_dir", "yes") and \
709 src.product.product_test_property(prod_info,"single_install_dir", "yes"))
710 if src.product.check_installation(config, prod_info):
711 l_install_dir.append((prod_name, prod_info.name, prod_info.install_dir,
712 is_single_dir, prod_info.install_mode))
714 l_not_installed.append(prod_name)
716 # Add also the cpp generated modules (if any)
717 if src.product.product_is_cpp(prod_info):
719 for name_cpp in src.product.get_product_components(prod_info):
720 install_dir = os.path.join(config.APPLICATION.workdir,
721 config.INTERNAL.config.install_dir,
723 if os.path.exists(install_dir):
724 l_install_dir.append((name_cpp, name_cpp, install_dir, False, "value"))
726 l_not_installed.append(name_cpp)
728 # check the name of the directory that (could) contains the binaries
729 # from previous detar
730 binaries_from_detar = os.path.join(
731 config.APPLICATION.workdir,
732 config.INTERNAL.config.binary_dir + config.VARS.dist)
733 if os.path.exists(binaries_from_detar):
735 WARNING: existing binaries directory from previous detar installation:
737 To make new package from this, you have to:
738 1) install binaries in INSTALL directory with the script "install_bin.sh"
739 see README file for more details
740 2) or recompile everything in INSTALL with "sat compile" command
741 this step is long, and requires some linux packages to be installed
743 """ % binaries_from_detar)
745 # Print warning or error if there are some missing products
746 if len(l_not_installed) > 0:
747 text_missing_prods = ""
748 for p_name in l_not_installed:
749 text_missing_prods += " - " + p_name + "\n"
750 if not options.force_creation:
751 msg = _("ERROR: there are missing product installations:")
752 logger.write("%s\n%s" % (src.printcolors.printcError(msg),
755 raise src.SatException(msg)
757 msg = _("WARNING: there are missing products installations:")
758 logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
762 # Do the same for sources
763 if len(l_sources_not_present) > 0:
764 text_missing_prods = ""
765 for p_name in l_sources_not_present:
766 text_missing_prods += "-" + p_name + "\n"
767 if not options.force_creation:
768 msg = _("ERROR: there are missing product sources:")
769 logger.write("%s\n%s" % (src.printcolors.printcError(msg),
772 raise src.SatException(msg)
774 msg = _("WARNING: there are missing products sources:")
775 logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
779 # construct the name of the directory that will contain the binaries
780 if src.architecture.is_windows():
781 binaries_dir_name = config.INTERNAL.config.binary_dir
783 binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
784 # construct the correlation table between the product names, there
785 # actual install directories and there install directory in archive
787 for prod_name, prod_info_name, install_dir, is_single_dir, install_mode in l_install_dir:
788 prod_base_name=os.path.basename(install_dir)
789 if install_mode == "base":
790 # case of a products installed in base.
791 # because the archive is in base:no mode, the name of the install dir is different inside archive
792 # we set it to the product name or by PRODUCTS if single-dir
794 prod_base_name=config.INTERNAL.config.single_install_dir
796 prod_base_name=prod_info_name
797 path_in_archive = os.path.join(binaries_dir_name, prod_base_name)
798 d_products[prod_name + " (bin)"] = (install_dir, path_in_archive)
800 for prod_name, source_dir in l_source_dir:
801 path_in_archive = os.path.join("SOURCES", prod_name)
802 d_products[prod_name + " (sources)"] = (source_dir, path_in_archive)
804 # create an archives of compilation logs, and insert it into the tarball
805 logpath=os.path.join(config.APPLICATION.workdir, "LOGS")
806 path_targz_logs = os.path.join(tmp_working_dir, "logs.tgz")
807 tar_log = tarfile.open(path_targz_logs, mode='w:gz')
808 tar_log.add(logpath, arcname="LOGS")
810 d_products["LOGS"] = (path_targz_logs, "logs.tgz")
812 # for packages of SALOME applications including KERNEL,
813 # we produce a salome launcher or a virtual application (depending on salome version)
814 if 'KERNEL' in config.APPLICATION.products:
815 VersionSalome = src.get_salome_version(config)
816 # Case where SALOME has the launcher that uses the SalomeContext API
817 if VersionSalome >= MMP([7,3,0]):
818 # create the relative launcher and add it to the files to add
819 launcher_name = src.get_launcher_name(config)
820 launcher_package = produce_relative_launcher(config,
825 d_products["launcher"] = (launcher_package, launcher_name)
827 # if the application contains mesa products, we generate in addition to the
828 # classical salome launcher a launcher using mesa and called mesa_salome
829 # (the mesa launcher will be used for remote usage through ssh).
830 if generate_mesa_launcher:
831 #if there is one : store the use_mesa property
832 restore_use_mesa_option=None
833 if ('properties' in config.APPLICATION and
834 'use_mesa' in config.APPLICATION.properties):
835 restore_use_mesa_option = config.APPLICATION.properties.use_mesa
837 # activate mesa property, and generate a mesa launcher
838 src.activate_mesa_property(config) #activate use_mesa property
839 launcher_mesa_name="mesa_"+launcher_name
840 launcher_package_mesa = produce_relative_launcher(config,
845 d_products["launcher (mesa)"] = (launcher_package_mesa, launcher_mesa_name)
847 # if there was a use_mesa value, we restore it
848 # else we set it to the default value "no"
849 if restore_use_mesa_option != None:
850 config.APPLICATION.properties.use_mesa=restore_use_mesa_option
852 config.APPLICATION.properties.use_mesa="no"
855 # if we mix binaries and sources, we add a copy of the launcher,
856 # prefixed with "bin",in order to avoid clashes
857 launcher_copy_name="bin"+launcher_name
858 launcher_package_copy = produce_relative_launcher(config,
863 d_products["launcher (copy)"] = (launcher_package_copy, launcher_copy_name)
865 # Provide a script for the creation of an application EDF style
866 appli_script = product_appli_creation_script(config,
871 d_products["appli script"] = (appli_script, "create_appli.py")
873 # Put also the environment file
874 env_file = produce_relative_env_files(config,
879 if src.architecture.is_windows():
880 filename = "env_launch.bat"
882 filename = "env_launch.sh"
883 d_products["environment file"] = (env_file, filename)
885 # If option exe, produce an extra launcher based on specified exe
887 exe_file = produce_relative_env_files(config,
893 if src.architecture.is_windows():
894 filename = os.path.basename(options.exe) + ".bat"
896 filename = os.path.basename(options.exe) + ".sh"
897 d_products["exe file"] = (exe_file, filename)
# NOTE(review): excerpt — initialisations of d_archives_vcs/d_project, call
# arguments, try/finally around os.chdir, and the final 'return' are not visible.
902 def source_package(sat, config, logger, options, tmp_working_dir):
903 '''Prepare a dictionary that stores all the needed directories and files to
904 add in a source package.
906 :param config Config: The global configuration.
907 :param logger Logger: the logging instance
908 :param options OptResult: the options of the launched command
909 :param tmp_working_dir str: The temporary local directory containing some
910 specific directories or files needed in the
912 :return: the dictionary that stores all the needed directories and files to
913 add in a source package.
914 {label : (path_on_local_machine, path_in_archive)}
919 # Get all the products that are prepared using an archive
920 # unless ftp mode is specified (in this case the user of the
921 # archive will get the sources through the ftp mode of sat prepare
923 logger.write("Find archive products ... ")
924 d_archives, l_pinfo_vcs = get_archives(config, logger)
925 logger.write("Done\n")
928 if not options.with_vcs and len(l_pinfo_vcs) > 0:
929 # Make archives with the products that are not prepared using an archive
930 # (git, cvs, svn, etc)
931 logger.write("Construct archives for vcs products ... ")
932 d_archives_vcs = get_archives_vcs(l_pinfo_vcs,
937 logger.write("Done\n")
940 logger.write("Create the project ... ")
941 d_project = create_project_for_src_package(config,
945 logger.write("Done\n")
# Embed a copy of salomeTools itself so the package is self-bootstrapping.
948 tmp_sat = add_salomeTools(config, tmp_working_dir)
949 d_sat = {"salomeTools" : (tmp_sat, "sat")}
951 # Add a sat symbolic link if not win
952 if not src.architecture.is_windows():
956 # In the jobs, os.getcwd() can fail
957 t = config.LOCAL.workdir
958 os.chdir(tmp_working_dir)
960 # create a symlink, to avoid reference with "salomeTool/.."
962 if os.path.lexists("ARCHIVES"):
963 os.remove("ARCHIVES")
964 os.symlink("../ARCHIVES", "ARCHIVES")
967 d_sat["sat archive link"] = (os.path.join(tmp_working_dir,"PROJECT", "ARCHIVES"),
968 os.path.join("PROJECT", "ARCHIVES"))
970 d_source = src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
973 def get_archives(config, logger):
974 '''Find all the products that are get using an archive and all the products
975 that are get using a vcs (git, cvs, svn) repository.
977 :param config Config: The global configuration.
978 :param logger Logger: the logging instance
979 :return: the dictionary {name_product :
980 (local path of its archive, path in the package of its archive )}
981 and the list of specific configuration corresponding to the vcs
# NOTE(review): several original lines are elided from this listing (976,
# 982-984, 988-990, 994, 998-999, 1006-1007, 1020, 1034, 1038, 1042);
# the initialisations of d_archives and l_pinfo_vcs are among them — confirm
# against the full file before editing.
985 # Get the list of product informations
986 l_products_name = config.APPLICATION.products.keys()
987 l_product_info = src.product.get_products_infos(l_products_name,
991 for p_name, p_info in l_product_info:
992 # skip product with property not_in_package set to yes
993 if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
995 # ignore the native and fixed products
996 if (src.product.product_is_native(p_info)
997 or src.product.product_is_fixed(p_info)):
1000 # skip product if git server misses non opensource products
1001 is_not_prod_opensource = src.product.product_is_not_opensource(p_info)
1002 git_server = src.get_git_server(config,logger)
1003 has_git_server_non_opensource = src.check_git_repository_has_non_opensource( config, git_server)
1004 if has_git_server_non_opensource and is_not_prod_opensource:
# NOTE(review): 'product' is not defined anywhere in this function — the
# first format argument below most likely should be 'p_name'; confirm.
1005 logger.warning("%s is a closed-source software and is not available on %s" % (product, git_server))
1008 if p_info.get_source == "archive":
1009 archive_path = p_info.archive_info.archive_name
1010 archive_name = os.path.basename(archive_path)
# map product name -> (archive path on disk, relative path inside the package)
1011 d_archives[p_name] = (archive_path,
1012 os.path.join(ARCHIVE_DIR, archive_name))
1013 if (src.appli_test_property(config,"pip", "yes") and
1014 src.product.product_test_property(p_info,"pip", "yes")):
1015 # if pip mode is activated, and product is managed by pip
1016 pip_wheels_dir=os.path.join(config.LOCAL.archive_dir,"wheels")
# an optional archive_prefix overrides the product name in the wheel glob
1017 if "archive_prefix" in p_info.archive_info and p_info.archive_info.archive_prefix:
1018 pip_wheel_pattern=os.path.join(pip_wheels_dir,
1019 "%s-%s*" % (p_info.archive_info.archive_prefix, p_info.version))
1021 pip_wheel_pattern=os.path.join(pip_wheels_dir,
1022 "%s-%s*" % (p_info.name, p_info.version))
1023 pip_wheel_path=glob.glob(pip_wheel_pattern)
1024 msg_pip_not_found="Error in get_archive, pip wheel for "\
1025 "product %s-%s was not found in %s directory"
1026 msg_pip_two_or_more="Error in get_archive, several pip wheels for "\
1027 "product %s-%s were found in %s directory"
# exactly one matching wheel is required: zero or several is a fatal error
1028 if len(pip_wheel_path)==0:
1029 raise src.SatException(msg_pip_not_found %\
1030 (p_info.name, p_info.version, pip_wheels_dir))
1031 if len(pip_wheel_path)>1:
1032 raise src.SatException(msg_pip_two_or_more %\
1033 (p_info.name, p_info.version, pip_wheels_dir))
1035 pip_wheel_name=os.path.basename(pip_wheel_path[0])
# the wheel is shipped under ARCHIVES/wheels/ inside the package
1036 d_archives[p_name+" (pip wheel)"]=(pip_wheel_path[0],
1037 os.path.join(ARCHIVE_DIR, "wheels", pip_wheel_name))
1039 # this product is not managed by archive,
1040 # an archive of the vcs directory will be created by get_archive_vcs
1041 l_pinfo_vcs.append((p_name, p_info))
1043 return d_archives, l_pinfo_vcs
1045 def add_salomeTools(config, tmp_working_dir):
1046 '''Prepare a version of salomeTools that has a specific local.pyconf file
1047 configured for a source package.
1049 :param config Config: The global configuration.
1050 :param tmp_working_dir str: The temporary local directory containing some
1051 specific directories or files needed in the
1053 :return: The path to the local salomeTools directory to add in the package
1056 # Copy sat in the temporary working directory
1057 sat_tmp_path = src.Path(os.path.join(tmp_working_dir, "salomeTools"))
1058 sat_running_path = src.Path(config.VARS.salometoolsway)
1059 sat_running_path.copy(sat_tmp_path)
1061 # Update the local.pyconf file that contains the path to the project
1062 local_pyconf_name = "local.pyconf"
1063 local_pyconf_dir = os.path.join(tmp_working_dir, "salomeTools", "data")
1064 local_pyconf_file = os.path.join(local_pyconf_dir, local_pyconf_name)
1065 # Remove the .pyconf file in the root directory of salomeTools if there is
1066 # any. (For example when launching jobs, a pyconf file describing the jobs
1067 # can be here and is not useful)
1068 files_or_dir_SAT = os.listdir(os.path.join(tmp_working_dir, "salomeTools"))
1069 for file_or_dir in files_or_dir_SAT:
1070 if file_or_dir.endswith(".pyconf") or file_or_dir.endswith(".txt"):
# NOTE(review): the remaining os.path.join arguments (lines 1072-1073,
# presumably "salomeTools" and file_or_dir) are elided from this listing.
1071 file_path = os.path.join(tmp_working_dir,
1074 os.remove(file_path)
# overwrite data/local.pyconf with the packaged template
# NOTE(review): ff.close() (line 1078 area) is not visible in this listing —
# confirm the handle is closed; a 'with open(...)' would be preferable.
1076 ff = open(local_pyconf_file, "w")
1077 ff.write(LOCAL_TEMPLATE)
1080 return sat_tmp_path.path
1082 def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir):
1083 '''For sources package that require that all products are get using an
1084 archive, one has to create some archive for the vcs products.
1085 So this method calls the clean and source command of sat and then create
1088 :param l_pinfo_vcs List: The list of specific configuration corresponding to
1090 :param sat Sat: The Sat instance that can be called to clean and source the
1092 :param config Config: The global configuration.
1093 :param logger Logger: the logging instance
1094 :param tmp_working_dir str: The temporary local directory containing some
1095 specific directories or files needed in the
1097 :return: the dictionary that stores all the archives to add in the source
1098 package. {label : (path_on_local_machine, path_in_archive)}
1101 # clean the source directory of all the vcs products, then use the source
1102 # command and thus construct an archive that will not contain the patches
1103 l_prod_names = [pn for pn, __ in l_pinfo_vcs]
# dead branch kept for reference: cleaning user/SOURCES was judged dangerous;
# the work is done in tmp_local_working_dir instead (see below)
1104 if False: # clean is dangerous in user/SOURCES, fixed in tmp_local_working_dir
1105 logger.write(_("\nclean sources\n"))
1106 args_clean = config.VARS.application
1107 args_clean += " --sources --products "
1108 args_clean += ",".join(l_prod_names)
1109 logger.write("WARNING: get_archives_vcs clean\n '%s'\n" % args_clean, 1)
1110 sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)
1113 logger.write(_("get sources\n"))
1114 args_source = config.VARS.application
1115 args_source += " --products "
1116 args_source += ",".join(l_prod_names)
# temporarily redirect the application workdir to a local tmp_package dir so
# vcs checkouts do not pollute the user's SOURCES; restored near the end
1117 svgDir = sat.cfg.APPLICATION.workdir
1118 tmp_local_working_dir = os.path.join(sat.cfg.APPLICATION.workdir, "tmp_package") # to avoid too much big files in /tmp
1119 sat.cfg.APPLICATION.workdir = tmp_local_working_dir
1120 # DBG.write("SSS sat config.APPLICATION.workdir", sat.cfg.APPLICATION, True)
1121 # DBG.write("sat config id", id(sat.cfg), True)
1122 # shit as config is not same id() as for sat.source()
1123 # sat.source(args_source, batch=True, verbose=5, logger_add_link = logger)
1125 source.run(args_source, sat, logger) #use this mode as runner.cfg reference
1127 # make the new archives
# NOTE(review): the initialisation of d_archives_vcs (line 1128) is elided
# from this listing.
1129 for pn, pinfo in l_pinfo_vcs:
1130 path_archive = make_archive(pn, pinfo, tmp_local_working_dir)
1131 logger.write("make archive vcs '%s'\n" % path_archive)
1132 d_archives_vcs[pn] = (path_archive,
1133 os.path.join(ARCHIVE_DIR, pn + ".tgz"))
# restore the original workdir; NOTE(review): not inside try/finally, so an
# exception above would leave sat.cfg pointing at tmp_package — confirm
1134 sat.cfg.APPLICATION.workdir = svgDir
1135 # DBG.write("END sat config", sat.cfg.APPLICATION, True)
1136 return d_archives_vcs
1138 def make_bin_archive(prod_name, prod_info, where):
1139 '''Create an archive of a product by searching its source directory.
1141 :param prod_name str: The name of the product.
1142 :param prod_info Config: The specific configuration corresponding to the
1144 :param where str: The path of the repository where to put the resulting
1146 :return: The path of the resulting archive
1149 path_targz_prod = os.path.join(where, prod_name + PACKAGE_EXT)
1150 tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
1151 bin_path = prod_info.install_dir
# NOTE(review): arcname is set to the full on-disk archive path, so members
# are stored under that path inside the tarball; 'prod_name' looks more
# likely intended — confirm against callers. tar_prod.close() is also not
# visible in this listing (line 1153 area) — confirm the archive is closed.
1152 tar_prod.add(bin_path, arcname=path_targz_prod)
1154 return path_targz_prod
1156 def make_archive(prod_name, prod_info, where):
1157 '''Create an archive of a product by searching its source directory.
1159 :param prod_name str: The name of the product.
1160 :param prod_info Config: The specific configuration corresponding to the
1162 :param where str: The path of the repository where to put the resulting
1164 :return: The path of the resulting archive
1167 path_targz_prod = os.path.join(where, prod_name + PACKAGE_EXT)
1168 tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
1169 local_path = prod_info.source_dir
# NOTE(review): lines 1170/1172/1174/1176 are elided; presumably an
# 'if old_python: ... else: ...' selects between the legacy 'exclude='
# keyword of TarFile.add (Python 2.6, removed in Python 3) and the modern
# 'filter=' callback, plus the arcname arguments. tar_prod.close() is not
# visible either (line 1178 area) — confirm against the full file.
1171 tar_prod.add(local_path,
1173 exclude=exclude_VCS_and_extensions_26)
1175 tar_prod.add(local_path,
1177 filter=exclude_VCS_and_extensions)
1179 return path_targz_prod
1181 def create_project_for_src_package(config, tmp_working_dir, with_vcs, with_ftp):
1182 '''Create a specific project for a source package.
1184 :param config Config: The global configuration.
1185 :param tmp_working_dir str: The temporary local directory containing some
1186 specific directories or files needed in the
1188 :param with_vcs boolean: True if the package is with vcs products (not
1189 transformed into archive products)
1190 :param with_ftp boolean: True if the package use ftp servers to get archives
1191 :return: The dictionary
1192 {"project" : (produced project, project path in the archive)}
1196 # Create in the working temporary directory the full project tree
1197 project_tmp_dir = os.path.join(tmp_working_dir, PROJECT_DIR)
# NOTE(review): the second os.path.join argument of each *_tmp_dir below
# (lines 1199, 1201-1202, 1204-1205, 1207-1208, 1210-1211, 1213) is elided
# from this listing.
1198 products_pyconf_tmp_dir = os.path.join(project_tmp_dir,
1200 compil_scripts_tmp_dir = os.path.join(project_tmp_dir,
1203 post_scripts_tmp_dir = os.path.join(project_tmp_dir,
1206 env_scripts_tmp_dir = os.path.join(project_tmp_dir,
1209 patches_tmp_dir = os.path.join(project_tmp_dir,
1212 application_tmp_dir = os.path.join(project_tmp_dir,
# NOTE(review): products_pyconf_tmp_dir and patches_tmp_dir do not appear in
# the visible part of the creation list below (line 1218 is elided) — confirm
# they are created before use.
1214 for directory in [project_tmp_dir,
1215 compil_scripts_tmp_dir,
1216 env_scripts_tmp_dir,
1217 post_scripts_tmp_dir,
1219 application_tmp_dir]:
1220 src.ensure_path_exists(directory)
1222 # Create the pyconf that contains the information of the project
1223 project_pyconf_name = "project.pyconf"
1224 project_pyconf_file = os.path.join(project_tmp_dir, project_pyconf_name)
1225 ff = open(project_pyconf_file, "w")
1226 ff.write(PROJECT_TEMPLATE)
# optionally expose the ftp servers as a colon-separated ARCHIVEFTP entry
1227 if with_ftp and len(config.PATHS.ARCHIVEFTP) > 0:
1228 ftp_path='ARCHIVEFTP : "'+config.PATHS.ARCHIVEFTP[0]
1229 for ftpserver in config.PATHS.ARCHIVEFTP[1:]:
1230 ftp_path=ftp_path+":"+ftpserver
1232 ff.write("# ftp servers where to search for prerequisite archives\n")
1234 # add licence paths if any
1235 if len(config.PATHS.LICENCEPATH) > 0:
1236 licence_path='LICENCEPATH : "'+config.PATHS.LICENCEPATH[0]
1237 for path in config.PATHS.LICENCEPATH[1:]:
1238 licence_path=licence_path+":"+path
1240 ff.write("\n# Where to search for licences\n")
1241 ff.write(licence_path)
# NOTE(review): ff.close() is not visible in this listing (lines 1242-1245
# are elided) — confirm the project pyconf handle is closed.
1246 # Loop over the products to get there pyconf and all the scripts
1247 # (compilation, environment, patches)
1248 # and create the pyconf file to add to the project
1249 lproducts_name = config.APPLICATION.products.keys()
1250 l_products = src.product.get_products_infos(lproducts_name, config)
1251 for p_name, p_info in l_products:
1252 # skip product with property not_in_package set to yes
1253 if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
# NOTE(review): some arguments of the call below (lines 1256-1258, 1262,
# presumably p_info, config, with_vcs and patches_tmp_dir) are elided.
1255 find_product_scripts_and_pyconf(p_name,
1259 compil_scripts_tmp_dir,
1260 env_scripts_tmp_dir,
1261 post_scripts_tmp_dir,
1263 products_pyconf_tmp_dir)
1265 # for the application pyconf, we write directly the config
1266 # don't search for the original pyconf file
1267 # to avoid problems with overwrite sections and rm_products key
1268 write_application_pyconf(config, application_tmp_dir)
1270 d_project = {"project" : (project_tmp_dir, PROJECT_DIR )}
1273 def find_product_scripts_and_pyconf(p_name,
1277 compil_scripts_tmp_dir,
1278 env_scripts_tmp_dir,
1279 post_scripts_tmp_dir,
1281 products_pyconf_tmp_dir):
1282 '''Create a specific pyconf file for a given product. Get its environment
1283 script, its compilation script and patches and put it in the temporary
1284 working directory. This method is used in the source package in order to
1285 construct the specific project.
1287 :param p_name str: The name of the product.
1288 :param p_info Config: The specific configuration corresponding to the
1290 :param config Config: The global configuration.
1291 :param with_vcs boolean: True if the package is with vcs products (not
1292 transformed into archive products)
1293 :param compil_scripts_tmp_dir str: The path to the temporary compilation
1294 scripts directory of the project.
1295 :param env_scripts_tmp_dir str: The path to the temporary environment script
1296 directory of the project.
1297 :param post_scripts_tmp_dir str: The path to the temporary post-processing script
1298 directory of the project.
1299 :param patches_tmp_dir str: The path to the temporary patch scripts
1300 directory of the project.
1301 :param products_pyconf_tmp_dir str: The path to the temporary product
1302 scripts directory of the project.
1305 # read the pyconf of the product
1306 product_pyconf_cfg = src.pyconf.Config(p_info.from_file)
1308 # find the compilation script if any
1309 if src.product.product_has_script(p_info):
1310 compil_script_path = src.Path(p_info.compil_script)
1311 compil_script_path.copy(compil_scripts_tmp_dir)
1313 # find the environment script if any
1314 if src.product.product_has_env_script(p_info):
1315 env_script_path = src.Path(p_info.environ.env_script)
1316 env_script_path.copy(env_scripts_tmp_dir)
1318 # find the post script if any
1319 if src.product.product_has_post_script(p_info):
1320 post_script_path = src.Path(p_info.post_script)
1321 post_script_path.copy(post_scripts_tmp_dir)
1323 # find the patches if any
1324 if src.product.product_has_patches(p_info):
1325 patches = src.pyconf.Sequence()
1326 for patch_path in p_info.patches:
1327 p_path = src.Path(patch_path)
1328 p_path.copy(patches_tmp_dir)
1329 patches.append(os.path.basename(patch_path), "")
# in non-vcs packages every vcs product is rewritten as an archive product so
# the source package is self-contained
1331 if (not with_vcs) and src.product.product_is_vcs(p_info):
1332 # in non vcs mode, if the product is not archive, then make it become archive.
1334 # depending upon the incremental mode, select impacted sections
1335 if "properties" in p_info and "incremental" in p_info.properties and\
1336 p_info.properties.incremental == "yes":
1337 sections = ["default", "default_win", p_info.section, p_info.section+"_win"]
1339 sections = [p_info.section]
1340 for section in sections:
1341 if section in product_pyconf_cfg and "get_source" in product_pyconf_cfg[section]:
# NOTE(review): the format arguments of the DBG.write call (line 1343) are
# elided from this listing.
1342 DBG.write("sat package set archive mode to archive for product %s and section %s" %\
1344 product_pyconf_cfg[section].get_source = "archive"
1345 if not "archive_info" in product_pyconf_cfg[section]:
1346 product_pyconf_cfg[section].addMapping("archive_info",
1347 src.pyconf.Mapping(product_pyconf_cfg),
1349 product_pyconf_cfg[section].archive_info.archive_name =\
1350 p_info.name + ".tgz"
1352 # save git repositories for vcs products, even if archive is not in VCS mode
1353 # in this case the user will be able to change get_source flag and work with git
1354 if src.product.product_is_vcs(p_info):
1355 # in vcs mode we must replace explicitely the git server url
1356 # (or it will not be found later because project files are not exported in archives)
1357 for section in product_pyconf_cfg:
1358 # replace in all sections of the product pyconf the git repo definition by its substitued value (found in p_info)
1359 if "git_info" in product_pyconf_cfg[section]:
1360 for repo in product_pyconf_cfg[section].git_info:
1361 if repo in p_info.git_info:
1362 product_pyconf_cfg[section].git_info[repo] = p_info.git_info[repo]
1364 # write the pyconf file to the temporary project location
# NOTE(review): the file-name argument (line 1366) and ff.close() (lines
# 1370-1371 area) are elided from this listing — confirm the handle is closed.
1365 product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir,
1367 ff = open(product_tmp_pyconf_path, 'w')
1368 ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1369 product_pyconf_cfg.__save__(ff, 1)
1373 def write_application_pyconf(config, application_tmp_dir):
1374 '''Write the application pyconf file in the specific temporary
1375 directory containing the specific project of a source package.
1377 :param config Config: The global configuration.
1378 :param application_tmp_dir str: The path to the temporary application
1379 scripts directory of the project.
1381 application_name = config.VARS.application
1382 # write the pyconf file to the temporary application location
1383 application_tmp_pyconf_path = os.path.join(application_tmp_dir,
1384 application_name + ".pyconf")
1385 with open(application_tmp_pyconf_path, 'w') as f:
1386 f.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1387 res = src.pyconf.Config()
# work on a deep copy so the in-memory global config is left untouched
1388 app = src.pyconf.deepCopyMapping(config.APPLICATION)
1390 # set base mode to "no" for the archive
# NOTE(review): the actual base assignment (lines 1391-1392) and the
# Reference arguments for workdir (lines 1395-1397) are elided from this
# listing.
1393 # Change the workdir
1394 app.workdir = src.pyconf.Reference(
# save unevaluated so pyconf references ($PWD, ...) stay symbolic in the file
1398 res.addMapping("APPLICATION", app, "")
1399 res.__save__(f, evaluated=False)
1402 def sat_package(config, tmp_working_dir, options, logger):
1403 '''Prepare a dictionary that stores all the needed directories and files to
1404 add in a salomeTool package.
1406 :param tmp_working_dir str: The temporary local working directory
1407 :param options OptResult: the options of the launched command
1408 :return: the dictionary that stores all the needed directories and files to
1409 add in a salomeTool package.
1410 {label : (path_on_local_machine, path_in_archive)}
# NOTE(review): the initialisation of d_project (lines 1411-1414 area) is
# elided from this listing.
1415 # we include sat himself
1416 d_project["all_sat"]=(config.VARS.salometoolsway, "")
1418 # and we overwrite local.pyconf with a clean version.
1419 local_pyconf_tmp_path = os.path.join(tmp_working_dir, "local.pyconf")
1420 local_file_path = os.path.join(config.VARS.datadir, "local.pyconf")
1421 local_cfg = src.pyconf.Config(local_file_path)
# reset user-specific settings so the packaged sat starts from defaults
1422 local_cfg.PROJECTS.project_file_paths=src.pyconf.Sequence(local_cfg.PROJECTS)
1423 local_cfg.LOCAL["base"] = "default"
1424 local_cfg.LOCAL["workdir"] = "default"
1425 local_cfg.LOCAL["log_dir"] = "default"
1426 local_cfg.LOCAL["archive_dir"] = "default"
1427 local_cfg.LOCAL["VCS"] = "None"
1428 local_cfg.LOCAL["tag"] = src.get_salometool_version(config)
1430 # if the archive contains a project, we write its relative path in local.pyconf
# NOTE(review): the guarding condition (line 1431, presumably
# 'if options.project:') is elided from this listing.
1432 project_arch_path = os.path.join("projects", options.project,
1433 os.path.basename(options.project_file_path))
1434 local_cfg.PROJECTS.project_file_paths.append(project_arch_path, "")
# NOTE(review): ff.close() (line 1438 area) is not visible here — confirm.
1436 ff = open(local_pyconf_tmp_path, 'w')
1437 local_cfg.__save__(ff, 1)
1439 d_project["local.pyconf"]=(local_pyconf_tmp_path, "data/local.pyconf")
1443 def project_package(config, name_project, project_file_path, ftp_mode, tmp_working_dir, embedded_in_sat, logger):
1444 '''Prepare a dictionary that stores all the needed directories and files to
1445 add in a project package.
1447 :param project_file_path str: The path to the local project.
1448 :param ftp_mode boolean: Do not embed archives, the archive will rely on ftp mode to retrieve them.
1449 :param tmp_working_dir str: The temporary local directory containing some
1450 specific directories or files needed in the
1452 :param embedded_in_sat boolean : the project package is embedded in a sat package
1453 :return: the dictionary that stores all the needed directories and files to
1454 add in a project package.
1455 {label : (path_on_local_machine, path_in_archive)}
1459 # Read the project file and get the directories to add to the package
# NOTE(review): this lookup appears to sit inside a try/except whose header
# and handler lines (1460-1461, 1463-1464) are elided; on failure the project
# is read directly from project_file_path below. The initialisation of
# d_project is also not visible in this listing.
1462 project_pyconf_cfg = config.PROJECTS.projects.__getattr__(name_project)
1465 WARNING: inexisting config.PROJECTS.projects.%s, try to read now from:\n%s\n""" % (name_project, project_file_path))
1466 project_pyconf_cfg = src.pyconf.Config(project_file_path)
1467 project_pyconf_cfg.PWD = os.path.dirname(project_file_path)
# map each project path key to its destination directory inside the package
1469 paths = {"APPLICATIONPATH" : "applications",
1470 "PRODUCTPATH" : "products",
1472 "MACHINEPATH" : "machines"}
# NOTE(review): the condition guarding this (line 1473, presumably
# 'if not ftp_mode:') is elided from this listing.
1474 paths["ARCHIVEPATH"] = "archives"
1476 # Loop over the project paths and add it
1477 project_file_name = os.path.basename(project_file_path)
# NOTE(review): the for-loop header (line 1478, presumably
# 'for path in paths:') and the if/else structure lines (1480-1481, 1484)
# are elided from this listing.
1479 if path not in project_pyconf_cfg:
# embedded in sat: store the project under projects/<name>/ in the archive
1482 dest_path = os.path.join("projects", name_project, paths[path])
1483 project_file_dest = os.path.join("projects", name_project, project_file_name)
1485 dest_path = paths[path]
1486 project_file_dest = project_file_name
1488 # Add the directory to the files to add in the package
1489 d_project[path] = (project_pyconf_cfg[path], dest_path)
1491 # Modify the value of the path in the package
# NOTE(review): the first Reference arguments (lines 1493-1494) are elided.
1492 project_pyconf_cfg[path] = src.pyconf.Reference(
1495 'project_path + "/' + paths[path] + '"')
1497 # Modify some values
1498 if "project_path" not in project_pyconf_cfg:
1499 project_pyconf_cfg.addMapping("project_path",
1500 src.pyconf.Mapping(project_pyconf_cfg),
1502 project_pyconf_cfg.project_path = src.pyconf.Reference(project_pyconf_cfg,
1505 # we don't want to export these two fields
1506 project_pyconf_cfg.__delitem__("file_path")
1507 project_pyconf_cfg.__delitem__("PWD")
# NOTE(review): the condition guarding this deletion (line 1508, presumably
# 'if ftp_mode:') is elided from this listing.
1509 project_pyconf_cfg.__delitem__("ARCHIVEPATH")
1511 # Write the project pyconf file
1512 project_pyconf_tmp_path = os.path.join(tmp_working_dir, project_file_name)
1513 ff = open(project_pyconf_tmp_path, 'w')
1514 ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1515 project_pyconf_cfg.__save__(ff, 1)
# NOTE(review): ff.close() (line 1516 area) is not visible here — confirm.
1517 d_project["Project hat file"] = (project_pyconf_tmp_path, project_file_dest)
1521 def add_readme(config, options, where):
# Build the package README in 'where' by substituting runtime values (user,
# date, version, application, launcher, ...) into the README templates
# shipped with salomeTools, one section per selected package kind.
1522 readme_path = os.path.join(where, "README")
1523 with codecs.open(readme_path, "w", 'utf-8') as f:
1525 # templates for building the header
# NOTE(review): the 'readme_header' triple-quoted template assignment is only
# partially visible — several of its lines (1524, 1526, 1528-1529, 1531,
# 1534-1535) are elided from this listing.
1527 # This package was generated with sat $version
1530 # Distribution : $dist
1532 In the following, $$ROOT represents the directory where you have installed
1533 SALOME (the directory where this file is located).
# on Windows the installation root placeholder is %ROOT% instead of $ROOT
1536 if src.architecture.is_windows():
1537 readme_header = readme_header.replace('$$ROOT','%ROOT%')
1538 readme_compilation_with_binaries="""
1540 compilation based on the binaries used as prerequisites
1541 =======================================================
1543 If you fail to compile the complete application (for example because
1544 you are not root on your system and cannot install missing packages), you
1545 may try a partial compilation based on the binaries.
1546 For that it is necessary to copy the binaries from BINARIES to INSTALL,
1547 and do some substitutions on cmake and .la files (replace the build directories
1549 The procedure to do it is:
1550 1) Remove or rename INSTALL directory if it exists
1551 2) Execute the shell script install_bin.sh:
1554 3) Use SalomeTool (as explained in Sources section) and compile only the
1555 modules you need to (with -p option)
1558 readme_header_tpl=string.Template(readme_header)
1559 readme_template_path_bin = os.path.join(config.VARS.internal_dir,
1560 "README_BIN.template")
1561 readme_template_path_bin_launcher = os.path.join(config.VARS.internal_dir,
1562 "README_LAUNCHER.template")
1563 readme_template_path_bin_virtapp = os.path.join(config.VARS.internal_dir,
1564 "README_BIN_VIRTUAL_APP.template")
1565 readme_template_path_src = os.path.join(config.VARS.internal_dir,
1566 "README_SRC.template")
1567 readme_template_path_pro = os.path.join(config.VARS.internal_dir,
1568 "README_PROJECT.template")
1569 readme_template_path_sat = os.path.join(config.VARS.internal_dir,
1570 "README_SAT.template")
1572 # prepare substitution dictionary
# NOTE(review): the initialisation 'd = dict()' (line 1573) is elided.
1574 d['user'] = config.VARS.user
1575 d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
1576 d['version'] = src.get_salometool_version(config)
1577 d['dist'] = config.VARS.dist
1578 f.write(readme_header_tpl.substitute(d)) # write the general header (common)
1580 if options.binaries or options.sources:
1581 d['application'] = config.VARS.application
1582 d['BINARIES'] = config.INTERNAL.config.binary_dir
1583 d['SEPARATOR'] = config.VARS.sep
1584 if src.architecture.is_windows():
1585 d['operatingSystem'] = 'Windows'
1586 d['PYTHON3'] = 'python3'
1587 d['ROOT'] = '%ROOT%'
1589 d['operatingSystem'] = 'Linux'
1592 f.write("# Application: " + d['application'] + "\n")
1593 if 'KERNEL' in config.APPLICATION.products:
1594 VersionSalome = src.get_salome_version(config)
1595 # Case where SALOME has the launcher that uses the SalomeContext API
1596 if VersionSalome >= MMP([7,3,0]):
1597 d['launcher'] = config.APPLICATION.profile.launcher_name
1599 d['virtual_app'] = 'runAppli' # this info is not used now)
1601 # write the specific sections
1602 if options.binaries:
1603 f.write(src.template.substitute(readme_template_path_bin, d))
# virtual-app installs get the virtual-app README, launcher installs the
# launcher README
1604 if "virtual_app" in d:
1605 f.write(src.template.substitute(readme_template_path_bin_virtapp, d))
1607 f.write(src.template.substitute(readme_template_path_bin_launcher, d))
1610 f.write(src.template.substitute(readme_template_path_src, d))
1612 if options.binaries and options.sources and not src.architecture.is_windows():
1613 f.write(readme_compilation_with_binaries)
# NOTE(review): the guarding conditions for the project and sat sections
# (lines 1615, 1618) and the final 'return readme_path' (line 1620 area) are
# elided from this listing.
1616 f.write(src.template.substitute(readme_template_path_pro, d))
1619 f.write(src.template.substitute(readme_template_path_sat, d))
1623 def update_config(config, logger, prop, value):
1624 '''Remove from config.APPLICATION.products the products that have the property given as input.
1626 :param config Config: The global config.
1627 :param prop str: The property to filter
1628 :param value str: The value of the property to filter
1630 # if there is no APPLICATION (ex sat package -t) : nothing to do
1631 if "APPLICATION" in config:
# two passes: collect first, delete afterwards, so the products mapping is
# not mutated while being iterated
1632 l_product_to_remove = []
1633 for product_name in config.APPLICATION.products.keys():
1634 prod_cfg = src.product.get_product_config(config, product_name)
1635 if src.get_property_in_product_cfg(prod_cfg, prop) == value:
1636 l_product_to_remove.append(product_name)
1637 for product_name in l_product_to_remove:
1638 config.APPLICATION.products.__delitem__(product_name)
1639 logger.write("Remove product %s with property %s\n" % (product_name, prop), 5)
# NOTE(review): the enclosing function header (line 1641, presumably
# 'def description():') is elided from this listing; this is the help text
# returned when salomeTools is called with --help for the package command.
1642 '''method that is called when salomeTools is called with --help option.
1644 :return: The text to display for the package command description.
1648 The package command creates a tar file archive of a product.
1649 There are four kinds of archive, which can be mixed:
1651 1 - The binary archive.
1652 It contains the product installation directories plus a launcher.
1653 2 - The sources archive.
1654 It contains the product archives, a project (the application plus salomeTools).
1655 3 - The project archive.
1656 It contains a project (give the project file path as argument).
1657 4 - The salomeTools archive.
1658 It contains code utility salomeTools.
1661 >> sat package SALOME-master --binaries --sources""")
1663 def run(args, runner, logger):
1664 '''method that is called when salomeTools is called with package parameter.
1668 (options, args) = parser.parse_args(args)
1671 # Check that a type of package is called, and only one
1672 all_option_types = (options.binaries,
1674 options.project not in ["", None],
1676 options.bin_products)
1678 # Check if no option for package type
1679 if all_option_types.count(True) == 0:
1680 msg = _("Error: Precise a type for the package\nUse one of the "
1681 "following options: --binaries, --sources, --project or"
1682 " --salometools, --bin_products")
1683 logger.write(src.printcolors.printcError(msg), 1)
1684 logger.write("\n", 1)
1686 do_create_package = options.binaries or options.sources or options.project or options.sat
1688 if options.bin_products:
1689 ret = bin_products_archives(runner.cfg, logger, options.with_vcs)
1692 if not do_create_package:
1695 # continue to create a tar.gz package
1697 # The repository where to put the package if not Binary or Source
1698 package_default_path = runner.cfg.LOCAL.workdir
1699 # if the package contains binaries or sources:
1700 if options.binaries or options.sources or options.bin_products:
1701 # Check that the command has been called with an application
1702 src.check_config_has_application(runner.cfg)
1704 # Display information
1705 logger.write(_("Packaging application %s\n") % src.printcolors.printcLabel(
1706 runner.cfg.VARS.application), 1)
1708 # Get the default directory where to put the packages
1709 package_default_path = os.path.join(runner.cfg.APPLICATION.workdir, "PACKAGE")
1710 src.ensure_path_exists(package_default_path)
1712 # if the package contains a project:
1714 # check that the project is visible by SAT
1715 projectNameFile = options.project + ".pyconf"
1717 for i in runner.cfg.PROJECTS.project_file_paths:
1718 baseName = os.path.basename(i)
1719 if baseName == projectNameFile:
1723 if foundProject is None:
1724 local_path = os.path.join(runner.cfg.VARS.salometoolsway, "data", "local.pyconf")
1725 msg = _("""ERROR: the project %(1)s is not visible by salomeTools.
1729 Please add it in file:
1731 {"1": options.project, "2": "\n ".join(runner.cfg.PROJECTS.project_file_paths), "3": local_path})
1732 logger.write(src.printcolors.printcError(msg), 1)
1733 logger.write("\n", 1)
1736 options.project_file_path = foundProject
1737 src.printcolors.print_value(logger, "Project path", options.project_file_path, 2)
1739 # Remove the products that are filtered by the --without_properties option
1740 if options.without_properties:
1741 prop, value = options.without_properties
1742 update_config(runner.cfg, logger, prop, value)
1744 # Remove from config the products that have the not_in_package property
1745 update_config(runner.cfg, logger, "not_in_package", "yes")
1747 # get the name of the archive or build it
1749 if os.path.basename(options.name) == options.name:
1750 # only a name (not a path)
1751 archive_name = options.name
1752 dir_name = package_default_path
1754 archive_name = os.path.basename(options.name)
1755 dir_name = os.path.dirname(options.name)
1757 # suppress extension
1758 if archive_name[-len(".tgz"):] == ".tgz":
1759 archive_name = archive_name[:-len(".tgz")]
1760 if archive_name[-len(".tar.gz"):] == ".tar.gz":
1761 archive_name = archive_name[:-len(".tar.gz")]
1765 dir_name = package_default_path
1766 if options.binaries or options.sources:
1767 archive_name = runner.cfg.APPLICATION.name
1769 if options.binaries:
1770 archive_name += "-"+runner.cfg.VARS.dist
1773 archive_name += "-SRC"
1774 if options.with_vcs:
1775 archive_name += "-VCS"
1778 archive_name += ("salomeTools_" + src.get_salometool_version(runner.cfg))
1783 archive_name += ("satproject_" + options.project)
1785 if len(archive_name)==0: # no option worked
1786 msg = _("Error: Cannot name the archive\n"
1787 " check if at least one of the following options was "
1788 "selected : --binaries, --sources, --project or"
1790 logger.write(src.printcolors.printcError(msg), 1)
1791 logger.write("\n", 1)
1794 path_targz = os.path.join(dir_name, archive_name + PACKAGE_EXT)
1796 src.printcolors.print_value(logger, "Package path", path_targz, 2)
1798 # Create a working directory for all files that are produced during the
1799 # package creation and that will be removed at the end of the command
1800 tmp_working_dir = os.path.join(runner.cfg.VARS.tmp_root, runner.cfg.VARS.datehour)
1801 src.ensure_path_exists(tmp_working_dir)
1802 logger.write("\n", 5)
1803 logger.write(_("The temporary working directory: %s\n" % tmp_working_dir),5)
1805 logger.write("\n", 3)
1807 msg = _("Preparation of files to add to the archive")
1808 logger.write(src.printcolors.printcLabel(msg), 2)
1809 logger.write("\n", 2)
1811 d_files_to_add={} # content of the archive
1813 # a dict to hold paths that will need to be substitute for users recompilations
1814 d_paths_to_substitute={}
1816 if options.binaries:
1817 d_bin_files_to_add = binary_package(runner.cfg,
1821 # for all binaries dir, store the substitution that will be required
1822 # for extra compilations
1823 for key in d_bin_files_to_add:
1824 if key.endswith("(bin)"):
1825 source_dir = d_bin_files_to_add[key][0]
1826 path_in_archive = d_bin_files_to_add[key][1].replace(
1827 runner.cfg.INTERNAL.config.binary_dir + runner.cfg.VARS.dist,
1828 runner.cfg.INTERNAL.config.install_dir)
1829 if os.path.basename(source_dir)==os.path.basename(path_in_archive):
1830 # if basename is the same we will just substitute the dirname
1831 d_paths_to_substitute[os.path.dirname(source_dir)]=\
1832 os.path.dirname(path_in_archive)
1834 d_paths_to_substitute[source_dir]=path_in_archive
1836 d_files_to_add.update(d_bin_files_to_add)
1838 d_files_to_add.update(source_package(runner,
1843 if options.binaries:
1844 # for archives with bin and sources we provide a shell script able to
1845 # install binaries for compilation
1846 file_install_bin=produce_install_bin_file(runner.cfg,logger,
1848 d_paths_to_substitute,
1850 d_files_to_add.update({"install_bin" : (file_install_bin, "install_bin.sh")})
1851 logger.write("substitutions that need to be done later : \n", 5)
1852 logger.write(str(d_paths_to_substitute), 5)
1853 logger.write("\n", 5)
1855 # --salomeTool option is not considered when --sources is selected, as this option
1856 # already brings salomeTool!
1858 d_files_to_add.update(sat_package(runner.cfg, tmp_working_dir,
1862 DBG.write("config for package %s" % options.project, runner.cfg)
1863 d_files_to_add.update(project_package(runner.cfg, options.project, options.project_file_path, options.ftp, tmp_working_dir, options.sat, logger))
1865 if not(d_files_to_add):
1866 msg = _("Error: Empty dictionnary to build the archive!\n")
1867 logger.write(src.printcolors.printcError(msg), 1)
1868 logger.write("\n", 1)
1871 # Add the README file in the package
1872 local_readme_tmp_path = add_readme(runner.cfg, options, tmp_working_dir)
1873 d_files_to_add["README"] = (local_readme_tmp_path, "README")
1875 # Add the additional files of option add_files
1876 if options.add_files:
1877 for file_path in options.add_files:
1878 if not os.path.exists(file_path):
1879 msg = _("WARNING: the file %s is not accessible.\n" % file_path)
1881 file_name = os.path.basename(file_path)
1882 d_files_to_add[file_name] = (file_path, file_name)
1884 logger.write("\n", 2)
1885 logger.write(src.printcolors.printcLabel(_("Actually do the package")), 2)
1886 logger.write("\n", 2)
1887 logger.write("\nfiles and directories to add:\n%s\n\n" % PP.pformat(d_files_to_add), 5)
1891 # Creating the object tarfile
1892 tar = tarfile.open(path_targz, mode='w:gz')
1894 # get the filtering function if needed
1896 filter_function = exclude_VCS_and_extensions_26
1898 filter_function = exclude_VCS_and_extensions
1900 # Add the files to the tarfile object
1901 res = add_files(tar, archive_name, d_files_to_add, logger, f_exclude=filter_function)
1903 except KeyboardInterrupt:
1904 logger.write(src.printcolors.printcError("\nERROR: forced interruption\n"), 1)
1905 logger.write(_("Removing the temporary working directory '%s'... ") % tmp_working_dir, 1)
1906 # remove the working directory
1907 shutil.rmtree(tmp_working_dir)
1908 logger.write(_("OK"), 1)
1909 logger.write(_("\n"), 1)
1912 # case if no application, only package sat as 'sat package -t'
1914 app = runner.cfg.APPLICATION
1918 # unconditionaly remove the tmp_local_working_dir
1920 tmp_local_working_dir = os.path.join(app.workdir, "tmp_package")
1921 if os.path.isdir(tmp_local_working_dir):
1922 shutil.rmtree(tmp_local_working_dir)
1924 # remove the tmp directory, unless user has registered as developer
1925 if os.path.isdir(tmp_working_dir) and (not DBG.isDeveloper()):
1926 shutil.rmtree(tmp_working_dir)
1928 # Print again the path of the package
1929 logger.write("\n", 2)
1930 src.printcolors.print_value(logger, "Package path", path_targz, 2)