3 # Copyright (C) 2010-2012 CEA/DEN
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 # Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
31 from application import get_SALOME_modules
32 import src.debug as DBG
# NOTE(review): this chunk is a damaged extraction — the original file's line
# numbers are fused into the text and interior lines are missing. Recover the
# pristine file from VCS before making functional changes.
# True on legacy interpreters (Python <= 2.6); used later to select the old
# tarfile API (exclude=) instead of the modern one (filter=).
34 old_python = sys.version_info[0] == 2 and sys.version_info[1] <= 6
# Names of the directories created inside a produced package.
41 ARCHIVE_DIR = "ARCHIVES"
42 PROJECT_DIR = "PROJECT"
# Entries filtered out of every produced archive (VCS metadata); the
# extensions list is empty by default and may be extended if needed.
44 IGNORED_DIRS = [".git", ".svn"]
45 IGNORED_EXTENSIONS = []
47 PACKAGE_EXT=".tar.gz" # the extension we use for the packages
# Templates written into a source package: PROJECT_TEMPLATE becomes the
# project.pyconf of the embedded project; LOCAL_TEMPLATE the local.pyconf of
# the embedded salomeTools copy.
# NOTE(review): both string literals are truncated in this chunk — their
# closing quotes sit on lines missing from the extraction; do not edit the
# literal bodies here.
49 PROJECT_TEMPLATE = """#!/usr/bin/env python
52 # The path to the archive root directory
53 root_path : $PWD + "/../"
55 project_path : $PWD + "/"
57 # Where to search the archives of the products
58 ARCHIVEPATH : $root_path + "ARCHIVES"
59 # Where to search the pyconf of the applications
60 APPLICATIONPATH : $project_path + "applications/"
61 # Where to search the pyconf of the products
62 PRODUCTPATH : $project_path + "products/"
63 # Where to search the pyconf of the jobs of the project
64 JOBPATH : $project_path + "jobs/"
65 # Where to search the pyconf of the machines of the project
66 MACHINEPATH : $project_path + "machines/"
69 LOCAL_TEMPLATE = ("""#!/usr/bin/env python
77 archive_dir : 'default'
84 project_file_paths : [$VARS.salometoolsway + $VARS.sep + \"..\" + $VARS.sep"""
85 """ + \"""" + PROJECT_DIR + """\" + $VARS.sep + "project.pyconf"]
89 # Define all possible option for the package command : sat package <options>
# Each add_option call is: short flag, long flag, value type, destination
# attribute on the options result, help text, default value.
90 parser = src.options.Options()
91 parser.add_option('b', 'binaries', 'boolean', 'binaries',
92 _('Optional: Produce a binary package.'), False)
93 parser.add_option('f', 'force_creation', 'boolean', 'force_creation',
94 _('Optional: Only binary package: produce the archive even if '
95 'there are some missing products.'), False)
96 parser.add_option('s', 'sources', 'boolean', 'sources',
97 _('Optional: Produce a compilable archive of the sources of the '
98 'application.'), False)
# NOTE(review): the next two add_option calls are truncated by the damaged
# extraction (their default-value arguments / closing parentheses are on
# missing lines).
99 parser.add_option('', 'with_vcs', 'boolean', 'with_vcs',
100 _('Optional: Do not make archive for products in VCS mode (git, cvs, svn). '
101 'Sat prepare will use VCS mode instead to retrieve them'),
103 parser.add_option('', 'ftp', 'boolean', 'ftp',
104 _('Optional: Do not embed archives for products in archive mode.'
105 'Sat prepare will use ftp instead to retrieve them'),
107 parser.add_option('e', 'exe', 'string', 'exe',
108 _('Optional: Produce an extra launcher based upon the exe given as argument.'), "")
109 parser.add_option('p', 'project', 'string', 'project',
110 _('Optional: Produce an archive that contains a project.'), "")
111 parser.add_option('t', 'salometools', 'boolean', 'sat',
112 _('Optional: Produce an archive that contains salomeTools.'), False)
113 parser.add_option('n', 'name', 'string', 'name',
114 _('Optional: The name or full path of the archive.'), None)
115 parser.add_option('', 'add_files', 'list2', 'add_files',
116 _('Optional: The list of additional files to add to the archive.'), [])
117 parser.add_option('', 'without_properties', 'properties', 'without_properties',
118 _('Optional: Filter the products by their properties.\n\tSyntax: '
119 '--without_properties <property>:<value>'))
# NOTE(review): this body is incomplete in the chunk — the `for name in ...`
# loop header, the `already_added` initialization, the `try:` line and the
# actual tar.add(...) calls are on lines missing from the extraction. Do not
# edit without recovering the full source.
122 def add_files(tar, name_archive, d_content, logger, f_exclude=None):
123     '''Create an archive containing all directories and files that are given in
124     the d_content argument.
126     :param tar tarfile: The tarfile instance used to make the archive.
127     :param name_archive str: The name of the archive to make.
128     :param d_content dict: The dictionary that contain all directories and files
129                            to add in the archive.
131                    (path_on_local_machine, path_in_archive)
132     :param logger Logger: the logging instance
133     :param f_exclude Function: the function that filters
134     :return: 0 if success, 1 if not.
137 # get the max length of the messages in order to make the display
138 max_len = len(max(d_content.keys(), key=len))
141 # loop over each directory or file stored in the d_content dictionary
142 names = sorted(d_content.keys())
143 DBG.write("add tar names", names)
145 # used to avoid duplications (for pip install in python, or single_install_dir cases)
148 # display information
149 len_points = max_len - len(name) + 3
150 local_path, archive_path = d_content[name]
151 in_archive = os.path.join(name_archive, archive_path)
152 logger.write(name + " " + len_points * "." + " "+ in_archive + " ", 3)
153 # Get the local path and the path in archive
154 # of the directory or file to add
155 # Add it in the archive
# the key de-duplicates identical (source -> destination) additions
157 key=local_path+"->"+in_archive
158 if key not in already_added:
# old interpreters use tarfile's deprecated exclude= callback, newer
# ones the filter= callback (see exclude_VCS_and_extensions* below)
162 exclude=exclude_VCS_and_extensions_26)
166 filter=exclude_VCS_and_extensions)
167 already_added.add(key)
168 logger.write(src.printcolors.printcSuccess(_("OK")), 3)
169 except Exception as e:
170 logger.write(src.printcolors.printcError(_("KO ")), 3)
171 logger.write(str(e), 3)
173 logger.write("\n", 3)
def exclude_VCS_and_extensions_26(filename):
    '''The function that is used to exclude from package the link to the
       VCS repositories (like .git) (only for python 2.6).

    Used as the (deprecated) ``exclude=`` callback of ``tarfile.TarFile.add``
    on old interpreters.

    :param filename Str: The filename to exclude (or not).
    :return: True if the file has to be excluded, False otherwise.
    :rtype: Boolean
    '''
    # reject any path containing a VCS metadata directory name
    for dir_name in IGNORED_DIRS:
        if dir_name in filename:
            return True
    # reject ignored file extensions (IGNORED_EXTENSIONS is empty by default)
    for extension in IGNORED_EXTENSIONS:
        if filename.endswith(extension):
            return True
    return False
def exclude_VCS_and_extensions(tarinfo):
    '''The function that is used to exclude from package the link to the
       VCS repositories (like .git).

    Used as the ``filter=`` callback of ``tarfile.TarFile.add``: returning
    None drops the member from the archive, returning the tarinfo keeps it.

    :param tarinfo TarInfo: the candidate archive member.
    :return: None if the file has to be excluded, the tarinfo otherwise.
    :rtype: tarinfo or None
    '''
    filename = tarinfo.name
    # drop any member whose path contains a VCS metadata directory name
    for dir_name in IGNORED_DIRS:
        if dir_name in filename:
            return None
    # drop ignored file extensions (IGNORED_EXTENSIONS is empty by default)
    for extension in IGNORED_EXTENSIONS:
        if filename.endswith(extension):
            return None
    return tarinfo
# NOTE(review): signature continuation lines, several else: branches and the
# os.chmod(...) call are on lines missing from this damaged extraction — do
# not edit without recovering the full source.
210 def produce_relative_launcher(config,
215     '''Create a specific SALOME launcher for the binary package. This launcher
218     :param config Config: The global configuration.
219     :param logger Logger: the logging instance
220     :param file_dir str: the directory where to put the launcher
221     :param file_name str: The launcher name
222     :param binaries_dir_name str: the name of the repository where the binaries
224     :return: the path of the produced launcher
228 # set base mode to "no" for the archive - save current mode to restore it at the end
229 if "base" in config.APPLICATION:
230 base_setting=config.APPLICATION.base
233 config.APPLICATION.base="no"
235 # get KERNEL installation path
236 kernel_info = src.product.get_product_config(config, "KERNEL")
237 kernel_base_name=os.path.basename(kernel_info.install_dir)
238 if kernel_base_name.startswith("config"):
239 # case of kernel installed in base. We remove "config-i"
240 kernel_base_name=os.path.basename(os.path.dirname(kernel_info.install_dir))
242 kernel_root_dir = os.path.join(binaries_dir_name, kernel_base_name)
244 # set kernel bin dir (considering fhs property)
245 kernel_cfg = src.product.get_product_config(config, "KERNEL")
246 if src.get_property_in_product_cfg(kernel_cfg, "fhs"):
247 bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin")
249 bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin","salome")
251 # check if the application contains an application module
252 # check also if the application has a distene product,
253 # in this case get its licence file name
254 l_product_info = src.product.get_products_infos(config.APPLICATION.products.keys(), config)
255 salome_application_name="Not defined"
256 distene_licence_file_name=False
257 for prod_name, prod_info in l_product_info:
258 # look for a "salome application" and a distene product
259 if src.get_property_in_product_cfg(prod_info, "is_distene") == "yes":
260 distene_licence_file_name = src.product.product_has_licence(prod_info,
261 config.PATHS.LICENCEPATH)
262 if src.get_property_in_product_cfg(prod_info, "is_salome_application") == "yes":
263 salome_application_name=prod_info.name
265 # if the application contains an application module, we set ABSOLUTE_APPLI_PATH to it
266 # if not we set it to KERNEL_INSTALL_DIR, which is sufficient, except for salome test
267 if salome_application_name == "Not defined":
268 app_root_dir=kernel_root_dir
270 app_root_dir=os.path.join(binaries_dir_name, salome_application_name)
# extra environment passed to the launcher writer; paths are expressed
# relative to out_dir_Path so the package is relocatable
273 additional_env['sat_bin_kernel_install_dir'] = "out_dir_Path + " +\
274 config.VARS.sep + bin_kernel_install_dir
275 if "python3" in config.APPLICATION and config.APPLICATION.python3 == "yes":
276 additional_env['sat_python_version'] = 3
278 additional_env['sat_python_version'] = 2
280 additional_env['ABSOLUTE_APPLI_PATH'] = "out_dir_Path" + config.VARS.sep + app_root_dir
282 # create an environment file writer
283 writer = src.environment.FileEnvWriter(config,
289 filepath = os.path.join(file_dir, file_name)
291 writer.write_env_file(filepath,
294 additional_env=additional_env,
295 no_path_init="False",
296 for_package = binaries_dir_name)
298 # Little hack to put out_dir_Path outside the strings
299 src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' )
300 src.replace_in_file(filepath, "r'out_dir_Path + ", "out_dir_Path + r'" )
302 # A hack to put a call to a file for distene licence.
303 # It does nothing to an application that has no distene product
304 if distene_licence_file_name:
305 logger.write("Application has a distene licence file! We use it in package launcher", 5)
306 hack_for_distene_licence(filepath, distene_licence_file_name)
308 # change the rights in order to make the file executable for everybody
318 # restore modified setting by its initial value
319 config.APPLICATION.base=base_setting
# NOTE(review): incomplete in this chunk — `fileout` is used before any
# visible assignment, and the `num_line` detection, the early return for the
# no-distene case and the final rewrite of the file are on missing lines.
323 def hack_for_distene_licence(filepath, licence_file):
324     '''Replace the distene licence env variable by a call to a file.
326     :param filepath Str: The path to the launcher to modify.
# keep the original launcher next to the rewritten one
328 shutil.move(filepath, filepath + "_old")
330 filein = filepath + "_old"
331 fin = open(filein, "r")
332 fout = open(fileout, "w")
333 text = fin.readlines()
334 # Find the Distene section
336 for i,line in enumerate(text):
337 if "# Set DISTENE License" in line:
341 # No distene product, there is nothing to do
# drop the two original DISTENE lines, then splice in a runtime import of the
# licence file (importlib on Python >= 3.5, imp.load_source before that)
347 del text[num_line +1]
348 del text[num_line +1]
349 text_to_insert =""" try:
350 distene_licence_file=r"%s"
351 if sys.version_info[0] >= 3 and sys.version_info[1] >= 5:
352 import importlib.util
353 spec_dist = importlib.util.spec_from_file_location("distene_licence", distene_licence_file)
354 distene=importlib.util.module_from_spec(spec_dist)
355 spec_dist.loader.exec_module(distene)
358 distene = imp.load_source('distene_licence', distene_licence_file)
359 distene.set_distene_variables(context)
361 pass\n""" % licence_file
362 text.insert(num_line + 1, text_to_insert)
# NOTE(review): incomplete in this chunk — signature continuation lines, the
# else:/elif branches of the filename selection, the `if exe_name:` guard and
# the final os.chmod/return are on missing lines.
369 def produce_relative_env_files(config,
374     '''Create some specific environment files for the binary package. These
375     files use relative paths.
377     :param config Config: The global configuration.
378     :param logger Logger: the logging instance
379     :param file_dir str: the directory where to put the files
380     :param binaries_dir_name str: the name of the repository where the binaries
382     :param exe_name str: if given generate a launcher executing exe_name
383     :return: the list of path of the produced environment files
386 # create an environment file writer
387 writer = src.environment.FileEnvWriter(config,
392 if src.architecture.is_windows():
394 filename = "env_launch.bat"
397 filename = "env_launch.sh"
400 filename=os.path.basename(exe_name)
403 filepath = writer.write_env_file(filename,
406 for_package = binaries_dir_name)
408 # Little hack to put out_dir_Path as environment variable
409 if src.architecture.is_windows() :
410 src.replace_in_file(filepath, '"out_dir_Path', '"%out_dir_Path%' )
411 src.replace_in_file(filepath, '=out_dir_Path', '=%out_dir_Path%' )
412 src.replace_in_file(filepath, ';out_dir_Path', ';%out_dir_Path%' )
414 src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )
415 src.replace_in_file(filepath, ':out_dir_Path', ':${out_dir_Path}' )
# append the call forwarding user arguments to the given exe
418 if src.architecture.is_windows():
419 cmd="\n\nrem Launch exe with user arguments\n%s " % exe_name + "%*"
421 cmd='\n\n# Launch exe with user arguments\n%s "$*"' % exe_name
422 with open(filepath, "a") as exe_launcher:
423 exe_launcher.write(cmd)
425 # change the rights in order to make the file executable for everybody
# NOTE(review): incomplete in this chunk — signature continuation lines, the
# loop over d_sub keys, the template dict initialization, the os.chmod call
# and the return are on missing lines.
437 def produce_install_bin_file(config,
442     '''Create a bash shell script which do substitutions in BIRARIES dir
443     in order to use it for extra compilations.
445     :param config Config: The global configuration.
446     :param logger Logger: the logging instance
447     :param file_dir str: the directory where to put the files
448     :param d_sub, dict: the dictionnary that contains the substitutions to be done
449     :param file_name str: the name of the install script file
450     :return: the produced file
454 filepath = os.path.join(file_dir, file_name)
455 # open the file and write into it
456 # use codec utf-8 as sat variables are in unicode
457 with codecs.open(filepath, "w", 'utf-8') as installbin_file:
458 installbin_template_path = os.path.join(config.VARS.internal_dir,
459 "INSTALL_BIN.template")
461 # build the name of the directory that will contain the binaries
462 binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
463 # build the substitution loop
# assembles a shell for-loop doing sed-style key -> $(pwd)/value rewrites
464 loop_cmd = "for f in $(grep -RIl"
466 loop_cmd += " -e "+ key
467 loop_cmd += ' ' + config.INTERNAL.config.install_dir +\
470 loop_cmd += " s?" + key + "?$(pwd)/" + d_sub[key] + "?g\n"
471 loop_cmd += ' " $f\ndone'
474 d["BINARIES_DIR"] = binaries_dir_name
475 d["SUBSTITUTION_LOOP"]=loop_cmd
476 d["INSTALL_DIR"]=config.INTERNAL.config.install_dir
478 # substitute the template and write it in file
479 content=src.template.substitute(installbin_template_path, d)
480 installbin_file.write(content)
481 # change the rights in order to make the file executable for everybody
# NOTE(review): incomplete in this chunk — signature continuation lines, the
# text_to_add initialization, the continue/else branches inside the loop, the
# ff.close() and the return/chmod mode arguments are on missing lines.
493 def product_appli_creation_script(config,
497     '''Create a script that can produce an application (EDF style) in the binary
500     :param config Config: The global configuration.
501     :param logger Logger: the logging instance
502     :param file_dir str: the directory where to put the file
503     :param binaries_dir_name str: the name of the repository where the binaries
505     :return: the path of the produced script file
508 template_name = "create_appli.py.for_bin_packages.template"
509 template_path = os.path.join(config.VARS.internal_dir, template_name)
510 text_to_fill = open(template_path, "r").read()
511 text_to_fill = text_to_fill.replace("TO BE FILLED 1",
512 '"' + binaries_dir_name + '"')
# build one <module .../> XML line per SALOME module of the application
515 for product_name in get_SALOME_modules(config):
516 product_info = src.product.get_product_config(config, product_name)
518 if src.product.product_is_smesh_plugin(product_info):
521 if 'install_dir' in product_info and bool(product_info.install_dir):
522 if src.product.product_is_cpp(product_info):
# cpp products contribute one line per generated component
524 for cpp_name in src.product.get_product_components(product_info):
525 line_to_add = ("<module name=\"" +
527 "\" gui=\"yes\" path=\"''' + "
528 "os.path.join(dir_bin_name, \"" +
529 cpp_name + "\") + '''\"/>")
532 line_to_add = ("<module name=\"" +
534 "\" gui=\"yes\" path=\"''' + "
535 "os.path.join(dir_bin_name, \"" +
536 product_name + "\") + '''\"/>")
537 text_to_add += line_to_add + "\n"
539 filled_text = text_to_fill.replace("TO BE FILLED 2", text_to_add)
541 tmp_file_path = os.path.join(file_dir, "create_appli.py")
542 ff = open(tmp_file_path, "w")
543 ff.write(filled_text)
546 # change the rights in order to make the file executable for everybody
547 os.chmod(tmp_file_path,
# NOTE(review): incomplete in this chunk — many lines are missing (list
# initializations, continue statements, else: branches, call-argument
# continuations, the logger warning text emission, the `if options.exe:`
# guard and the final return). Recover the full source before editing.
558 def binary_package(config, logger, options, tmp_working_dir):
559     '''Prepare a dictionary that stores all the needed directories and files to
560     add in a binary package.
562     :param config Config: The global configuration.
563     :param logger Logger: the logging instance
564     :param options OptResult: the options of the launched command
565     :param tmp_working_dir str: The temporary local directory containing some
566                                 specific directories or files needed in the
568     :return: the dictionary that stores all the needed directories and files to
569              add in a binary package.
570              {label : (path_on_local_machine, path_in_archive)}
574 # Get the list of product installation to add to the archive
575 l_products_name = sorted(config.APPLICATION.products.keys())
576 l_product_info = src.product.get_products_infos(l_products_name,
579 # suppress compile time products for binaries-only archives
580 if not options.sources:
581 update_config(config, logger, "compile_time", "yes")
586 l_sources_not_present = []
587 generate_mesa_launcher = False # a flag to know if we generate a mesa launcher
588 if ("APPLICATION" in config and
589 "properties" in config.APPLICATION and
590 "mesa_launcher_in_package" in config.APPLICATION.properties and
591 config.APPLICATION.properties.mesa_launcher_in_package == "yes") :
592 generate_mesa_launcher=True
# classify every product: sources to embed, install dirs to embed, missing
594 for prod_name, prod_info in l_product_info:
595 # skip product with property not_in_package set to yes
596 if src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes":
599 # Add the sources of the products that have the property
600 # sources_in_package : "yes"
601 if src.get_property_in_product_cfg(prod_info,
602 "sources_in_package") == "yes":
603 if os.path.exists(prod_info.source_dir):
604 l_source_dir.append((prod_name, prod_info.source_dir))
606 l_sources_not_present.append(prod_name)
608 # ignore the native and fixed products for install directories
609 if (src.product.product_is_native(prod_info)
610 or src.product.product_is_fixed(prod_info)
611 or not src.product.product_compiles(prod_info)):
614 # products with single_fir property will be installed in the PRODUCTS directory of the archive
615 is_single_dir=(src.appli_test_property(config,"single_install_dir", "yes") and \
616 src.product.product_test_property(prod_info,"single_install_dir", "yes"))
617 if src.product.check_installation(config, prod_info):
618 l_install_dir.append((prod_name, prod_info.name, prod_info.install_dir, is_single_dir))
620 l_not_installed.append(prod_name)
622 # Add also the cpp generated modules (if any)
623 if src.product.product_is_cpp(prod_info):
625 for name_cpp in src.product.get_product_components(prod_info):
626 install_dir = os.path.join(config.APPLICATION.workdir,
627 config.INTERNAL.config.install_dir,
629 if os.path.exists(install_dir):
630 l_install_dir.append((name_cpp, name_cpp, install_dir, False))
632 l_not_installed.append(name_cpp)
634 # check the name of the directory that (could) contains the binaries
635 # from previous detar
636 binaries_from_detar = os.path.join(
637 config.APPLICATION.workdir,
638 config.INTERNAL.config.binary_dir + config.VARS.dist)
639 if os.path.exists(binaries_from_detar):
641 WARNING: existing binaries directory from previous detar installation:
643 To make new package from this, you have to:
644 1) install binaries in INSTALL directory with the script "install_bin.sh"
645 see README file for more details
646 2) or recompile everything in INSTALL with "sat compile" command
647 this step is long, and requires some linux packages to be installed
649 """ % binaries_from_detar)
651 # Print warning or error if there are some missing products
652 if len(l_not_installed) > 0:
653 text_missing_prods = ""
654 for p_name in l_not_installed:
655 text_missing_prods += " - " + p_name + "\n"
656 if not options.force_creation:
657 msg = _("ERROR: there are missing product installations:")
658 logger.write("%s\n%s" % (src.printcolors.printcError(msg),
661 raise src.SatException(msg)
663 msg = _("WARNING: there are missing products installations:")
664 logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
668 # Do the same for sources
669 if len(l_sources_not_present) > 0:
670 text_missing_prods = ""
671 for p_name in l_sources_not_present:
672 text_missing_prods += "-" + p_name + "\n"
673 if not options.force_creation:
674 msg = _("ERROR: there are missing product sources:")
675 logger.write("%s\n%s" % (src.printcolors.printcError(msg),
678 raise src.SatException(msg)
680 msg = _("WARNING: there are missing products sources:")
681 logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
685 # construct the name of the directory that will contain the binaries
686 if src.architecture.is_windows():
687 binaries_dir_name = config.INTERNAL.config.binary_dir
689 binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
690 # construct the correlation table between the product names, there
691 # actual install directories and there install directory in archive
693 for prod_name, prod_info_name, install_dir, is_single_dir in l_install_dir:
694 prod_base_name=os.path.basename(install_dir)
695 if prod_base_name.startswith("config"):
696 # case of a products installed in base. Because the archive is in base:no mode,
697 # we replace "config-i" by the product name or by PRODUCTS if single-dir
699 prod_base_name=config.INTERNAL.config.single_install_dir
701 prod_base_name=prod_info_name
702 path_in_archive = os.path.join(binaries_dir_name, prod_base_name)
703 d_products[prod_name + " (bin)"] = (install_dir, path_in_archive)
705 for prod_name, source_dir in l_source_dir:
706 path_in_archive = os.path.join("SOURCES", prod_name)
707 d_products[prod_name + " (sources)"] = (source_dir, path_in_archive)
709 # for packages of SALOME applications including KERNEL,
710 # we produce a salome launcher or a virtual application (depending on salome version)
711 if 'KERNEL' in config.APPLICATION.products:
712 VersionSalome = src.get_salome_version(config)
713 # Case where SALOME has the launcher that uses the SalomeContext API
714 if VersionSalome >= 730:
715 # create the relative launcher and add it to the files to add
716 launcher_name = src.get_launcher_name(config)
717 launcher_package = produce_relative_launcher(config,
722 d_products["launcher"] = (launcher_package, launcher_name)
724 # if the application contains mesa products, we generate in addition to the
725 # classical salome launcher a launcher using mesa and called mesa_salome
726 # (the mesa launcher will be used for remote usage through ssh).
727 if generate_mesa_launcher:
728 #if there is one : store the use_mesa property
729 restore_use_mesa_option=None
730 if ('properties' in config.APPLICATION and
731 'use_mesa' in config.APPLICATION.properties):
732 restore_use_mesa_option = config.APPLICATION.properties.use_mesa
734 # activate mesa property, and generate a mesa launcher
735 src.activate_mesa_property(config) #activate use_mesa property
736 launcher_mesa_name="mesa_"+launcher_name
737 launcher_package_mesa = produce_relative_launcher(config,
742 d_products["launcher (mesa)"] = (launcher_package_mesa, launcher_mesa_name)
744 # if there was a use_mesa value, we restore it
745 # else we set it to the default value "no"
746 if restore_use_mesa_option != None:
747 config.APPLICATION.properties.use_mesa=restore_use_mesa_option
749 config.APPLICATION.properties.use_mesa="no"
752 # if we mix binaries and sources, we add a copy of the launcher,
753 # prefixed with "bin",in order to avoid clashes
754 d_products["launcher (copy)"] = (launcher_package, "bin"+launcher_name)
756 # Provide a script for the creation of an application EDF style
757 appli_script = product_appli_creation_script(config,
762 d_products["appli script"] = (appli_script, "create_appli.py")
764 # Put also the environment file
765 env_file = produce_relative_env_files(config,
770 if src.architecture.is_windows():
771 filename = "env_launch.bat"
773 filename = "env_launch.sh"
774 d_products["environment file"] = (env_file, filename)
776 # If option exe, produce an extra launcher based on specified exe
778 exe_file = produce_relative_env_files(config,
784 if src.architecture.is_windows():
785 filename = os.path.basename(options.exe) + ".bat"
787 filename = os.path.basename(options.exe) + ".sh"
788 d_products["exe file"] = (exe_file, filename)
# NOTE(review): incomplete in this chunk — the ftp-mode branch, call-argument
# continuations, the try/except around the symlink creation, the os.chdir
# restore and the final return are on missing lines.
793 def source_package(sat, config, logger, options, tmp_working_dir):
794     '''Prepare a dictionary that stores all the needed directories and files to
795     add in a source package.
797     :param config Config: The global configuration.
798     :param logger Logger: the logging instance
799     :param options OptResult: the options of the launched command
800     :param tmp_working_dir str: The temporary local directory containing some
801                                 specific directories or files needed in the
803     :return: the dictionary that stores all the needed directories and files to
804              add in a source package.
805              {label : (path_on_local_machine, path_in_archive)}
810 # Get all the products that are prepared using an archive
811 # unless ftp mode is specified (in this case the user of the
812 # archive will get the sources through the ftp mode of sat prepare
814 logger.write("Find archive products ... ")
815 d_archives, l_pinfo_vcs = get_archives(config, logger)
816 logger.write("Done\n")
819 if not options.with_vcs and len(l_pinfo_vcs) > 0:
820 # Make archives with the products that are not prepared using an archive
821 # (git, cvs, svn, etc)
822 logger.write("Construct archives for vcs products ... ")
823 d_archives_vcs = get_archives_vcs(l_pinfo_vcs,
828 logger.write("Done\n")
831 logger.write("Create the project ... ")
832 d_project = create_project_for_src_package(config,
836 logger.write("Done\n")
# embed a copy of salomeTools itself so the package is self-contained
839 tmp_sat = add_salomeTools(config, tmp_working_dir)
840 d_sat = {"salomeTools" : (tmp_sat, "salomeTools")}
842 # Add a sat symbolic link if not win
843 if not src.architecture.is_windows():
844 tmp_satlink_path = os.path.join(tmp_working_dir, 'sat')
848 # In the jobs, os.getcwd() can fail
849 t = config.LOCAL.workdir
850 os.chdir(tmp_working_dir)
851 if os.path.lexists(tmp_satlink_path):
852 os.remove(tmp_satlink_path)
853 os.symlink(os.path.join('salomeTools', 'sat'), 'sat')
856 d_sat["sat link"] = (tmp_satlink_path, "sat")
858 d_source = src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
# NOTE(review): incomplete in this chunk — the d_archives / l_pinfo_vcs
# initializations, continue statements after the skip conditions and the
# else: before the vcs branch are on missing lines.
861 def get_archives(config, logger):
862     '''Find all the products that are get using an archive and all the products
863     that are get using a vcs (git, cvs, svn) repository.
865     :param config Config: The global configuration.
866     :param logger Logger: the logging instance
867     :return: the dictionary {name_product :
868              (local path of its archive, path in the package of its archive )}
869              and the list of specific configuration corresponding to the vcs
873 # Get the list of product informations
874 l_products_name = config.APPLICATION.products.keys()
875 l_product_info = src.product.get_products_infos(l_products_name,
879 for p_name, p_info in l_product_info:
880 # skip product with property not_in_package set to yes
881 if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
883 # ignore the native and fixed products
884 if (src.product.product_is_native(p_info)
885 or src.product.product_is_fixed(p_info)):
887 if p_info.get_source == "archive":
888 archive_path = p_info.archive_info.archive_name
889 archive_name = os.path.basename(archive_path)
890 d_archives[p_name] = (archive_path,
891 os.path.join(ARCHIVE_DIR, archive_name))
892 if (src.appli_test_property(config,"pip", "yes") and
893 src.product.product_test_property(p_info,"pip", "yes")):
894 # if pip mode is activated, and product is managed by pip
895 pip_wheels_dir=os.path.join(config.LOCAL.archive_dir,"wheels")
896 pip_wheel_pattern=os.path.join(pip_wheels_dir,
897 "%s-%s*" % (p_info.name, p_info.version))
898 pip_wheel_path=glob.glob(pip_wheel_pattern)
899 msg_pip_not_found="Error in get_archive, pip wheel for "\
900 "product %s-%s was not found in %s directory"
901 msg_pip_two_or_more="Error in get_archive, several pip wheels for "\
902 "product %s-%s were found in %s directory"
# exactly one wheel must match the name-version pattern
903 if len(pip_wheel_path)==0:
904 raise src.SatException(msg_pip_not_found %\
905 (p_info.name, p_info.version, pip_wheels_dir))
906 if len(pip_wheel_path)>1:
907 raise src.SatException(msg_pip_two_or_more %\
908 (p_info.name, p_info.version, pip_wheels_dir))
910 pip_wheel_name=os.path.basename(pip_wheel_path[0])
911 d_archives[p_name+" (pip wheel)"]=(pip_wheel_path[0],
912 os.path.join(ARCHIVE_DIR, "wheels", pip_wheel_name))
914 # this product is not managed by archive,
915 # an archive of the vcs directory will be created by get_archive_vcs
916 l_pinfo_vcs.append((p_name, p_info))
918 return d_archives, l_pinfo_vcs
# NOTE(review): incomplete in this chunk — the file_path removal inside the
# cleanup loop, part of the os.path.join arguments and the ff.close() are on
# missing lines.
920 def add_salomeTools(config, tmp_working_dir):
921     '''Prepare a version of salomeTools that has a specific local.pyconf file
922     configured for a source package.
924     :param config Config: The global configuration.
925     :param tmp_working_dir str: The temporary local directory containing some
926                                 specific directories or files needed in the
928     :return: The path to the local salomeTools directory to add in the package
931 # Copy sat in the temporary working directory
932 sat_tmp_path = src.Path(os.path.join(tmp_working_dir, "salomeTools"))
933 sat_running_path = src.Path(config.VARS.salometoolsway)
934 sat_running_path.copy(sat_tmp_path)
936 # Update the local.pyconf file that contains the path to the project
937 local_pyconf_name = "local.pyconf"
938 local_pyconf_dir = os.path.join(tmp_working_dir, "salomeTools", "data")
939 local_pyconf_file = os.path.join(local_pyconf_dir, local_pyconf_name)
940 # Remove the .pyconf file in the root directory of salomeTools if there is
941 # any. (For example when launching jobs, a pyconf file describing the jobs
942 # can be here and is not useful)
943 files_or_dir_SAT = os.listdir(os.path.join(tmp_working_dir, "salomeTools"))
944 for file_or_dir in files_or_dir_SAT:
945 if file_or_dir.endswith(".pyconf") or file_or_dir.endswith(".txt"):
946 file_path = os.path.join(tmp_working_dir,
# write the package-specific local.pyconf from the module-level template
951 ff = open(local_pyconf_file, "w")
952 ff.write(LOCAL_TEMPLATE)
955 return sat_tmp_path.path
# NOTE(review): incomplete in this chunk — the `import source` (or
# equivalent) bringing `source.run` into scope and the d_archives_vcs
# initialization are on missing lines.
957 def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir):
958     '''For sources package that require that all products are get using an
959     archive, one has to create some archive for the vcs products.
960     So this method calls the clean and source command of sat and then create
963     :param l_pinfo_vcs List: The list of specific configuration corresponding to
965     :param sat Sat: The Sat instance that can be called to clean and source the
967     :param config Config: The global configuration.
968     :param logger Logger: the logging instance
969     :param tmp_working_dir str: The temporary local directory containing some
970                                 specific directories or files needed in the
972     :return: the dictionary that stores all the archives to add in the source
973              package. {label : (path_on_local_machine, path_in_archive)}
976 # clean the source directory of all the vcs products, then use the source
977 # command and thus construct an archive that will not contain the patches
978 l_prod_names = [pn for pn, __ in l_pinfo_vcs]
# dead branch kept deliberately by the original author (see its comment)
979 if False: # clean is dangerous in user/SOURCES, fixed in tmp_local_working_dir
980 logger.write(_("\nclean sources\n"))
981 args_clean = config.VARS.application
982 args_clean += " --sources --products "
983 args_clean += ",".join(l_prod_names)
984 logger.write("WARNING: get_archives_vcs clean\n '%s'\n" % args_clean, 1)
985 sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)
988 logger.write(_("get sources\n"))
989 args_source = config.VARS.application
990 args_source += " --products "
991 args_source += ",".join(l_prod_names)
# temporarily redirect the application workdir so sources land in a
# package-local directory; restored below
992 svgDir = sat.cfg.APPLICATION.workdir
993 tmp_local_working_dir = os.path.join(sat.cfg.APPLICATION.workdir, "tmp_package") # to avoid too much big files in /tmp
994 sat.cfg.APPLICATION.workdir = tmp_local_working_dir
995 # DBG.write("SSS sat config.APPLICATION.workdir", sat.cfg.APPLICATION, True)
996 # DBG.write("sat config id", id(sat.cfg), True)
997 # workaround: config is not the same id() as for sat.source()
998 # sat.source(args_source, batch=True, verbose=5, logger_add_link = logger)
1000 source.run(args_source, sat, logger) #use this mode as runner.cfg reference
1002 # make the new archives
1004 for pn, pinfo in l_pinfo_vcs:
1005 path_archive = make_archive(pn, pinfo, tmp_local_working_dir)
1006 logger.write("make archive vcs '%s'\n" % path_archive)
1007 d_archives_vcs[pn] = (path_archive,
1008 os.path.join(ARCHIVE_DIR, pn + ".tgz"))
1009 sat.cfg.APPLICATION.workdir = svgDir
1010 # DBG.write("END sat config", sat.cfg.APPLICATION, True)
1011 return d_archives_vcs
def make_archive(prod_name, prod_info, where):
    '''Create an archive of a product by searching its source directory.

    :param prod_name str: The name of the product.
    :param prod_info Config: The specific configuration corresponding to the
                             product.
    :param where str: The path of the repository where to put the resulting
                      archive.
    :return: The path of the resulting archive.
    :rtype: str
    '''
    path_targz_prod = os.path.join(where, prod_name + PACKAGE_EXT)
    tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
    local_path = prod_info.source_dir
    # NOTE(review): the if/else on old_python selecting between the legacy
    # 'exclude=' form and the modern 'filter=' form of TarFile.add (and the
    # arcname argument and tar_prod.close()) are not visible in this excerpt
    # -- confirm against the full source.
    tar_prod.add(local_path,
                 exclude=exclude_VCS_and_extensions_26)
    tar_prod.add(local_path,
                 filter=exclude_VCS_and_extensions)
    return path_targz_prod
def create_project_for_src_package(config, tmp_working_dir, with_vcs, with_ftp):
    '''Create a specific project for a source package.

    :param config Config: The global configuration.
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
                                source package.
    :param with_vcs boolean: True if the package is with vcs products (not
                             transformed into archive products)
    :param with_ftp boolean: True if the package use ftp servers to get archives
    :return: The dictionary
             {"project" : (produced project, project path in the archive)}
    :rtype: dict
    '''
    # Create in the working temporary directory the full project tree
    # NOTE(review): the second argument (sub-directory name) of each
    # os.path.join below is not visible in this excerpt.
    project_tmp_dir = os.path.join(tmp_working_dir, PROJECT_DIR)
    products_pyconf_tmp_dir = os.path.join(project_tmp_dir,
    compil_scripts_tmp_dir = os.path.join(project_tmp_dir,
    env_scripts_tmp_dir = os.path.join(project_tmp_dir,
    patches_tmp_dir = os.path.join(project_tmp_dir,
    application_tmp_dir = os.path.join(project_tmp_dir,
    for directory in [project_tmp_dir,
                      compil_scripts_tmp_dir,
                      env_scripts_tmp_dir,
                      application_tmp_dir]:
        src.ensure_path_exists(directory)

    # Create the pyconf that contains the information of the project
    project_pyconf_name = "project.pyconf"
    project_pyconf_file = os.path.join(project_tmp_dir, project_pyconf_name)
    ff = open(project_pyconf_file, "w")
    ff.write(PROJECT_TEMPLATE)

    # append the ftp servers (colon-separated list) if requested
    if with_ftp and len(config.PATHS.ARCHIVEFTP) > 0:
        ftp_path='ARCHIVEFTP : "'+config.PATHS.ARCHIVEFTP[0]
        for ftpserver in config.PATHS.ARCHIVEFTP[1:]:
            ftp_path=ftp_path+":"+ftpserver
        ff.write("# ftp servers where to search for prerequisite archives\n")

    # add licence paths if any
    if len(config.PATHS.LICENCEPATH) > 0:
        licence_path='LICENCEPATH : "'+config.PATHS.LICENCEPATH[0]
        for path in config.PATHS.LICENCEPATH[1:]:
            licence_path=licence_path+":"+path
        ff.write("\n# Where to search for licences\n")
        ff.write(licence_path)

    # Loop over the products to get their pyconf and all the scripts
    # (compilation, environment, patches)
    # and create the pyconf file to add to the project
    # NOTE(review): the 'continue' after the not_in_package test and several
    # arguments of the find_product_scripts_and_pyconf call (and the final
    # return) are not visible in this excerpt.
    lproducts_name = config.APPLICATION.products.keys()
    l_products = src.product.get_products_infos(lproducts_name, config)
    for p_name, p_info in l_products:
        # skip product with property not_in_package set to yes
        if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
        find_product_scripts_and_pyconf(p_name,
                                        compil_scripts_tmp_dir,
                                        env_scripts_tmp_dir,
                                        products_pyconf_tmp_dir)

    # for the application pyconf, we write directly the config
    # don't search for the original pyconf file
    # to avoid problems with overwrite sections and rm_products key
    write_application_pyconf(config, application_tmp_dir)

    d_project = {"project" : (project_tmp_dir, PROJECT_DIR )}
def find_product_scripts_and_pyconf(p_name,
                                    compil_scripts_tmp_dir,
                                    env_scripts_tmp_dir,
                                    products_pyconf_tmp_dir):
    '''Create a specific pyconf file for a given product. Get its environment
    script, its compilation script and patches and put it in the temporary
    working directory. This method is used in the source package in order to
    construct the specific project.

    NOTE(review): the body below also uses p_info, config, with_vcs and
    patches_tmp_dir; those parameter lines are missing from this excerpt of
    the signature -- confirm against the full source.

    :param p_name str: The name of the product.
    :param p_info Config: The specific configuration corresponding to the
                          product.
    :param config Config: The global configuration.
    :param with_vcs boolean: True if the package is with vcs products (not
                             transformed into archive products)
    :param compil_scripts_tmp_dir str: The path to the temporary compilation
                                       scripts directory of the project.
    :param env_scripts_tmp_dir str: The path to the temporary environment script
                                    directory of the project.
    :param patches_tmp_dir str: The path to the temporary patch scripts
                                directory of the project.
    :param products_pyconf_tmp_dir str: The path to the temporary product
                                        scripts directory of the project.
    '''
    # read the pyconf of the product
    product_pyconf_cfg = src.pyconf.Config(p_info.from_file)

    # find the compilation script if any
    if src.product.product_has_script(p_info):
        compil_script_path = src.Path(p_info.compil_script)
        compil_script_path.copy(compil_scripts_tmp_dir)

    # find the environment script if any
    if src.product.product_has_env_script(p_info):
        env_script_path = src.Path(p_info.environ.env_script)
        env_script_path.copy(env_scripts_tmp_dir)

    # find the patches if any
    if src.product.product_has_patches(p_info):
        patches = src.pyconf.Sequence()
        for patch_path in p_info.patches:
            p_path = src.Path(patch_path)
            p_path.copy(patches_tmp_dir)
            patches.append(os.path.basename(patch_path), "")

    if (not with_vcs) and src.product.product_is_vcs(p_info):
        # in non vcs mode, if the product is not archive, then make it become archive.

        # depending upon the incremental mode, select impacted sections
        # NOTE(review): the 'else:' before the single-section assignment and
        # the trailing arguments of the DBG.write / addMapping calls below are
        # not visible in this excerpt.
        if "properties" in p_info and "incremental" in p_info.properties and\
           p_info.properties.incremental == "yes":
            sections = ["default", "default_win", p_info.section, p_info.section+"_win"]
            sections = [p_info.section]
        for section in sections:
            if section in product_pyconf_cfg and "get_source" in product_pyconf_cfg[section]:
                DBG.write("sat package set archive mode to archive for product %s and section %s" %\
                product_pyconf_cfg[section].get_source = "archive"
                if not "archive_info" in product_pyconf_cfg[section]:
                    product_pyconf_cfg[section].addMapping("archive_info",
                                                           src.pyconf.Mapping(product_pyconf_cfg),
                product_pyconf_cfg[section].archive_info.archive_name =\
                    p_info.name + ".tgz"

    if (with_vcs) and src.product.product_is_vcs(p_info):
        # in vcs mode we must replace explicitly the git server url
        # (or it will not be found later because project files are not exported in archives)
        for section in product_pyconf_cfg:
            # replace in all sections of the product pyconf the git repo
            # definition by its substituted value (found in p_info)
            if "git_info" in product_pyconf_cfg[section]:
                for repo in product_pyconf_cfg[section].git_info:
                    if repo in p_info.git_info:
                        product_pyconf_cfg[section].git_info[repo] = p_info.git_info[repo]

    # write the pyconf file to the temporary project location
    product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir,
    ff = open(product_tmp_pyconf_path, 'w')
    ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
    product_pyconf_cfg.__save__(ff, 1)
def write_application_pyconf(config, application_tmp_dir):
    '''Write the application pyconf file in the specific temporary
    directory containing the specific project of a source package.

    :param config Config: The global configuration.
    :param application_tmp_dir str: The path to the temporary application
                                    scripts directory of the project.
    '''
    application_name = config.VARS.application
    # write the pyconf file to the temporary application location
    application_tmp_pyconf_path = os.path.join(application_tmp_dir,
                                               application_name + ".pyconf")
    with open(application_tmp_pyconf_path, 'w') as f:
        f.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
        res = src.pyconf.Config()
        app = src.pyconf.deepCopyMapping(config.APPLICATION)

        # set base mode to "no" for the archive
        # NOTE(review): the statement implementing the comment above, and the
        # first arguments of the Reference call below, are not visible in
        # this excerpt -- confirm against the full source.

        # Change the workdir
        app.workdir = src.pyconf.Reference(
                                 'VARS.salometoolsway + $VARS.sep + ".."')
        res.addMapping("APPLICATION", app, "")
        res.__save__(f, evaluated=False)
def sat_package(config, tmp_working_dir, options, logger):
    '''Prepare a dictionary that stores all the needed directories and files to
    add in a salomeTool package.

    :param config Config: The global configuration.
    :param tmp_working_dir str: The temporary local working directory
    :param options OptResult: the options of the launched command
    :param logger Logger: the logging instance
    :return: the dictionary that stores all the needed directories and files to
             add in a salomeTool package.
             {label : (path_on_local_machine, path_in_archive)}
    :rtype: dict
    '''
    # we include sat itself
    # NOTE(review): the initialization of d_project (empty dict), the guard
    # around the project path block below, ff.close() and the final return
    # are not visible in this excerpt.
    d_project["all_sat"]=(config.VARS.salometoolsway, "")

    # and we overwrite local.pyconf with a clean version:
    # reset every machine-specific LOCAL entry to a neutral default
    local_pyconf_tmp_path = os.path.join(tmp_working_dir, "local.pyconf")
    local_file_path = os.path.join(config.VARS.datadir, "local.pyconf")
    local_cfg = src.pyconf.Config(local_file_path)
    local_cfg.PROJECTS.project_file_paths=src.pyconf.Sequence(local_cfg.PROJECTS)
    local_cfg.LOCAL["base"] = "default"
    local_cfg.LOCAL["workdir"] = "default"
    local_cfg.LOCAL["log_dir"] = "default"
    local_cfg.LOCAL["archive_dir"] = "default"
    local_cfg.LOCAL["VCS"] = "None"
    local_cfg.LOCAL["tag"] = src.get_salometool_version(config)

    # if the archive contains a project, we write its relative path in local.pyconf
    project_arch_path = os.path.join("projects", options.project,
                                     os.path.basename(options.project_file_path))
    local_cfg.PROJECTS.project_file_paths.append(project_arch_path, "")

    ff = open(local_pyconf_tmp_path, 'w')
    local_cfg.__save__(ff, 1)

    d_project["local.pyconf"]=(local_pyconf_tmp_path, "data/local.pyconf")
def project_package(config, name_project, project_file_path, ftp_mode, tmp_working_dir, embedded_in_sat, logger):
    '''Prepare a dictionary that stores all the needed directories and files to
    add in a project package.

    :param config Config: The global configuration.
    :param name_project str: The name of the project to package.
    :param project_file_path str: The path to the local project.
    :param ftp_mode boolean: Do not embed archives, the archive will rely on ftp mode to retrieve them.
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
                                project package.
    :param embedded_in_sat boolean : the project package is embedded in a sat package
    :param logger Logger: the logging instance
    :return: the dictionary that stores all the needed directories and files to
             add in a project package.
             {label : (path_on_local_machine, path_in_archive)}
    :rtype: dict
    '''
    # Read the project file and get the directories to add to the package
    # NOTE(review): the try/except wrapping the lookup below (falling back to
    # reading project_file_path directly when the project is not registered)
    # is only partially visible in this excerpt -- the WARNING line below is
    # the tail of the fallback's logger message.
    project_pyconf_cfg = config.PROJECTS.projects.__getattr__(name_project)
WARNING: inexisting config.PROJECTS.projects.%s, try to read now from:\n%s\n""" % (name_project, project_file_path))
    project_pyconf_cfg = src.pyconf.Config(project_file_path)
    project_pyconf_cfg.PWD = os.path.dirname(project_file_path)

    # the project directories that are exported and remapped in the archive
    paths = {"APPLICATIONPATH" : "applications",
             "PRODUCTPATH" : "products",
             "MACHINEPATH" : "machines"}
    paths["ARCHIVEPATH"] = "archives"

    # Loop over the project paths and add it
    # NOTE(review): the 'for path in paths:' loop header, the 'continue'
    # after the membership test and the if/else on embedded_in_sat selecting
    # between the two dest_path computations are not visible in this excerpt.
    project_file_name = os.path.basename(project_file_path)
    if path not in project_pyconf_cfg:
        dest_path = os.path.join("projects", name_project, paths[path])
        project_file_dest = os.path.join("projects", name_project, project_file_name)
        dest_path = paths[path]
        project_file_dest = project_file_name

        # Add the directory to the files to add in the package
        d_project[path] = (project_pyconf_cfg[path], dest_path)

        # Modify the value of the path in the package
        project_pyconf_cfg[path] = src.pyconf.Reference(
            'project_path + "/' + paths[path] + '"')

    # Modify some values
    if "project_path" not in project_pyconf_cfg:
        project_pyconf_cfg.addMapping("project_path",
                                      src.pyconf.Mapping(project_pyconf_cfg),
    project_pyconf_cfg.project_path = src.pyconf.Reference(project_pyconf_cfg,

    # we don't want to export these two fields
    project_pyconf_cfg.__delitem__("file_path")
    project_pyconf_cfg.__delitem__("PWD")
    project_pyconf_cfg.__delitem__("ARCHIVEPATH")

    # Write the project pyconf file
    project_pyconf_tmp_path = os.path.join(tmp_working_dir, project_file_name)
    ff = open(project_pyconf_tmp_path, 'w')
    ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
    project_pyconf_cfg.__save__(ff, 1)

    d_project["Project hat file"] = (project_pyconf_tmp_path, project_file_dest)
def add_readme(config, options, where):
    '''Build the README file of the package in *where* and return its path.

    :param config Config: The global configuration.
    :param options OptResult: the options of the launched command
    :param where str: The directory where to create the README
    :return: the path of the produced README file
    :rtype: str
    '''
    readme_path = os.path.join(where, "README")
    with codecs.open(readme_path, "w", 'utf-8') as f:

        # templates for building the header
        # NOTE(review): the opening of the readme_header template literal is
        # not visible in this excerpt; the following lines are its content
        # (string.Template placeholders $version, $dist, $$ROOT).
# This package was generated with sat $version
# Distribution : $dist
In the following, $$ROOT represents the directory where you have installed
SALOME (the directory where this file is located).
        if src.architecture.is_windows():
            readme_header = readme_header.replace('$$ROOT','%ROOT%')
        readme_compilation_with_binaries="""
compilation based on the binaries used as prerequisites
=======================================================
If you fail to compile the complete application (for example because
you are not root on your system and cannot install missing packages), you
may try a partial compilation based on the binaries.
For that it is necessary to copy the binaries from BINARIES to INSTALL,
and do some substitutions on cmake and .la files (replace the build directories
The procedure to do it is:
 1) Remove or rename INSTALL directory if it exists
 2) Execute the shell script install_bin.sh:
 3) Use SalomeTool (as explained in Sources section) and compile only the
    modules you need to (with -p option)
        readme_header_tpl=string.Template(readme_header)
        readme_template_path_bin = os.path.join(config.VARS.internal_dir,
                                                "README_BIN.template")
        readme_template_path_bin_launcher = os.path.join(config.VARS.internal_dir,
                                                "README_LAUNCHER.template")
        readme_template_path_bin_virtapp = os.path.join(config.VARS.internal_dir,
                                                "README_BIN_VIRTUAL_APP.template")
        readme_template_path_src = os.path.join(config.VARS.internal_dir,
                                                "README_SRC.template")
        readme_template_path_pro = os.path.join(config.VARS.internal_dir,
                                                "README_PROJECT.template")
        readme_template_path_sat = os.path.join(config.VARS.internal_dir,
                                                "README_SAT.template")

        # prepare substitution dictionary
        # NOTE(review): the initialization of d is not visible in this excerpt.
        d['user'] = config.VARS.user
        d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
        d['version'] = src.get_salometool_version(config)
        d['dist'] = config.VARS.dist
        f.write(readme_header_tpl.substitute(d)) # write the general header (common)

        if options.binaries or options.sources:
            d['application'] = config.VARS.application
            d['BINARIES'] = config.INTERNAL.config.binary_dir
            d['SEPARATOR'] = config.VARS.sep
            if src.architecture.is_windows():
                d['operatingSystem'] = 'Windows'
                d['PYTHON3'] = 'python3'
                d['ROOT'] = '%ROOT%'
                d['operatingSystem'] = 'Linux'
            f.write("# Application: " + d['application'] + "\n")
            if 'KERNEL' in config.APPLICATION.products:
                VersionSalome = src.get_salome_version(config)
                # Case where SALOME has the launcher that uses the SalomeContext API
                if VersionSalome >= 730:
                    d['launcher'] = config.APPLICATION.profile.launcher_name
                    d['virtual_app'] = 'runAppli' # this info is not used now)

        # write the specific sections
        # NOTE(review): several option guards (--sources, --project, --sat)
        # around the template writes below, the else branches, and the final
        # 'return readme_path' are not visible in this excerpt.
        if options.binaries:
            f.write(src.template.substitute(readme_template_path_bin, d))
            if "virtual_app" in d:
                f.write(src.template.substitute(readme_template_path_bin_virtapp, d))
            f.write(src.template.substitute(readme_template_path_bin_launcher, d))

        f.write(src.template.substitute(readme_template_path_src, d))

        if options.binaries and options.sources and not src.architecture.is_windows():
            f.write(readme_compilation_with_binaries)

        f.write(src.template.substitute(readme_template_path_pro, d))
        f.write(src.template.substitute(readme_template_path_sat, d))
def update_config(config, logger, prop, value):
    '''Strip from config.APPLICATION.products every product carrying a
    given property value.

    :param config Config: The global config.
    :param logger Logger: the logging instance
    :param prop str: The property to filter
    :param value str: The value of the property to filter
    '''
    # if there is no APPLICATION (ex sat package -t) : nothing to do
    if "APPLICATION" not in config:
        return
    # collect first, then delete: do not mutate the mapping while iterating it
    to_remove = [name
                 for name in config.APPLICATION.products.keys()
                 if src.get_property_in_product_cfg(
                        src.product.get_product_config(config, name),
                        prop) == value]
    for name in to_remove:
        config.APPLICATION.products.__delitem__(name)
        logger.write("Remove product %s with property %s\n" % (name, prop), 5)
    '''method that is called when salomeTools is called with --help option.

    :return: The text to display for the package command description.
    :rtype: str
    '''
The package command creates a tar file archive of a product.
There are four kinds of archive, which can be mixed:
 1 - The binary archive.
     It contains the product installation directories plus a launcher.
 2 - The sources archive.
     It contains the product archives, a project (the application plus salomeTools).
 3 - The project archive.
     It contains a project (give the project file path as argument).
 4 - The salomeTools archive.
     It contains code utility salomeTools.
>> sat package SALOME-master --binaries --sources""")
def run(args, runner, logger):
    '''method that is called when salomeTools is called with package parameter.

    Builds the requested archive(s) -- binaries, sources, project and/or
    salomeTools itself -- by collecting everything into a temporary working
    directory, tarring it, then cleaning up.

    NOTE(review): many lines of this long function (the --project guard,
    else branches, returns, the try: before the tarfile creation, the
    old_python test around the filter function) are not visible in this
    excerpt; the remaining lines are kept verbatim.
    '''
    (options, args) = parser.parse_args(args)

    # Check that a type of package is called, and only one
    all_option_types = (options.binaries,
                        options.project not in ["", None],

    # Check if no option for package type
    if all_option_types.count(True) == 0:
        msg = _("Error: Precise a type for the package\nUse one of the "
                "following options: --binaries, --sources, --project or"
        logger.write(src.printcolors.printcError(msg), 1)
        logger.write("\n", 1)

    # The repository where to put the package if not Binary or Source
    package_default_path = runner.cfg.LOCAL.workdir

    # if the package contains binaries or sources:
    if options.binaries or options.sources:
        # Check that the command has been called with an application
        src.check_config_has_application(runner.cfg)

        # Display information
        logger.write(_("Packaging application %s\n") % src.printcolors.printcLabel(
                                                    runner.cfg.VARS.application), 1)

        # Get the default directory where to put the packages
        package_default_path = os.path.join(runner.cfg.APPLICATION.workdir, "PACKAGE")
        src.ensure_path_exists(package_default_path)

    # if the package contains a project:
        # check that the project is visible by SAT
        projectNameFile = options.project + ".pyconf"
        for i in runner.cfg.PROJECTS.project_file_paths:
            baseName = os.path.basename(i)
            if baseName == projectNameFile:

        if foundProject is None:
            local_path = os.path.join(runner.cfg.VARS.salometoolsway, "data", "local.pyconf")
            msg = _("""ERROR: the project %(1)s is not visible by salomeTools.
Please add it in file:
                {"1": options.project, "2": "\n ".join(runner.cfg.PROJECTS.project_file_paths), "3": local_path})
            logger.write(src.printcolors.printcError(msg), 1)
            logger.write("\n", 1)
            options.project_file_path = foundProject
            src.printcolors.print_value(logger, "Project path", options.project_file_path, 2)

    # Remove the products that are filtered by the --without_properties option
    if options.without_properties:
        prop, value = options.without_properties
        update_config(runner.cfg, logger, prop, value)

    # Remove from config the products that have the not_in_package property
    update_config(runner.cfg, logger, "not_in_package", "yes")

    # get the name of the archive or build it
    if os.path.basename(options.name) == options.name:
        # only a name (not a path)
        archive_name = options.name
        dir_name = package_default_path
        archive_name = os.path.basename(options.name)
        dir_name = os.path.dirname(options.name)

        # suppress extension
        if archive_name[-len(".tgz"):] == ".tgz":
            archive_name = archive_name[:-len(".tgz")]
        if archive_name[-len(".tar.gz"):] == ".tar.gz":
            archive_name = archive_name[:-len(".tar.gz")]

        # no --name given: build the archive name from the selected options
        dir_name = package_default_path
        if options.binaries or options.sources:
            archive_name = runner.cfg.APPLICATION.name

        if options.binaries:
            archive_name += "-"+runner.cfg.VARS.dist
            archive_name += "-SRC"
            if options.with_vcs:
                archive_name += "-VCS"

        archive_name += ("salomeTools_" + src.get_salometool_version(runner.cfg))
        archive_name += ("satproject_" + options.project)

    if len(archive_name)==0: # no option worked
        msg = _("Error: Cannot name the archive\n"
                " check if at least one of the following options was "
                "selected : --binaries, --sources, --project or"
        logger.write(src.printcolors.printcError(msg), 1)
        logger.write("\n", 1)

    path_targz = os.path.join(dir_name, archive_name + PACKAGE_EXT)

    src.printcolors.print_value(logger, "Package path", path_targz, 2)

    # Create a working directory for all files that are produced during the
    # package creation and that will be removed at the end of the command
    tmp_working_dir = os.path.join(runner.cfg.VARS.tmp_root, runner.cfg.VARS.datehour)
    src.ensure_path_exists(tmp_working_dir)
    logger.write("\n", 5)
    logger.write(_("The temporary working directory: %s\n" % tmp_working_dir),5)

    logger.write("\n", 3)

    msg = _("Preparation of files to add to the archive")
    logger.write(src.printcolors.printcLabel(msg), 2)
    logger.write("\n", 2)

    d_files_to_add={} # content of the archive

    # a dict to hold paths that will need to be substitute for users recompilations
    d_paths_to_substitute={}

    if options.binaries:
        d_bin_files_to_add = binary_package(runner.cfg,

        # for all binaries dir, store the substitution that will be required
        # for extra compilations
        for key in d_bin_files_to_add:
            if key.endswith("(bin)"):
                source_dir = d_bin_files_to_add[key][0]
                path_in_archive = d_bin_files_to_add[key][1].replace(
                    runner.cfg.INTERNAL.config.binary_dir + runner.cfg.VARS.dist,
                    runner.cfg.INTERNAL.config.install_dir)
                if os.path.basename(source_dir)==os.path.basename(path_in_archive):
                    # if basename is the same we will just substitute the dirname
                    d_paths_to_substitute[os.path.dirname(source_dir)]=\
                        os.path.dirname(path_in_archive)
                    d_paths_to_substitute[source_dir]=path_in_archive

        d_files_to_add.update(d_bin_files_to_add)

        d_files_to_add.update(source_package(runner,

        if options.binaries:
            # for archives with bin and sources we provide a shell script able to
            # install binaries for compilation
            file_install_bin=produce_install_bin_file(runner.cfg,logger,
                                                      d_paths_to_substitute,
            d_files_to_add.update({"install_bin" : (file_install_bin, "install_bin.sh")})
            logger.write("substitutions that need to be done later : \n", 5)
            logger.write(str(d_paths_to_substitute), 5)
            logger.write("\n", 5)

    # --salomeTool option is not considered when --sources is selected, as this option
    # already brings salomeTool!
        d_files_to_add.update(sat_package(runner.cfg, tmp_working_dir,

        DBG.write("config for package %s" % options.project, runner.cfg)
        d_files_to_add.update(project_package(runner.cfg, options.project, options.project_file_path, options.ftp, tmp_working_dir, options.sat, logger))

    if not(d_files_to_add):
        msg = _("Error: Empty dictionnary to build the archive!\n")
        logger.write(src.printcolors.printcError(msg), 1)
        logger.write("\n", 1)

    # Add the README file in the package
    local_readme_tmp_path = add_readme(runner.cfg, options, tmp_working_dir)
    d_files_to_add["README"] = (local_readme_tmp_path, "README")

    # Add the additional files of option add_files
    if options.add_files:
        for file_path in options.add_files:
            if not os.path.exists(file_path):
                msg = _("WARNING: the file %s is not accessible.\n" % file_path)
            file_name = os.path.basename(file_path)
            d_files_to_add[file_name] = (file_path, file_name)

    logger.write("\n", 2)
    logger.write(src.printcolors.printcLabel(_("Actually do the package")), 2)
    logger.write("\n", 2)
    logger.write("\nfiles and directories to add:\n%s\n\n" % PP.pformat(d_files_to_add), 5)

        # Creating the object tarfile
        tar = tarfile.open(path_targz, mode='w:gz')

        # get the filtering function if needed
        # NOTE(review): the old_python if/else around the two assignments
        # below is not visible in this excerpt.
        filter_function = exclude_VCS_and_extensions_26
        filter_function = exclude_VCS_and_extensions

        # Add the files to the tarfile object
        res = add_files(tar, archive_name, d_files_to_add, logger, f_exclude=filter_function)

    except KeyboardInterrupt:
        logger.write(src.printcolors.printcError("\nERROR: forced interruption\n"), 1)
        logger.write(_("Removing the temporary working directory '%s'... ") % tmp_working_dir, 1)
        # remove the working directory
        shutil.rmtree(tmp_working_dir)
        logger.write(_("OK"), 1)
        logger.write(_("\n"), 1)

    # case if no application, only package sat as 'sat package -t'
        app = runner.cfg.APPLICATION

    # unconditionally remove the tmp_local_working_dir
        tmp_local_working_dir = os.path.join(app.workdir, "tmp_package")
        if os.path.isdir(tmp_local_working_dir):
            shutil.rmtree(tmp_local_working_dir)

    # remove the tmp directory, unless user has registered as developer
    if os.path.isdir(tmp_working_dir) and (not DBG.isDeveloper()):
        shutil.rmtree(tmp_working_dir)

    # Print again the path of the package
    logger.write("\n", 2)
    src.printcolors.print_value(logger, "Package path", path_targz, 2)