3 # Copyright (C) 2010-2012 CEA/DEN
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 # Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
31 from application import get_SALOME_modules
32 import src.debug as DBG
# Names of the directories created inside the generated package archive.
39 ARCHIVE_DIR = "ARCHIVES"
40 PROJECT_DIR = "PROJECT"
# VCS bookkeeping directories and file extensions filtered out of product
# archives (consumed by exclude_VCS_and_extensions).
42 IGNORED_DIRS = [".git", ".svn"]
43 IGNORED_EXTENSIONS = []
# Template of the project.pyconf written inside a source package; the
# $-prefixed names are pyconf substitutions resolved when the project is
# loaded by sat.  NOTE(review): the template body below is a string literal,
# so no further comments can be inserted inside it.
45 PROJECT_TEMPLATE = """#!/usr/bin/env python
48 # The path to the archive root directory
49 root_path : $PWD + "/../"
51 project_path : $PWD + "/"
53 # Where to search the archives of the products
54 ARCHIVEPATH : $root_path + "ARCHIVES"
55 # Where to search the pyconf of the applications
56 APPLICATIONPATH : $project_path + "applications/"
57 # Where to search the pyconf of the products
58 PRODUCTPATH : $project_path + "products/"
59 # Where to search the pyconf of the jobs of the project
60 JOBPATH : $project_path + "jobs/"
61 # Where to search the pyconf of the machines of the project
62 MACHINEPATH : $project_path + "machines/"
# Template of the local.pyconf shipped with the embedded salomeTools copy in a
# source package: it points project_file_paths at the packaged PROJECT_DIR.
65 LOCAL_TEMPLATE = ("""#!/usr/bin/env python
73 archive_dir : 'default'
80 project_file_paths : [$VARS.salometoolsway + $VARS.sep + \"..\" + $VARS.sep"""
81 """ + \"""" + PROJECT_DIR + """\" + $VARS.sep + "project.pyconf"]
# Command-line options of `sat package`.  Each add_option call registers
# (short flag, long flag, value type, destination attribute, help text,
# default).  NOTE(review): chunk is truncated — the trailing default argument
# of some calls (e.g. with_vcs, ftp) sits on lines missing from this excerpt.
85 # Define all possible option for the package command : sat package <options>
86 parser = src.options.Options()
87 parser.add_option('b', 'binaries', 'boolean', 'binaries',
88 _('Optional: Produce a binary package.'), False)
89 parser.add_option('f', 'force_creation', 'boolean', 'force_creation',
90 _('Optional: Only binary package: produce the archive even if '
91 'there are some missing products.'), False)
92 parser.add_option('s', 'sources', 'boolean', 'sources',
93 _('Optional: Produce a compilable archive of the sources of the '
94 'application.'), False)
95 parser.add_option('', 'with_vcs', 'boolean', 'with_vcs',
96 _('Optional: Do not make archive for products in VCS mode (git, cvs, svn). '
97 'Sat prepare will use VCS mode instead to retrieve them'),
99 parser.add_option('', 'ftp', 'boolean', 'ftp',
100 _('Optional: Do not embed archives for products in archive mode.'
101 'Sat prepare will use ftp instead to retrieve them'),
103 parser.add_option('p', 'project', 'string', 'project',
104 _('Optional: Produce an archive that contains a project.'), "")
105 parser.add_option('t', 'salometools', 'boolean', 'sat',
106 _('Optional: Produce an archive that contains salomeTools.'), False)
107 parser.add_option('n', 'name', 'string', 'name',
108 _('Optional: The name or full path of the archive.'), None)
109 parser.add_option('', 'add_files', 'list2', 'add_files',
110 _('Optional: The list of additional files to add to the archive.'), [])
111 parser.add_option('', 'without_properties', 'properties', 'without_properties',
112 _('Optional: Filter the products by their properties.\n\tSyntax: '
113 '--without_properties <property>:<value>'))
# NOTE(review): truncated excerpt — the per-name `for` loop header, the
# `already_added` set initialization and the opening `try:` line are on lines
# missing from this chunk; doc-only pass, code left byte-identical.
116 def add_files(tar, name_archive, d_content, logger, f_exclude=None):
117 '''Create an archive containing all directories and files that are given in
118 the d_content argument.
120 :param tar tarfile: The tarfile instance used to make the archive.
121 :param name_archive str: The name of the archive to make.
122 :param d_content dict: The dictionary that contain all directories and files
123 to add in the archive.
125 (path_on_local_machine, path_in_archive)
126 :param logger Logger: the logging instance
127 :param f_exclude Function: the function that filters
128 :return: 0 if success, 1 if not.
131 # get the max length of the messages in order to make the display
132 max_len = len(max(d_content.keys(), key=len))
135 # loop over each directory or file stored in the d_content dictionary
136 names = sorted(d_content.keys())
137 DBG.write("add tar names", names)
139 # used to avoid duplications (for pip install in python, or single_install_dir cases)
142 # display information
143 len_points = max_len - len(name) + 3
144 local_path, archive_path = d_content[name]
145 in_archive = os.path.join(name_archive, archive_path)
146 logger.write(name + " " + len_points * "." + " "+ in_archive + " ", 3)
147 # Get the local path and the path in archive
148 # of the directory or file to add
149 # Add it in the archive
# The local_path->in_archive key dedupes entries so the same tree is not
# added to the tar twice (pip / single_install_dir layouts share paths).
151 key=local_path+"->"+in_archive
152 if key not in already_added:
153 tar.add(local_path, arcname=in_archive, exclude=f_exclude)
154 already_added.add(key)
155 logger.write(src.printcolors.printcSuccess(_("OK")), 3)
156 except Exception as e:
157 logger.write(src.printcolors.printcError(_("KO ")), 3)
158 logger.write(str(e), 3)
160 logger.write("\n", 3)
def exclude_VCS_and_extensions(filename):
    """Tell whether *filename* must be excluded from a product archive.

    Used as the ``exclude`` callback of ``tarfile.TarFile.add``: any path
    containing a VCS bookkeeping directory listed in IGNORED_DIRS, or ending
    with one of IGNORED_EXTENSIONS, is filtered out of the package.

    :param filename str: The file name (path) to test.
    :return: True if the file has to be excluded from the archive.
    """
    # Defect fixed: the mangled source dropped the return statements of both
    # loops and the final fallthrough; they are restored here.
    for dir_name in IGNORED_DIRS:
        if dir_name in filename:
            return True
    for extension in IGNORED_EXTENSIONS:
        if filename.endswith(extension):
            return True
    return False
# NOTE(review): truncated excerpt — the remaining parameters of the def, the
# `else:` lines of the fhs / python3 / app-name branches, the FileEnvWriter
# and write_env_file argument lines, and the final chmod + return are on
# missing lines; doc-only pass, code left byte-identical.
179 def produce_relative_launcher(config,
184 '''Create a specific SALOME launcher for the binary package. This launcher
187 :param config Config: The global configuration.
188 :param logger Logger: the logging instance
189 :param file_dir str: the directory where to put the launcher
190 :param file_name str: The launcher name
191 :param binaries_dir_name str: the name of the repository where the binaries
193 :return: the path of the produced launcher
197 # get KERNEL installation path
198 kernel_root_dir = os.path.join(binaries_dir_name, "KERNEL")
200 # set kernel bin dir (considering fhs property)
201 kernel_cfg = src.product.get_product_config(config, "KERNEL")
202 if src.get_property_in_product_cfg(kernel_cfg, "fhs"):
203 bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin")
205 bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin","salome")
207 # check if the application contains an application module
208 # check also if the application has a distene product,
209 # in this case get its licence file name
210 l_product_info = src.product.get_products_infos(config.APPLICATION.products.keys(), config)
211 salome_application_name="Not defined"
212 distene_licence_file_name=False
213 for prod_name, prod_info in l_product_info:
214 # look for a "salome application" and a distene product
215 if src.get_property_in_product_cfg(prod_info, "is_distene") == "yes":
216 distene_licence_file_name = src.product.product_has_licence(prod_info,
217 config.PATHS.LICENCEPATH)
218 if src.get_property_in_product_cfg(prod_info, "is_salome_application") == "yes":
219 salome_application_name=prod_info.name
221 # if the application contains an application module, we set ABSOLUTE_APPLI_PATH to it
222 # if not we set it to KERNEL_INSTALL_DIR, which is sufficient, except for salome test
223 if salome_application_name == "Not defined":
224 app_root_dir=kernel_root_dir
226 app_root_dir=os.path.join(binaries_dir_name, salome_application_name)
# "out_dir_Path" is a placeholder kept literal here; the replace_in_file
# hacks below turn it into a runtime variable of the generated launcher.
229 additional_env['sat_bin_kernel_install_dir'] = "out_dir_Path + " +\
230 config.VARS.sep + bin_kernel_install_dir
231 if "python3" in config.APPLICATION and config.APPLICATION.python3 == "yes":
232 additional_env['sat_python_version'] = 3
234 additional_env['sat_python_version'] = 2
236 additional_env['ABSOLUTE_APPLI_PATH'] = "out_dir_Path" + config.VARS.sep + app_root_dir
238 # create an environment file writer
239 writer = src.environment.FileEnvWriter(config,
245 filepath = os.path.join(file_dir, file_name)
247 writer.write_env_file(filepath,
250 additional_env=additional_env,
251 no_path_init="False",
252 for_package = binaries_dir_name)
254 # Little hack to put out_dir_Path outside the strings
255 src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' )
256 src.replace_in_file(filepath, "'out_dir_Path + ", "out_dir_Path + '" )
258 # A hack to put a call to a file for distene licence.
259 # It does nothing to an application that has no distene product
260 if distene_licence_file_name:
261 logger.write("Application has a distene licence file! We use it in package launcher", 5)
262 hack_for_distene_licence(filepath, distene_licence_file_name)
264 # change the rights in order to make the file executable for everybody
# NOTE(review): truncated excerpt — `fileout` assignment, the loop body that
# records num_line, the early return when no Distene section exists, and the
# final write/close/rename are on missing lines; doc-only pass.
276 def hack_for_distene_licence(filepath, licence_file):
277 '''Replace the distene licence env variable by a call to a file.
279 :param filepath Str: The path to the launcher to modify.
# The launcher is rewritten in place: the original is moved aside to
# "<filepath>_old" and read back line by line.
281 shutil.move(filepath, filepath + "_old")
283 filein = filepath + "_old"
284 fin = open(filein, "r")
285 fout = open(fileout, "w")
286 text = fin.readlines()
287 # Find the Distene section
289 for i,line in enumerate(text):
290 if "# Set DISTENE License" in line:
294 # No distene product, there is nothing to do
# Drop the two lines following the Distene marker, then splice in a snippet
# that loads the licence file as a module and calls set_distene_variables.
# The snippet below is a single string literal (template filled with
# licence_file); it becomes code of the generated launcher, not of sat.
300 del text[num_line +1]
301 del text[num_line +1]
302 text_to_insert =""" try:
303 distene_licence_file="%s"
304 if sys.version_info[0] >= 3 and sys.version_info[1] >= 5:
305 import importlib.util
306 spec_dist = importlib.util.spec_from_file_location("distene_licence", distene_licence_file)
307 distene=importlib.util.module_from_spec(spec_dist)
308 spec_dist.loader.exec_module(distene)
311 distene = imp.load_source('distene_licence', distene_licence_file)
312 distene.set_distene_variables(context)
314 pass\n""" % licence_file
315 text.insert(num_line + 1, text_to_insert)
# NOTE(review): truncated excerpt — remaining def parameters, FileEnvWriter
# and write_env_file arguments, and the final chmod + return are on missing
# lines; doc-only pass, code left byte-identical.
322 def produce_relative_env_files(config,
326 '''Create some specific environment files for the binary package. These
327 files use relative paths.
329 :param config Config: The global configuration.
330 :param logger Logger: the logging instance
331 :param file_dir str: the directory where to put the files
332 :param binaries_dir_name str: the name of the repository where the binaries
334 :return: the list of path of the produced environment files
337 # create an environment file writer
338 writer = src.environment.FileEnvWriter(config,
# .bat on Windows, .sh elsewhere — same contract, platform-specific syntax.
343 if src.architecture.is_windows():
345 filename = "env_launch.bat"
348 filename = "env_launch.sh"
351 filepath = writer.write_env_file(filename,
354 for_package = binaries_dir_name)
356 # Little hack to put out_dir_Path as environment variable
357 src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )
359 # change the rights in order to make the file executable for everybody
# NOTE(review): truncated excerpt — remaining def parameters, the `for key in
# d_sub` loop header, the `d = {}` initialization and the final chmod + return
# are on missing lines; doc-only pass, code left byte-identical.
371 def produce_install_bin_file(config,
376 '''Create a bash shell script which does substitutions in the BINARIES dir
377 in order to use it for extra compilations.
379 :param config Config: The global configuration.
380 :param logger Logger: the logging instance
381 :param file_dir str: the directory where to put the files
382 :param d_sub, dict: the dictionary that contains the substitutions to be done
383 :param file_name str: the name of the install script file
384 :return: the produced file
388 filepath = os.path.join(file_dir, file_name)
389 # open the file and write into it
390 # use codec utf-8 as sat variables are in unicode
391 with codecs.open(filepath, "w", 'utf-8') as installbin_file:
392 installbin_template_path = os.path.join(config.VARS.internal_dir,
393 "INSTALL_BIN.template")
395 # build the name of the directory that will contain the binaries
396 binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
397 # build the substitution loop
# Assembles a grep/sed shell loop that rewrites each d_sub key to a
# $(pwd)-relative path inside the installed files.
398 loop_cmd = "for f in $(grep -RIl"
400 loop_cmd += " -e "+ key
401 loop_cmd += ' ' + config.INTERNAL.config.install_dir +\
404 loop_cmd += " s?" + key + "?$(pwd)/" + d_sub[key] + "?g\n"
405 loop_cmd += ' " $f\ndone'
408 d["BINARIES_DIR"] = binaries_dir_name
409 d["SUBSTITUTION_LOOP"]=loop_cmd
410 d["INSTALL_DIR"]=config.INTERNAL.config.install_dir
412 # substitute the template and write it in file
413 content=src.template.substitute(installbin_template_path, d)
414 installbin_file.write(content)
415 # change the rights in order to make the file executable for everybody
# NOTE(review): truncated excerpt — remaining def parameters, the
# `text_to_add` initialization, `continue`/`else:` lines of the product loop,
# ff.close() and the chmod mode/return are on missing lines; doc-only pass.
427 def product_appli_creation_script(config,
431 '''Create a script that can produce an application (EDF style) in the binary
434 :param config Config: The global configuration.
435 :param logger Logger: the logging instance
436 :param file_dir str: the directory where to put the file
437 :param binaries_dir_name str: the name of the repository where the binaries
439 :return: the path of the produced script file
442 template_name = "create_appli.py.for_bin_packages.template"
443 template_path = os.path.join(config.VARS.internal_dir, template_name)
444 text_to_fill = open(template_path, "r").read()
445 text_to_fill = text_to_fill.replace("TO BE FILLED 1",
446 '"' + binaries_dir_name + '"')
# Build one <module .../> XML line per SALOME module (cpp products expand to
# one line per generated component); the ''' + ... + ''' fragments are kept
# literal because the template itself is Python source being generated.
449 for product_name in get_SALOME_modules(config):
450 product_info = src.product.get_product_config(config, product_name)
452 if src.product.product_is_smesh_plugin(product_info):
455 if 'install_dir' in product_info and bool(product_info.install_dir):
456 if src.product.product_is_cpp(product_info):
458 for cpp_name in src.product.get_product_components(product_info):
459 line_to_add = ("<module name=\"" +
461 "\" gui=\"yes\" path=\"''' + "
462 "os.path.join(dir_bin_name, \"" +
463 cpp_name + "\") + '''\"/>")
466 line_to_add = ("<module name=\"" +
468 "\" gui=\"yes\" path=\"''' + "
469 "os.path.join(dir_bin_name, \"" +
470 product_name + "\") + '''\"/>")
471 text_to_add += line_to_add + "\n"
473 filled_text = text_to_fill.replace("TO BE FILLED 2", text_to_add)
475 tmp_file_path = os.path.join(file_dir, "create_appli.py")
476 ff = open(tmp_file_path, "w")
477 ff.write(filled_text)
480 # change the rights in order to make the file executable for everybody
481 os.chmod(tmp_file_path,
# NOTE(review): truncated excerpt — list initializations (l_install_dir,
# l_source_dir, l_not_installed, d_products), several `continue`/`else:`
# lines, the logger.write opening of the detar warning, the call-argument
# lines of the produce_* helpers and the final return are on missing lines;
# doc-only pass, code left byte-identical.
492 def binary_package(config, logger, options, tmp_working_dir):
493 '''Prepare a dictionary that stores all the needed directories and files to
494 add in a binary package.
496 :param config Config: The global configuration.
497 :param logger Logger: the logging instance
498 :param options OptResult: the options of the launched command
499 :param tmp_working_dir str: The temporary local directory containing some
500 specific directories or files needed in the
502 :return: the dictionary that stores all the needed directories and files to
503 add in a binary package.
504 {label : (path_on_local_machine, path_in_archive)}
508 # Get the list of product installation to add to the archive
509 l_products_name = sorted(config.APPLICATION.products.keys())
510 l_product_info = src.product.get_products_infos(l_products_name,
515 l_sources_not_present = []
516 generate_mesa_launcher = False # a flag to know if we generate a mesa launcher
517 if ("APPLICATION" in config and
518 "properties" in config.APPLICATION and
519 "mesa_launcher_in_package" in config.APPLICATION.properties and
520 config.APPLICATION.properties.mesa_launcher_in_package == "yes") :
521 generate_mesa_launcher=True
523 for prod_name, prod_info in l_product_info:
524 # skip product with property not_in_package set to yes
525 if src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes":
528 # Add the sources of the products that have the property
529 # sources_in_package : "yes"
530 if src.get_property_in_product_cfg(prod_info,
531 "sources_in_package") == "yes":
532 if os.path.exists(prod_info.source_dir):
533 l_source_dir.append((prod_name, prod_info.source_dir))
535 l_sources_not_present.append(prod_name)
537 # ignore the native and fixed products for install directories
538 if (src.product.product_is_native(prod_info)
539 or src.product.product_is_fixed(prod_info)
540 or not src.product.product_compiles(prod_info)):
542 if src.product.check_installation(config, prod_info):
543 l_install_dir.append((prod_name, prod_info.install_dir))
545 l_not_installed.append(prod_name)
547 # Add also the cpp generated modules (if any)
548 if src.product.product_is_cpp(prod_info):
550 for name_cpp in src.product.get_product_components(prod_info):
551 install_dir = os.path.join(config.APPLICATION.workdir,
552 config.INTERNAL.config.install_dir,
554 if os.path.exists(install_dir):
555 l_install_dir.append((name_cpp, install_dir))
557 l_not_installed.append(name_cpp)
559 # check the name of the directory that (could) contains the binaries
560 # from previous detar
561 binaries_from_detar = os.path.join(
562 config.APPLICATION.workdir,
563 config.INTERNAL.config.binary_dir + config.VARS.dist)
564 if os.path.exists(binaries_from_detar):
566 WARNING: existing binaries directory from previous detar installation:
568 To make new package from this, you have to:
569 1) install binaries in INSTALL directory with the script "install_bin.sh"
570 see README file for more details
571 2) or recompile everything in INSTALL with "sat compile" command
572 this step is long, and requires some linux packages to be installed
574 """ % binaries_from_detar)
576 # Print warning or error if there are some missing products
577 if len(l_not_installed) > 0:
578 text_missing_prods = ""
579 for p_name in l_not_installed:
580 text_missing_prods += "-" + p_name + "\n"
# With --force_creation the missing installations only warn; otherwise the
# packaging is aborted (the `return None` line is among the missing lines).
581 if not options.force_creation:
582 msg = _("ERROR: there are missing products installations:")
583 logger.write("%s\n%s" % (src.printcolors.printcError(msg),
588 msg = _("WARNING: there are missing products installations:")
589 logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
593 # Do the same for sources
594 if len(l_sources_not_present) > 0:
595 text_missing_prods = ""
596 for p_name in l_sources_not_present:
597 text_missing_prods += "-" + p_name + "\n"
598 if not options.force_creation:
599 msg = _("ERROR: there are missing products sources:")
600 logger.write("%s\n%s" % (src.printcolors.printcError(msg),
605 msg = _("WARNING: there are missing products sources:")
606 logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
610 # construct the name of the directory that will contain the binaries
611 binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
613 # construct the correlation table between the product names, their
614 # actual install directories and their install directory in archive
616 for prod_name, install_dir in l_install_dir:
617 path_in_archive = os.path.join(binaries_dir_name, os.path.basename(install_dir))
618 d_products[prod_name + " (bin)"] = (install_dir, path_in_archive)
620 for prod_name, source_dir in l_source_dir:
621 path_in_archive = os.path.join("SOURCES", prod_name)
622 d_products[prod_name + " (sources)"] = (source_dir, path_in_archive)
624 # for packages of SALOME applications including KERNEL,
625 # we produce a salome launcher or a virtual application (depending on salome version)
626 if 'KERNEL' in config.APPLICATION.products:
627 VersionSalome = src.get_salome_version(config)
628 # Case where SALOME has the launcher that uses the SalomeContext API
629 if VersionSalome >= 730:
630 # create the relative launcher and add it to the files to add
631 launcher_name = src.get_launcher_name(config)
632 launcher_package = produce_relative_launcher(config,
637 d_products["launcher"] = (launcher_package, launcher_name)
639 # if the application contains mesa products, we generate in addition to the
640 # classical salome launcher a launcher using mesa and called mesa_salome
641 # (the mesa launcher will be used for remote usage through ssh).
642 if generate_mesa_launcher:
643 #if there is one : store the use_mesa property
644 restore_use_mesa_option=None
645 if ('properties' in config.APPLICATION and
646 'use_mesa' in config.APPLICATION.properties):
647 restore_use_mesa_option = config.APPLICATION.properties.use_mesa
649 # activate mesa property, and generate a mesa launcher
650 src.activate_mesa_property(config) #activate use_mesa property
651 launcher_mesa_name="mesa_"+launcher_name
652 launcher_package_mesa = produce_relative_launcher(config,
657 d_products["launcher (mesa)"] = (launcher_package_mesa, launcher_mesa_name)
659 # if there was a use_mesa value, we restore it
660 # else we set it to the default value "no"
661 if restore_use_mesa_option != None:
662 config.APPLICATION.properties.use_mesa=restore_use_mesa_option
664 config.APPLICATION.properties.use_mesa="no"
667 # if we mix binaries and sources, we add a copy of the launcher,
668 # prefixed with "bin", in order to avoid clashes
669 d_products["launcher (copy)"] = (launcher_package, "bin"+launcher_name)
671 # Provide a script for the creation of an application EDF style
672 appli_script = product_appli_creation_script(config,
677 d_products["appli script"] = (appli_script, "create_appli.py")
679 # Put also the environment file
680 env_file = produce_relative_env_files(config,
685 if src.architecture.is_windows():
686 filename = "env_launch.bat"
688 filename = "env_launch.sh"
689 d_products["environment file"] = (env_file, filename)
# NOTE(review): truncated excerpt — the `d_archives_vcs = {}` fallback, the
# helper call-argument lines, the try/finally around the symlink creation and
# the final return of d_source are on missing lines; doc-only pass.
693 def source_package(sat, config, logger, options, tmp_working_dir):
694 '''Prepare a dictionary that stores all the needed directories and files to
695 add in a source package.
697 :param config Config: The global configuration.
698 :param logger Logger: the logging instance
699 :param options OptResult: the options of the launched command
700 :param tmp_working_dir str: The temporary local directory containing some
701 specific directories or files needed in the
703 :return: the dictionary that stores all the needed directories and files to
704 add in a source package.
705 {label : (path_on_local_machine, path_in_archive)}
710 # Get all the products that are prepared using an archive
711 # unless ftp mode is specified (in this case the user of the
712 # archive will get the sources through the ftp mode of sat prepare
714 logger.write("Find archive products ... ")
715 d_archives, l_pinfo_vcs = get_archives(config, logger)
716 logger.write("Done\n")
719 if not options.with_vcs and len(l_pinfo_vcs) > 0:
720 # Make archives with the products that are not prepared using an archive
721 # (git, cvs, svn, etc)
722 logger.write("Construct archives for vcs products ... ")
723 d_archives_vcs = get_archives_vcs(l_pinfo_vcs,
728 logger.write("Done\n")
731 logger.write("Create the project ... ")
732 d_project = create_project_for_src_package(config,
736 logger.write("Done\n")
# Ship a copy of salomeTools itself so the package is self-contained.
739 tmp_sat = add_salomeTools(config, tmp_working_dir)
740 d_sat = {"salomeTools" : (tmp_sat, "salomeTools")}
742 # Add a sat symbolic link if not win
743 if not src.architecture.is_windows():
744 tmp_satlink_path = os.path.join(tmp_working_dir, 'sat')
748 # In the jobs, os.getcwd() can fail
749 t = config.LOCAL.workdir
750 os.chdir(tmp_working_dir)
751 if os.path.lexists(tmp_satlink_path):
752 os.remove(tmp_satlink_path)
753 os.symlink(os.path.join('salomeTools', 'sat'), 'sat')
756 d_sat["sat link"] = (tmp_satlink_path, "sat")
758 d_source = src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
# NOTE(review): truncated excerpt — the d_archives/l_pinfo_vcs
# initializations and the `continue`/`else:` lines of the product loop are on
# missing lines; doc-only pass, code left byte-identical.
761 def get_archives(config, logger):
762 '''Find all the products that are get using an archive and all the products
763 that are get using a vcs (git, cvs, svn) repository.
765 :param config Config: The global configuration.
766 :param logger Logger: the logging instance
767 :return: the dictionary {name_product :
768 (local path of its archive, path in the package of its archive )}
769 and the list of specific configuration corresponding to the vcs
773 # Get the list of product informations
774 l_products_name = config.APPLICATION.products.keys()
775 l_product_info = src.product.get_products_infos(l_products_name,
779 for p_name, p_info in l_product_info:
780 # skip product with property not_in_package set to yes
781 if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
783 # ignore the native and fixed products
784 if (src.product.product_is_native(p_info)
785 or src.product.product_is_fixed(p_info)):
787 if p_info.get_source == "archive":
788 archive_path = p_info.archive_info.archive_name
789 archive_name = os.path.basename(archive_path)
790 d_archives[p_name] = (archive_path,
791 os.path.join(ARCHIVE_DIR, archive_name))
792 if (src.appli_test_property(config,"pip", "yes") and
793 src.product.product_test_property(p_info,"pip", "yes")):
794 # if pip mode is activated, and product is managed by pip
# pip products also embed their wheel; exactly one wheel matching
# <name>-<version>* must exist in LOCAL.archive_dir/wheels.
795 pip_wheels_dir=os.path.join(config.LOCAL.archive_dir,"wheels")
796 pip_wheel_pattern=os.path.join(pip_wheels_dir,
797 "%s-%s*" % (p_info.name, p_info.version))
798 pip_wheel_path=glob.glob(pip_wheel_pattern)
799 msg_pip_not_found="Error in get_archive, pip wheel for "\
800 "product %s-%s was not found in %s directory"
801 msg_pip_two_or_more="Error in get_archive, several pip wheels for "\
802 "product %s-%s were found in %s directory"
803 if len(pip_wheel_path)==0:
804 raise src.SatException(msg_pip_not_found %\
805 (p_info.name, p_info.version, pip_wheels_dir))
806 if len(pip_wheel_path)>1:
807 raise src.SatException(msg_pip_two_or_more %\
808 (p_info.name, p_info.version, pip_wheels_dir))
810 pip_wheel_name=os.path.basename(pip_wheel_path[0])
811 d_archives[p_name+" (pip wheel)"]=(pip_wheel_path[0],
812 os.path.join(ARCHIVE_DIR, "wheels", pip_wheel_name))
814 # this product is not managed by archive,
815 # an archive of the vcs directory will be created by get_archive_vcs
816 l_pinfo_vcs.append((p_name, p_info))
818 return d_archives, l_pinfo_vcs
# NOTE(review): truncated excerpt — the remaining os.path.join arguments of
# file_path, the remove call, and ff.close() are on missing lines; doc-only
# pass, code left byte-identical.
820 def add_salomeTools(config, tmp_working_dir):
821 '''Prepare a version of salomeTools that has a specific local.pyconf file
822 configured for a source package.
824 :param config Config: The global configuration.
825 :param tmp_working_dir str: The temporary local directory containing some
826 specific directories or files needed in the
828 :return: The path to the local salomeTools directory to add in the package
831 # Copy sat in the temporary working directory
832 sat_tmp_path = src.Path(os.path.join(tmp_working_dir, "salomeTools"))
833 sat_running_path = src.Path(config.VARS.salometoolsway)
834 sat_running_path.copy(sat_tmp_path)
836 # Update the local.pyconf file that contains the path to the project
837 local_pyconf_name = "local.pyconf"
838 local_pyconf_dir = os.path.join(tmp_working_dir, "salomeTools", "data")
839 local_pyconf_file = os.path.join(local_pyconf_dir, local_pyconf_name)
840 # Remove the .pyconf file in the root directory of salomeTools if there is
841 # any. (For example when launching jobs, a pyconf file describing the jobs
842 # can be here and is not useful)
843 files_or_dir_SAT = os.listdir(os.path.join(tmp_working_dir, "salomeTools"))
844 for file_or_dir in files_or_dir_SAT:
845 if file_or_dir.endswith(".pyconf") or file_or_dir.endswith(".txt"):
846 file_path = os.path.join(tmp_working_dir,
# Overwrite the copied local.pyconf with the package-specific template so the
# embedded sat resolves its project inside the archive.
851 ff = open(local_pyconf_file, "w")
852 ff.write(LOCAL_TEMPLATE)
855 return sat_tmp_path.path
# NOTE(review): truncated excerpt — the `import source` (or equivalent)
# bringing `source` into scope and the `d_archives_vcs = {}` initialization
# are on missing lines; doc-only pass, code left byte-identical.
857 def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir):
858 '''For sources package that require that all products are get using an
859 archive, one has to create some archive for the vcs products.
860 So this method calls the clean and source command of sat and then create
863 :param l_pinfo_vcs List: The list of specific configuration corresponding to
865 :param sat Sat: The Sat instance that can be called to clean and source the
867 :param config Config: The global configuration.
868 :param logger Logger: the logging instance
869 :param tmp_working_dir str: The temporary local directory containing some
870 specific directories or files needed in the
872 :return: the dictionary that stores all the archives to add in the source
873 package. {label : (path_on_local_machine, path_in_archive)}
876 # clean the source directory of all the vcs products, then use the source
877 # command and thus construct an archive that will not contain the patches
878 l_prod_names = [pn for pn, __ in l_pinfo_vcs]
# Dead branch kept deliberately: `sat clean` on user SOURCES was judged too
# dangerous, so sources are fetched into a dedicated tmp dir instead.
879 if False: # clean is dangerous in user/SOURCES, fixed in tmp_local_working_dir
880 logger.write(_("\nclean sources\n"))
881 args_clean = config.VARS.application
882 args_clean += " --sources --products "
883 args_clean += ",".join(l_prod_names)
884 logger.write("WARNING: get_archives_vcs clean\n '%s'\n" % args_clean, 1)
885 sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)
888 logger.write(_("get sources\n"))
889 args_source = config.VARS.application
890 args_source += " --products "
891 args_source += ",".join(l_prod_names)
# The application workdir is temporarily redirected so `source` downloads
# into tmp_package; it is restored below before returning.
892 svgDir = sat.cfg.APPLICATION.workdir
893 tmp_local_working_dir = os.path.join(sat.cfg.APPLICATION.workdir, "tmp_package") # to avoid too much big files in /tmp
894 sat.cfg.APPLICATION.workdir = tmp_local_working_dir
895 # DBG.write("SSS sat config.APPLICATION.workdir", sat.cfg.APPLICATION, True)
896 # DBG.write("sat config id", id(sat.cfg), True)
# NOTE: sat.source() would act on a config with a different id(), so the
# source command is invoked directly with the runner's cfg as reference.
898 # sat.source(args_source, batch=True, verbose=5, logger_add_link = logger)
900 source.run(args_source, sat, logger) #use this mode as runner.cfg reference
902 # make the new archives
904 for pn, pinfo in l_pinfo_vcs:
905 path_archive = make_archive(pn, pinfo, tmp_local_working_dir)
906 logger.write("make archive vcs '%s'\n" % path_archive)
907 d_archives_vcs[pn] = (path_archive,
908 os.path.join(ARCHIVE_DIR, pn + ".tgz"))
909 sat.cfg.APPLICATION.workdir = svgDir
910 # DBG.write("END sat config", sat.cfg.APPLICATION, True)
911 return d_archives_vcs
# NOTE(review): truncated excerpt — the `arcname=` argument of tar_prod.add
# and the tar_prod.close() call are on missing lines; doc-only pass.
913 def make_archive(prod_name, prod_info, where):
914 '''Create an archive of a product by searching its source directory.
916 :param prod_name str: The name of the product.
917 :param prod_info Config: The specific configuration corresponding to the
919 :param where str: The path of the repository where to put the resulting
921 :return: The path of the resulting archive
924 path_targz_prod = os.path.join(where, prod_name + ".tgz")
925 tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
926 local_path = prod_info.source_dir
# NOTE(review): TarFile.add's `exclude` parameter was removed in Python 3.12
# (replaced by `filter`) — confirm the supported interpreter range.
927 tar_prod.add(local_path,
929 exclude=exclude_VCS_and_extensions)
931 return path_targz_prod
# NOTE(review): truncated excerpt — the second os.path.join argument of each
# *_tmp_dir, ftp_path/licence_path writes, ff.close(), `continue` of the
# product loop, remaining find_product_scripts_and_pyconf arguments and the
# final return are on missing lines; doc-only pass, code left byte-identical.
933 def create_project_for_src_package(config, tmp_working_dir, with_vcs, with_ftp):
934 '''Create a specific project for a source package.
936 :param config Config: The global configuration.
937 :param tmp_working_dir str: The temporary local directory containing some
938 specific directories or files needed in the
940 :param with_vcs boolean: True if the package is with vcs products (not
941 transformed into archive products)
942 :param with_ftp boolean: True if the package use ftp servers to get archives
943 :return: The dictionary
944 {"project" : (produced project, project path in the archive)}
948 # Create in the working temporary directory the full project tree
949 project_tmp_dir = os.path.join(tmp_working_dir, PROJECT_DIR)
950 products_pyconf_tmp_dir = os.path.join(project_tmp_dir,
952 compil_scripts_tmp_dir = os.path.join(project_tmp_dir,
955 env_scripts_tmp_dir = os.path.join(project_tmp_dir,
958 patches_tmp_dir = os.path.join(project_tmp_dir,
961 application_tmp_dir = os.path.join(project_tmp_dir,
963 for directory in [project_tmp_dir,
964 compil_scripts_tmp_dir,
967 application_tmp_dir]:
968 src.ensure_path_exists(directory)
970 # Create the pyconf that contains the information of the project
971 project_pyconf_name = "project.pyconf"
972 project_pyconf_file = os.path.join(project_tmp_dir, project_pyconf_name)
973 ff = open(project_pyconf_file, "w")
974 ff.write(PROJECT_TEMPLATE)
# Optionally append ARCHIVEFTP / LICENCEPATH entries (":"-separated lists)
# to the generated project.pyconf.
975 if with_ftp and len(config.PATHS.ARCHIVEFTP) > 0:
976 ftp_path='ARCHIVEFTP : "'+config.PATHS.ARCHIVEFTP[0]
977 for ftpserver in config.PATHS.ARCHIVEFTP[1:]:
978 ftp_path=ftp_path+":"+ftpserver
980 ff.write("# ftp servers where to search for prerequisite archives\n")
982 # add licence paths if any
983 if len(config.PATHS.LICENCEPATH) > 0:
984 licence_path='LICENCEPATH : "'+config.PATHS.LICENCEPATH[0]
985 for path in config.PATHS.LICENCEPATH[1:]:
986 licence_path=licence_path+":"+path
988 ff.write("\n# Where to search for licences\n")
989 ff.write(licence_path)
994 # Loop over the products to get their pyconf and all the scripts
995 # (compilation, environment, patches)
996 # and create the pyconf file to add to the project
997 lproducts_name = config.APPLICATION.products.keys()
998 l_products = src.product.get_products_infos(lproducts_name, config)
999 for p_name, p_info in l_products:
1000 # skip product with property not_in_package set to yes
1001 if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
1003 find_product_scripts_and_pyconf(p_name,
1007 compil_scripts_tmp_dir,
1008 env_scripts_tmp_dir,
1010 products_pyconf_tmp_dir)
1012 find_application_pyconf(config, application_tmp_dir)
1014 d_project = {"project" : (project_tmp_dir, PROJECT_DIR )}
def find_product_scripts_and_pyconf(p_name,
                                    compil_scripts_tmp_dir,
                                    env_scripts_tmp_dir,
                                    products_pyconf_tmp_dir):
    '''Create a specific pyconf file for a given product. Get its environment
    script, its compilation script and patches and put it in the temporary
    working directory. This method is used in the source package in order to
    construct the specific project.

    :param p_name str: The name of the product.
    :param p_info Config: The specific configuration corresponding to the
                          product.
    :param config Config: The global configuration.
    :param with_vcs boolean: True if the package is with vcs products (not
                             transformed into archive products)
    :param compil_scripts_tmp_dir str: The path to the temporary compilation
                                       scripts directory of the project.
    :param env_scripts_tmp_dir str: The path to the temporary environment script
                                    directory of the project.
    :param patches_tmp_dir str: The path to the temporary patch scripts
                                directory of the project.
    :param products_pyconf_tmp_dir str: The path to the temporary product
                                        scripts directory of the project.
    '''
    # read the pyconf of the product
    product_pyconf_path = src.find_file_in_lpath(p_name + ".pyconf",
                                                 config.PATHS.PRODUCTPATH)
    product_pyconf_cfg = src.pyconf.Config(product_pyconf_path)

    # find the compilation script if any
    if src.product.product_has_script(p_info):
        compil_script_path = src.Path(p_info.compil_script)
        compil_script_path.copy(compil_scripts_tmp_dir)
        # only the basename is kept in the exported pyconf: the script now
        # lives in the project's own scripts directory
        product_pyconf_cfg[p_info.section].compil_script = os.path.basename(
                                                        p_info.compil_script)
    # find the environment script if any
    if src.product.product_has_env_script(p_info):
        env_script_path = src.Path(p_info.environ.env_script)
        env_script_path.copy(env_scripts_tmp_dir)
        product_pyconf_cfg[p_info.section].environ.env_script = os.path.basename(
                                                    p_info.environ.env_script)
    # find the patches if any
    if src.product.product_has_patches(p_info):
        patches = src.pyconf.Sequence()
        for patch_path in p_info.patches:
            p_path = src.Path(patch_path)
            p_path.copy(patches_tmp_dir)
            patches.append(os.path.basename(patch_path), "")

        product_pyconf_cfg[p_info.section].patches = patches

    # put in the pyconf file the resolved values
    for info in ["git_info", "cvs_info", "svn_info"]:
        for key in p_info[info]:
            product_pyconf_cfg[p_info.section][info][key] = p_info[

    # if the product is not archive, then make it become archive.
    if src.product.product_is_vcs(p_info):
        product_pyconf_cfg[p_info.section].get_source = "archive"
        if not "archive_info" in product_pyconf_cfg[p_info.section]:
            product_pyconf_cfg[p_info.section].addMapping("archive_info",
                                        src.pyconf.Mapping(product_pyconf_cfg),
        # the archive of a vcs product is named <product>.tgz in the package
        product_pyconf_cfg[p_info.section
                           ].archive_info.archive_name = p_info.name + ".tgz"

    # write the pyconf file to the temporary project location
    product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir,
    ff = open(product_tmp_pyconf_path, 'w')
    ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
    product_pyconf_cfg.__save__(ff, 1)
def find_application_pyconf(config, application_tmp_dir):
    '''Find the application pyconf file and put it in the specific temporary
    directory containing the specific project of a source package.

    :param config Config: The global configuration.
    :param application_tmp_dir str: The path to the temporary application
                                    scripts directory of the project.
    '''
    # read the pyconf of the application
    application_name = config.VARS.application
    application_pyconf_path = src.find_file_in_lpath(
                                            application_name + ".pyconf",
                                            config.PATHS.APPLICATIONPATH)
    application_pyconf_cfg = src.pyconf.Config(application_pyconf_path)

    # Change the workdir: in the package, the application works relative to
    # the embedded salomeTools installation
    application_pyconf_cfg.APPLICATION.workdir = src.pyconf.Reference(
                                    application_pyconf_cfg,
                                    'VARS.salometoolsway + $VARS.sep + ".."')

    # Prevent from compilation in base
    application_pyconf_cfg.APPLICATION.no_base = "yes"

    #remove products that are not in config (which were filtered by --without_properties)
    for product_name in application_pyconf_cfg.APPLICATION.products.keys():
        if product_name not in config.APPLICATION.products.keys():
            application_pyconf_cfg.APPLICATION.products.__delitem__(product_name)

    # write the pyconf file to the temporary application location
    application_tmp_pyconf_path = os.path.join(application_tmp_dir,
                                               application_name + ".pyconf")

    ff = open(application_tmp_pyconf_path, 'w')
    ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
    application_pyconf_cfg.__save__(ff, 1)
def sat_package(config, tmp_working_dir, options, logger):
    '''Prepare a dictionary that stores all the needed directories and files to
    add in a salomeTool package.

    :param config Config: The global configuration.
    :param tmp_working_dir str: The temporary local working directory
    :param options OptResult: the options of the launched command
    :param logger Logger: The logging instance to use for writing messages.
    :return: the dictionary that stores all the needed directories and files to
             add in a salomeTool package.
             {label : (path_on_local_machine, path_in_archive)}
    :rtype: dict
    '''

    # we include sat himself
    d_project["all_sat"]=(config.VARS.salometoolsway, "")

    # and we overwrite local.pyconf with a clean version.
    local_pyconf_tmp_path = os.path.join(tmp_working_dir, "local.pyconf")
    local_file_path = os.path.join(config.VARS.datadir, "local.pyconf")
    local_cfg = src.pyconf.Config(local_file_path)
    local_cfg.PROJECTS.project_file_paths=src.pyconf.Sequence(local_cfg.PROJECTS)
    # reset machine-specific settings so the packaged sat is self-contained
    local_cfg.LOCAL["base"] = "default"
    local_cfg.LOCAL["workdir"] = "default"
    local_cfg.LOCAL["log_dir"] = "default"
    local_cfg.LOCAL["archive_dir"] = "default"
    local_cfg.LOCAL["VCS"] = "None"
    local_cfg.LOCAL["tag"] = src.get_salometool_version(config)

    # if the archive contains a project, we write its relative path in local.pyconf
        project_arch_path = os.path.join("projects", options.project,
                                 os.path.basename(options.project_file_path))
        local_cfg.PROJECTS.project_file_paths.append(project_arch_path, "")

    ff = open(local_pyconf_tmp_path, 'w')
    local_cfg.__save__(ff, 1)

    d_project["local.pyconf"]=(local_pyconf_tmp_path, "data/local.pyconf")
def project_package(config, name_project, project_file_path, ftp_mode, tmp_working_dir, embedded_in_sat, logger):
    '''Prepare a dictionary that stores all the needed directories and files to
    add in a project package.

    :param config Config: The global configuration.
    :param name_project str: The name of the project to package.
    :param project_file_path str: The path to the local project.
    :param ftp_mode boolean: Do not embed archives, the archive will rely on ftp mode to retrieve them.
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
                                project package.
    :param embedded_in_sat boolean : the project package is embedded in a sat package
    :param logger Logger: The logging instance to use for writing messages.
    :return: the dictionary that stores all the needed directories and files to
             add in a project package.
             {label : (path_on_local_machine, path_in_archive)}
    :rtype: dict
    '''
    # Read the project file and get the directories to add to the package.
    # First try the already-loaded configuration, fall back on reading the
    # pyconf file from disk.
      project_pyconf_cfg = config.PROJECTS.projects.__getattr__(name_project)
WARNING: inexisting config.PROJECTS.projects.%s, try to read now from:\n%s\n""" % (name_project, project_file_path))
      project_pyconf_cfg = src.pyconf.Config(project_file_path)
      project_pyconf_cfg.PWD = os.path.dirname(project_file_path)

    # map: pyconf path key -> directory name inside the archive
    paths = {"APPLICATIONPATH" : "applications",
             "PRODUCTPATH" : "products",
             "MACHINEPATH" : "machines"}
        paths["ARCHIVEPATH"] = "archives"

    # Loop over the project paths and add it
    project_file_name = os.path.basename(project_file_path)
        if path not in project_pyconf_cfg:
            dest_path = os.path.join("projects", name_project, paths[path])
            project_file_dest = os.path.join("projects", name_project, project_file_name)
            dest_path = paths[path]
            project_file_dest = project_file_name

        # Add the directory to the files to add in the package
        d_project[path] = (project_pyconf_cfg[path], dest_path)

        # Modify the value of the path in the package
        project_pyconf_cfg[path] = src.pyconf.Reference(
                                    'project_path + "/' + paths[path] + '"')

    # Modify some values
    if "project_path" not in project_pyconf_cfg:
        project_pyconf_cfg.addMapping("project_path",
                                      src.pyconf.Mapping(project_pyconf_cfg),
    project_pyconf_cfg.project_path = src.pyconf.Reference(project_pyconf_cfg,

    # we don't want to export these two fields
    project_pyconf_cfg.__delitem__("file_path")
    project_pyconf_cfg.__delitem__("PWD")
        project_pyconf_cfg.__delitem__("ARCHIVEPATH")

    # Write the project pyconf file
    project_pyconf_tmp_path = os.path.join(tmp_working_dir, project_file_name)
    ff = open(project_pyconf_tmp_path, 'w')
    ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
    project_pyconf_cfg.__save__(ff, 1)

    d_project["Project hat file"] = (project_pyconf_tmp_path, project_file_dest)
def add_readme(config, options, where):
    '''Generate the README file of the package and return its path.

    The header is common; the following sections depend on which package
    options (binaries, sources, project, sat) were selected.

    :param config Config: The global configuration.
    :param options OptResult: The options of the launched command.
    :param where str: The directory in which to write the README file.
    '''
    readme_path = os.path.join(where, "README")
    with codecs.open(readme_path, "w", 'utf-8') as f:

    # templates for building the header
# This package was generated with sat $version
# Distribution : $dist

In the following, $$ROOT represents the directory where you have installed
SALOME (the directory where this file is located).

        readme_compilation_with_binaries="""

compilation based on the binaries used as prerequisites
=======================================================

If you fail to compile the complete application (for example because
you are not root on your system and cannot install missing packages), you
may try a partial compilation based on the binaries.
For that it is necessary to copy the binaries from BINARIES to INSTALL,
and do some substitutions on cmake and .la files (replace the build directories
The procedure to do it is:
 1) Remove or rename INSTALL directory if it exists
 2) Execute the shell script install_bin.sh:
 3) Use SalomeTool (as explained in Sources section) and compile only the
    modules you need to (with -p option)

        readme_header_tpl=string.Template(readme_header)
        readme_template_path_bin = os.path.join(config.VARS.internal_dir,
                                                "README_BIN.template")
        readme_template_path_bin_launcher = os.path.join(config.VARS.internal_dir,
                                                "README_LAUNCHER.template")
        readme_template_path_bin_virtapp = os.path.join(config.VARS.internal_dir,
                                                "README_BIN_VIRTUAL_APP.template")
        readme_template_path_src = os.path.join(config.VARS.internal_dir,
                                                "README_SRC.template")
        readme_template_path_pro = os.path.join(config.VARS.internal_dir,
                                                "README_PROJECT.template")
        readme_template_path_sat = os.path.join(config.VARS.internal_dir,
                                                "README_SAT.template")

        # prepare substitution dictionary for the templates
        d['user'] = config.VARS.user
        d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
        d['version'] = src.get_salometool_version(config)
        d['dist'] = config.VARS.dist
        f.write(readme_header_tpl.substitute(d)) # write the general header (common)

        if options.binaries or options.sources:
            d['application'] = config.VARS.application
            f.write("# Application: " + d['application'] + "\n")
            if 'KERNEL' in config.APPLICATION.products:
                VersionSalome = src.get_salome_version(config)
                # Case where SALOME has the launcher that uses the SalomeContext API
                if VersionSalome >= 730:
                    d['launcher'] = config.APPLICATION.profile.launcher_name
                    d['virtual_app'] = 'runAppli' # this info is not used now)

        # write the specific sections
        if options.binaries:
            f.write(src.template.substitute(readme_template_path_bin, d))
            if "virtual_app" in d:
                f.write(src.template.substitute(readme_template_path_bin_virtapp, d))
                f.write(src.template.substitute(readme_template_path_bin_launcher, d))

            f.write(src.template.substitute(readme_template_path_src, d))

        if options.binaries and options.sources:
            f.write(readme_compilation_with_binaries)

            f.write(src.template.substitute(readme_template_path_pro, d))

            f.write(src.template.substitute(readme_template_path_sat, d))
def update_config(config, prop, value):
    '''Remove from config.APPLICATION.products the products that have the property given as input.

    :param config Config: The global config.
    :param prop str: The property to filter
    :param value str: The value of the property to filter
    '''
    # if there is no APPLICATION (ex sat package -t) : nothing to do
    if "APPLICATION" not in config:
        return
    # Collect matching products first, then delete: never mutate the
    # mapping while iterating over its keys.
    doomed = [name
              for name in config.APPLICATION.products.keys()
              if src.get_property_in_product_cfg(
                     src.product.get_product_config(config, name),
                     prop) == value]
    for name in doomed:
        config.APPLICATION.products.__delitem__(name)
1364 '''method that is called when salomeTools is called with --help option.
1366 :return: The text to display for the package command description.
1370 The package command creates a tar file archive of a product.
1371 There are four kinds of archive, which can be mixed:
1373 1 - The binary archive.
1374 It contains the product installation directories plus a launcher.
1375 2 - The sources archive.
1376 It contains the product archives, a project (the application plus salomeTools).
1377 3 - The project archive.
1378 It contains a project (give the project file path as argument).
1379 4 - The salomeTools archive.
1380 It contains code utility salomeTools.
1383 >> sat package SALOME-master --binaries --sources""")
1385 def run(args, runner, logger):
1386 '''method that is called when salomeTools is called with package parameter.
1390 (options, args) = parser.parse_args(args)
1392 # Check that a type of package is called, and only one
1393 all_option_types = (options.binaries,
1395 options.project not in ["", None],
1398 # Check if no option for package type
1399 if all_option_types.count(True) == 0:
1400 msg = _("Error: Precise a type for the package\nUse one of the "
1401 "following options: --binaries, --sources, --project or"
1403 logger.write(src.printcolors.printcError(msg), 1)
1404 logger.write("\n", 1)
1407 # The repository where to put the package if not Binary or Source
1408 package_default_path = runner.cfg.LOCAL.workdir
1410 # if the package contains binaries or sources:
1411 if options.binaries or options.sources:
1412 # Check that the command has been called with an application
1413 src.check_config_has_application(runner.cfg)
1415 # Display information
1416 logger.write(_("Packaging application %s\n") % src.printcolors.printcLabel(
1417 runner.cfg.VARS.application), 1)
1419 # Get the default directory where to put the packages
1420 package_default_path = os.path.join(runner.cfg.APPLICATION.workdir, "PACKAGE")
1421 src.ensure_path_exists(package_default_path)
1423 # if the package contains a project:
1425 # check that the project is visible by SAT
1426 projectNameFile = options.project + ".pyconf"
1428 for i in runner.cfg.PROJECTS.project_file_paths:
1429 baseName = os.path.basename(i)
1430 if baseName == projectNameFile:
1434 if foundProject is None:
1435 local_path = os.path.join(runner.cfg.VARS.salometoolsway, "data", "local.pyconf")
1436 msg = _("""ERROR: the project %(1)s is not visible by salomeTools.
1440 Please add it in file:
1442 {"1": options.project, "2": "\n ".join(runner.cfg.PROJECTS.project_file_paths), "3": local_path})
1443 logger.write(src.printcolors.printcError(msg), 1)
1444 logger.write("\n", 1)
1447 options.project_file_path = foundProject
1448 src.printcolors.print_value(logger, "Project path", options.project_file_path, 2)
1450 # Remove the products that are filtered by the --without_properties option
1451 if options.without_properties:
1452 app = runner.cfg.APPLICATION
1453 logger.trace("without_properties all products:\n %s\n" % PP.pformat(sorted(app.products.keys())))
1454 prop, value = options.without_properties
1455 update_config(runner.cfg, prop, value)
1456 logger.warning("without_properties selected products:\n %s\n" % PP.pformat(sorted(app.products.keys())))
1458 # Remove from config the products that have the not_in_package property
1459 update_config(runner.cfg, "not_in_package", "yes")
1461 # get the name of the archive or build it
1463 if os.path.basename(options.name) == options.name:
1464 # only a name (not a path)
1465 archive_name = options.name
1466 dir_name = package_default_path
1468 archive_name = os.path.basename(options.name)
1469 dir_name = os.path.dirname(options.name)
1471 # suppress extension
1472 if archive_name[-len(".tgz"):] == ".tgz":
1473 archive_name = archive_name[:-len(".tgz")]
1474 if archive_name[-len(".tar.gz"):] == ".tar.gz":
1475 archive_name = archive_name[:-len(".tar.gz")]
1479 dir_name = package_default_path
1480 if options.binaries or options.sources:
1481 archive_name = runner.cfg.APPLICATION.name
1483 if options.binaries:
1484 archive_name += "-"+runner.cfg.VARS.dist
1487 archive_name += "-SRC"
1488 if options.with_vcs:
1489 archive_name += "-VCS"
1492 archive_name += ("salomeTools_" + src.get_salometool_version(runner.cfg))
1497 project_name = options.project
1498 archive_name += ("satproject_" + project_name)
1500 if len(archive_name)==0: # no option worked
1501 msg = _("Error: Cannot name the archive\n"
1502 " check if at least one of the following options was "
1503 "selected : --binaries, --sources, --project or"
1505 logger.write(src.printcolors.printcError(msg), 1)
1506 logger.write("\n", 1)
1509 path_targz = os.path.join(dir_name, archive_name + ".tgz")
1511 src.printcolors.print_value(logger, "Package path", path_targz, 2)
1513 # Create a working directory for all files that are produced during the
1514 # package creation and that will be removed at the end of the command
1515 tmp_working_dir = os.path.join(runner.cfg.VARS.tmp_root, runner.cfg.VARS.datehour)
1516 src.ensure_path_exists(tmp_working_dir)
1517 logger.write("\n", 5)
1518 logger.write(_("The temporary working directory: %s\n" % tmp_working_dir),5)
1520 logger.write("\n", 3)
1522 msg = _("Preparation of files to add to the archive")
1523 logger.write(src.printcolors.printcLabel(msg), 2)
1524 logger.write("\n", 2)
1526 d_files_to_add={} # content of the archive
1528 # a dict to hold paths that will need to be substitute for users recompilations
1529 d_paths_to_substitute={}
1531 if options.binaries:
1532 d_bin_files_to_add = binary_package(runner.cfg,
1536 # for all binaries dir, store the substitution that will be required
1537 # for extra compilations
1538 for key in d_bin_files_to_add:
1539 if key.endswith("(bin)"):
1540 source_dir = d_bin_files_to_add[key][0]
1541 path_in_archive = d_bin_files_to_add[key][1].replace(
1542 runner.cfg.INTERNAL.config.binary_dir + runner.cfg.VARS.dist,
1543 runner.cfg.INTERNAL.config.install_dir)
1544 if os.path.basename(source_dir)==os.path.basename(path_in_archive):
1545 # if basename is the same we will just substitute the dirname
1546 d_paths_to_substitute[os.path.dirname(source_dir)]=\
1547 os.path.dirname(path_in_archive)
1549 d_paths_to_substitute[source_dir]=path_in_archive
1551 d_files_to_add.update(d_bin_files_to_add)
1554 d_files_to_add.update(source_package(runner,
1559 if options.binaries:
1560 # for archives with bin and sources we provide a shell script able to
1561 # install binaries for compilation
1562 file_install_bin=produce_install_bin_file(runner.cfg,logger,
1564 d_paths_to_substitute,
1566 d_files_to_add.update({"install_bin" : (file_install_bin, "install_bin.sh")})
1567 logger.write("substitutions that need to be done later : \n", 5)
1568 logger.write(str(d_paths_to_substitute), 5)
1569 logger.write("\n", 5)
1571 # --salomeTool option is not considered when --sources is selected, as this option
1572 # already brings salomeTool!
1574 d_files_to_add.update(sat_package(runner.cfg, tmp_working_dir,
1578 DBG.write("config for package %s" % project_name, runner.cfg)
1579 d_files_to_add.update(project_package(runner.cfg, project_name, options.project_file_path, options.ftp, tmp_working_dir, options.sat, logger))
1581 if not(d_files_to_add):
1582 msg = _("Error: Empty dictionnary to build the archive!\n")
1583 logger.write(src.printcolors.printcError(msg), 1)
1584 logger.write("\n", 1)
1587 # Add the README file in the package
1588 local_readme_tmp_path = add_readme(runner.cfg, options, tmp_working_dir)
1589 d_files_to_add["README"] = (local_readme_tmp_path, "README")
1591 # Add the additional files of option add_files
1592 if options.add_files:
1593 for file_path in options.add_files:
1594 if not os.path.exists(file_path):
1595 msg = _("WARNING: the file %s is not accessible.\n" % file_path)
1597 file_name = os.path.basename(file_path)
1598 d_files_to_add[file_name] = (file_path, file_name)
1600 logger.write("\n", 2)
1601 logger.write(src.printcolors.printcLabel(_("Actually do the package")), 2)
1602 logger.write("\n", 2)
1603 logger.write("\nfiles and directories to add:\n%s\n\n" % PP.pformat(d_files_to_add), 5)
1607 # Creating the object tarfile
1608 tar = tarfile.open(path_targz, mode='w:gz')
1610 # get the filtering function if needed
1611 filter_function = exclude_VCS_and_extensions
1613 # Add the files to the tarfile object
1614 res = add_files(tar, archive_name, d_files_to_add, logger, f_exclude=filter_function)
1616 except KeyboardInterrupt:
1617 logger.write(src.printcolors.printcError("\nERROR: forced interruption\n"), 1)
1618 logger.write(_("Removing the temporary working directory '%s'... ") % tmp_working_dir, 1)
1619 # remove the working directory
1620 shutil.rmtree(tmp_working_dir)
1621 logger.write(_("OK"), 1)
1622 logger.write(_("\n"), 1)
1625 # case if no application, only package sat as 'sat package -t'
1627 app = runner.cfg.APPLICATION
1631 # unconditionaly remove the tmp_local_working_dir
1633 tmp_local_working_dir = os.path.join(app.workdir, "tmp_package")
1634 if os.path.isdir(tmp_local_working_dir):
1635 shutil.rmtree(tmp_local_working_dir)
1637 # remove the tmp directory, unless user has registered as developer
1638 if os.path.isdir(tmp_working_dir) and (not DBG.isDeveloper()):
1639 shutil.rmtree(tmp_working_dir)
1641 # Print again the path of the package
1642 logger.write("\n", 2)
1643 src.printcolors.print_value(logger, "Package path", path_targz, 2)