3 # Copyright (C) 2010-2012 CEA/DEN
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 # Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
31 from application import get_SALOME_modules
32 import src.debug as DBG
# Name of the sub-directory, inside a package, that holds the product archives.
ARCHIVE_DIR = "ARCHIVES"
# Name of the sub-directory, inside a package, that holds the generated project.
PROJECT_DIR = "PROJECT"

# Directory names filtered out when archiving product sources (VCS metadata).
IGNORED_DIRS = [".git", ".svn"]
# File extensions filtered out when archiving product sources (none by default).
IGNORED_EXTENSIONS = []

# The extension we use for the packages
PACKAGE_EXT = ".tar.gz"
47 PROJECT_TEMPLATE = """#!/usr/bin/env python
50 # The path to the archive root directory
51 root_path : $PWD + "/../"
53 project_path : $PWD + "/"
55 # Where to search the archives of the products
56 ARCHIVEPATH : $root_path + "ARCHIVES"
57 # Where to search the pyconf of the applications
58 APPLICATIONPATH : $project_path + "applications/"
59 # Where to search the pyconf of the products
60 PRODUCTPATH : $project_path + "products/"
61 # Where to search the pyconf of the jobs of the project
62 JOBPATH : $project_path + "jobs/"
63 # Where to search the pyconf of the machines of the project
64 MACHINEPATH : $project_path + "machines/"
67 LOCAL_TEMPLATE = ("""#!/usr/bin/env python
75 archive_dir : 'default'
82 project_file_paths : [$VARS.salometoolsway + $VARS.sep + \"..\" + $VARS.sep"""
83 """ + \"""" + PROJECT_DIR + """\" + $VARS.sep + "project.pyconf"]
# Define all possible options for the package command: sat package <options>
parser = src.options.Options()
parser.add_option('b', 'binaries', 'boolean', 'binaries',
    _('Optional: Produce a binary package.'), False)
parser.add_option('f', 'force_creation', 'boolean', 'force_creation',
    _('Optional: Only binary package: produce the archive even if '
      'there are some missing products.'), False)
parser.add_option('s', 'sources', 'boolean', 'sources',
    _('Optional: Produce a compilable archive of the sources of the '
      'application.'), False)
parser.add_option('', 'with_vcs', 'boolean', 'with_vcs',
    _('Optional: Do not make archive for products in VCS mode (git, cvs, svn). '
      'Sat prepare will use VCS mode instead to retrieve them'),
    False)
parser.add_option('', 'ftp', 'boolean', 'ftp',
    _('Optional: Do not embed archives for products in archive mode.'
      'Sat prepare will use ftp instead to retrieve them'),
    False)
parser.add_option('p', 'project', 'string', 'project',
    _('Optional: Produce an archive that contains a project.'), "")
parser.add_option('t', 'salometools', 'boolean', 'sat',
    _('Optional: Produce an archive that contains salomeTools.'), False)
parser.add_option('n', 'name', 'string', 'name',
    _('Optional: The name or full path of the archive.'), None)
parser.add_option('', 'add_files', 'list2', 'add_files',
    _('Optional: The list of additional files to add to the archive.'), [])
parser.add_option('', 'without_properties', 'properties', 'without_properties',
    _('Optional: Filter the products by their properties.\n\tSyntax: '
      '--without_properties <property>:<value>'))
def add_files(tar, name_archive, d_content, logger, f_exclude=None):
    '''Create an archive containing all directories and files that are given in
       the d_content argument.

    :param tar tarfile: The tarfile instance used to make the archive.
    :param name_archive str: The name of the archive to make.
    :param d_content dict: The dictionary that contains all directories and
                           files to add in the archive.
                           {label : (path_on_local_machine, path_in_archive)}
    :param logger Logger: the logging instance
    :param f_exclude Function: the function that filters paths out of the
                               archive (returns True for a path to exclude)
    :return: 0 if success, 1 if not.
    :rtype: int
    '''
    # get the max length of the messages in order to make the display
    max_len = len(max(d_content.keys(), key=len))

    success = 0
    # loop over each directory or file stored in the d_content dictionary
    names = sorted(d_content.keys())
    DBG.write("add tar names", names)

    # NOTE(review): TarFile.add's legacy 'exclude' parameter was deprecated in
    # Python 2.7/3.2 and removed in 3.7; adapt f_exclude to the 'filter'
    # callable instead (return None to drop an entry). The filter receives the
    # in-archive name rather than the local path, which is equivalent for the
    # substring/extension tests performed by exclude_VCS_and_extensions.
    if f_exclude is None:
        f_filter = None
    else:
        f_filter = lambda tarinfo: None if f_exclude(tarinfo.name) else tarinfo

    # used to avoid duplications (for pip install in python, or single_install_dir cases)
    already_added = set()
    for name in names:
        # display information
        len_points = max_len - len(name) + 3
        local_path, archive_path = d_content[name]
        in_archive = os.path.join(name_archive, archive_path)
        logger.write(name + " " + len_points * "." + " "+ in_archive + " ", 3)
        # Get the local path and the path in archive
        # of the directory or file to add
        # Add it in the archive
        try:
            key = local_path + "->" + in_archive
            if key not in already_added:
                tar.add(local_path, arcname=in_archive, filter=f_filter)
                already_added.add(key)
            logger.write(src.printcolors.printcSuccess(_("OK")), 3)
        except Exception as e:
            # report the failure but keep adding the remaining entries
            logger.write(src.printcolors.printcError(_("KO ")), 3)
            logger.write(str(e), 3)
            success = 1
        logger.write("\n", 3)
    return success
def exclude_VCS_and_extensions(filename, ignored_dirs=None, ignored_extensions=None):
    ''' The function that is used to exclude from package the link to the
        VCS repositories (like .git) and unwanted file extensions.

    :param filename Str: The filename to test for exclusion (or not).
    :param ignored_dirs List: directory-name fragments whose presence in
                              filename triggers exclusion; defaults to the
                              module-level IGNORED_DIRS.
    :param ignored_extensions List: file extensions that trigger exclusion;
                                    defaults to the module-level
                                    IGNORED_EXTENSIONS.
    :return: True if the file has to be excluded
    :rtype: Boolean
    '''
    # fall back to the module-level configuration (kept as keyword arguments
    # so the function can still be passed as a one-argument callback)
    if ignored_dirs is None:
        ignored_dirs = IGNORED_DIRS
    if ignored_extensions is None:
        ignored_extensions = IGNORED_EXTENSIONS
    for dir_name in ignored_dirs:
        if dir_name in filename:
            return True
    for extension in ignored_extensions:
        if filename.endswith(extension):
            return True
    return False
181 def produce_relative_launcher(config,
186 '''Create a specific SALOME launcher for the binary package. This launcher
189 :param config Config: The global configuration.
190 :param logger Logger: the logging instance
191 :param file_dir str: the directory where to put the launcher
192 :param file_name str: The launcher name
193 :param binaries_dir_name str: the name of the repository where the binaries
195 :return: the path of the produced launcher
199 # get KERNEL installation path
200 kernel_root_dir = os.path.join(binaries_dir_name, "KERNEL")
202 # set kernel bin dir (considering fhs property)
203 kernel_cfg = src.product.get_product_config(config, "KERNEL")
204 if src.get_property_in_product_cfg(kernel_cfg, "fhs"):
205 bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin")
207 bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin","salome")
209 # check if the application contains an application module
210 # check also if the application has a distene product,
211 # in this case get its licence file name
212 l_product_info = src.product.get_products_infos(config.APPLICATION.products.keys(), config)
213 salome_application_name="Not defined"
214 distene_licence_file_name=False
215 for prod_name, prod_info in l_product_info:
216 # look for a "salome application" and a distene product
217 if src.get_property_in_product_cfg(prod_info, "is_distene") == "yes":
218 distene_licence_file_name = src.product.product_has_licence(prod_info,
219 config.PATHS.LICENCEPATH)
220 if src.get_property_in_product_cfg(prod_info, "is_salome_application") == "yes":
221 salome_application_name=prod_info.name
223 # if the application contains an application module, we set ABSOLUTE_APPLI_PATH to it
224 # if not we set it to KERNEL_INSTALL_DIR, which is sufficient, except for salome test
225 if salome_application_name == "Not defined":
226 app_root_dir=kernel_root_dir
228 app_root_dir=os.path.join(binaries_dir_name, salome_application_name)
231 additional_env['sat_bin_kernel_install_dir'] = "out_dir_Path + " +\
232 config.VARS.sep + bin_kernel_install_dir
233 if "python3" in config.APPLICATION and config.APPLICATION.python3 == "yes":
234 additional_env['sat_python_version'] = 3
236 additional_env['sat_python_version'] = 2
238 additional_env['ABSOLUTE_APPLI_PATH'] = "out_dir_Path" + config.VARS.sep + app_root_dir
240 # create an environment file writer
241 writer = src.environment.FileEnvWriter(config,
247 filepath = os.path.join(file_dir, file_name)
249 writer.write_env_file(filepath,
252 additional_env=additional_env,
253 no_path_init="False",
254 for_package = binaries_dir_name)
256 # Little hack to put out_dir_Path outside the strings
257 src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' )
258 src.replace_in_file(filepath, "r'out_dir_Path + ", "out_dir_Path + r'" )
260 # A hack to put a call to a file for distene licence.
261 # It does nothing to an application that has no distene product
262 if distene_licence_file_name:
263 logger.write("Application has a distene licence file! We use it in package launcher", 5)
264 hack_for_distene_licence(filepath, distene_licence_file_name)
266 # change the rights in order to make the file executable for everybody
278 def hack_for_distene_licence(filepath, licence_file):
279 '''Replace the distene licence env variable by a call to a file.
281 :param filepath Str: The path to the launcher to modify.
283 shutil.move(filepath, filepath + "_old")
285 filein = filepath + "_old"
286 fin = open(filein, "r")
287 fout = open(fileout, "w")
288 text = fin.readlines()
289 # Find the Distene section
291 for i,line in enumerate(text):
292 if "# Set DISTENE License" in line:
296 # No distene product, there is nothing to do
302 del text[num_line +1]
303 del text[num_line +1]
304 text_to_insert =""" try:
305 distene_licence_file=r"%s"
306 if sys.version_info[0] >= 3 and sys.version_info[1] >= 5:
307 import importlib.util
308 spec_dist = importlib.util.spec_from_file_location("distene_licence", distene_licence_file)
309 distene=importlib.util.module_from_spec(spec_dist)
310 spec_dist.loader.exec_module(distene)
313 distene = imp.load_source('distene_licence', distene_licence_file)
314 distene.set_distene_variables(context)
316 pass\n""" % licence_file
317 text.insert(num_line + 1, text_to_insert)
324 def produce_relative_env_files(config,
328 '''Create some specific environment files for the binary package. These
329 files use relative paths.
331 :param config Config: The global configuration.
332 :param logger Logger: the logging instance
333 :param file_dir str: the directory where to put the files
334 :param binaries_dir_name str: the name of the repository where the binaries
336 :return: the list of path of the produced environment files
339 # create an environment file writer
340 writer = src.environment.FileEnvWriter(config,
345 if src.architecture.is_windows():
347 filename = "env_launch.bat"
350 filename = "env_launch.sh"
353 filepath = writer.write_env_file(filename,
356 for_package = binaries_dir_name)
358 # Little hack to put out_dir_Path as environment variable
359 if src.architecture.is_windows() :
360 src.replace_in_file(filepath, '"out_dir_Path', '"%out_dir_Path%' )
361 src.replace_in_file(filepath, '=out_dir_Path', '=%out_dir_Path%' )
363 src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )
365 # change the rights in order to make the file executable for everybody
377 def produce_install_bin_file(config,
382 '''Create a bash shell script which do substitutions in BIRARIES dir
383 in order to use it for extra compilations.
385 :param config Config: The global configuration.
386 :param logger Logger: the logging instance
387 :param file_dir str: the directory where to put the files
388 :param d_sub, dict: the dictionnary that contains the substitutions to be done
389 :param file_name str: the name of the install script file
390 :return: the produced file
394 filepath = os.path.join(file_dir, file_name)
395 # open the file and write into it
396 # use codec utf-8 as sat variables are in unicode
397 with codecs.open(filepath, "w", 'utf-8') as installbin_file:
398 installbin_template_path = os.path.join(config.VARS.internal_dir,
399 "INSTALL_BIN.template")
401 # build the name of the directory that will contain the binaries
402 binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
403 # build the substitution loop
404 loop_cmd = "for f in $(grep -RIl"
406 loop_cmd += " -e "+ key
407 loop_cmd += ' ' + config.INTERNAL.config.install_dir +\
410 loop_cmd += " s?" + key + "?$(pwd)/" + d_sub[key] + "?g\n"
411 loop_cmd += ' " $f\ndone'
414 d["BINARIES_DIR"] = binaries_dir_name
415 d["SUBSTITUTION_LOOP"]=loop_cmd
416 d["INSTALL_DIR"]=config.INTERNAL.config.install_dir
418 # substitute the template and write it in file
419 content=src.template.substitute(installbin_template_path, d)
420 installbin_file.write(content)
421 # change the rights in order to make the file executable for everybody
433 def product_appli_creation_script(config,
437 '''Create a script that can produce an application (EDF style) in the binary
440 :param config Config: The global configuration.
441 :param logger Logger: the logging instance
442 :param file_dir str: the directory where to put the file
443 :param binaries_dir_name str: the name of the repository where the binaries
445 :return: the path of the produced script file
448 template_name = "create_appli.py.for_bin_packages.template"
449 template_path = os.path.join(config.VARS.internal_dir, template_name)
450 text_to_fill = open(template_path, "r").read()
451 text_to_fill = text_to_fill.replace("TO BE FILLED 1",
452 '"' + binaries_dir_name + '"')
455 for product_name in get_SALOME_modules(config):
456 product_info = src.product.get_product_config(config, product_name)
458 if src.product.product_is_smesh_plugin(product_info):
461 if 'install_dir' in product_info and bool(product_info.install_dir):
462 if src.product.product_is_cpp(product_info):
464 for cpp_name in src.product.get_product_components(product_info):
465 line_to_add = ("<module name=\"" +
467 "\" gui=\"yes\" path=\"''' + "
468 "os.path.join(dir_bin_name, \"" +
469 cpp_name + "\") + '''\"/>")
472 line_to_add = ("<module name=\"" +
474 "\" gui=\"yes\" path=\"''' + "
475 "os.path.join(dir_bin_name, \"" +
476 product_name + "\") + '''\"/>")
477 text_to_add += line_to_add + "\n"
479 filled_text = text_to_fill.replace("TO BE FILLED 2", text_to_add)
481 tmp_file_path = os.path.join(file_dir, "create_appli.py")
482 ff = open(tmp_file_path, "w")
483 ff.write(filled_text)
486 # change the rights in order to make the file executable for everybody
487 os.chmod(tmp_file_path,
498 def binary_package(config, logger, options, tmp_working_dir):
499 '''Prepare a dictionary that stores all the needed directories and files to
500 add in a binary package.
502 :param config Config: The global configuration.
503 :param logger Logger: the logging instance
504 :param options OptResult: the options of the launched command
505 :param tmp_working_dir str: The temporary local directory containing some
506 specific directories or files needed in the
508 :return: the dictionary that stores all the needed directories and files to
509 add in a binary package.
510 {label : (path_on_local_machine, path_in_archive)}
514 # Get the list of product installation to add to the archive
515 l_products_name = sorted(config.APPLICATION.products.keys())
516 l_product_info = src.product.get_products_infos(l_products_name,
521 l_sources_not_present = []
522 generate_mesa_launcher = False # a flag to know if we generate a mesa launcher
523 if ("APPLICATION" in config and
524 "properties" in config.APPLICATION and
525 "mesa_launcher_in_package" in config.APPLICATION.properties and
526 config.APPLICATION.properties.mesa_launcher_in_package == "yes") :
527 generate_mesa_launcher=True
529 for prod_name, prod_info in l_product_info:
530 # skip product with property not_in_package set to yes
531 if src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes":
534 # Add the sources of the products that have the property
535 # sources_in_package : "yes"
536 if src.get_property_in_product_cfg(prod_info,
537 "sources_in_package") == "yes":
538 if os.path.exists(prod_info.source_dir):
539 l_source_dir.append((prod_name, prod_info.source_dir))
541 l_sources_not_present.append(prod_name)
543 # ignore the native and fixed products for install directories
544 if (src.product.product_is_native(prod_info)
545 or src.product.product_is_fixed(prod_info)
546 or not src.product.product_compiles(prod_info)):
548 if src.product.check_installation(config, prod_info):
549 l_install_dir.append((prod_name, prod_info.install_dir))
551 l_not_installed.append(prod_name)
553 # Add also the cpp generated modules (if any)
554 if src.product.product_is_cpp(prod_info):
556 for name_cpp in src.product.get_product_components(prod_info):
557 install_dir = os.path.join(config.APPLICATION.workdir,
558 config.INTERNAL.config.install_dir,
560 if os.path.exists(install_dir):
561 l_install_dir.append((name_cpp, install_dir))
563 l_not_installed.append(name_cpp)
565 # check the name of the directory that (could) contains the binaries
566 # from previous detar
567 binaries_from_detar = os.path.join(
568 config.APPLICATION.workdir,
569 config.INTERNAL.config.binary_dir + config.VARS.dist)
570 if os.path.exists(binaries_from_detar):
572 WARNING: existing binaries directory from previous detar installation:
574 To make new package from this, you have to:
575 1) install binaries in INSTALL directory with the script "install_bin.sh"
576 see README file for more details
577 2) or recompile everything in INSTALL with "sat compile" command
578 this step is long, and requires some linux packages to be installed
580 """ % binaries_from_detar)
582 # Print warning or error if there are some missing products
583 if len(l_not_installed) > 0:
584 text_missing_prods = ""
585 for p_name in l_not_installed:
586 text_missing_prods += " - " + p_name + "\n"
587 if not options.force_creation:
588 msg = _("ERROR: there are missing product installations:")
589 logger.write("%s\n%s" % (src.printcolors.printcError(msg),
592 raise src.SatException(msg)
594 msg = _("WARNING: there are missing products installations:")
595 logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
599 # Do the same for sources
600 if len(l_sources_not_present) > 0:
601 text_missing_prods = ""
602 for p_name in l_sources_not_present:
603 text_missing_prods += "-" + p_name + "\n"
604 if not options.force_creation:
605 msg = _("ERROR: there are missing product sources:")
606 logger.write("%s\n%s" % (src.printcolors.printcError(msg),
609 raise src.SatException(msg)
611 msg = _("WARNING: there are missing products sources:")
612 logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
616 # construct the name of the directory that will contain the binaries
617 if src.architecture.is_windows():
618 binaries_dir_name = config.INTERNAL.config.binary_dir
620 binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
621 # construct the correlation table between the product names, there
622 # actual install directories and there install directory in archive
624 for prod_name, install_dir in l_install_dir:
625 path_in_archive = os.path.join(binaries_dir_name, os.path.basename(install_dir))
626 d_products[prod_name + " (bin)"] = (install_dir, path_in_archive)
628 for prod_name, source_dir in l_source_dir:
629 path_in_archive = os.path.join("SOURCES", prod_name)
630 d_products[prod_name + " (sources)"] = (source_dir, path_in_archive)
632 # for packages of SALOME applications including KERNEL,
633 # we produce a salome launcher or a virtual application (depending on salome version)
634 if 'KERNEL' in config.APPLICATION.products:
635 VersionSalome = src.get_salome_version(config)
636 # Case where SALOME has the launcher that uses the SalomeContext API
637 if VersionSalome >= 730:
638 # create the relative launcher and add it to the files to add
639 launcher_name = src.get_launcher_name(config)
640 launcher_package = produce_relative_launcher(config,
645 d_products["launcher"] = (launcher_package, launcher_name)
647 # if the application contains mesa products, we generate in addition to the
648 # classical salome launcher a launcher using mesa and called mesa_salome
649 # (the mesa launcher will be used for remote usage through ssh).
650 if generate_mesa_launcher:
651 #if there is one : store the use_mesa property
652 restore_use_mesa_option=None
653 if ('properties' in config.APPLICATION and
654 'use_mesa' in config.APPLICATION.properties):
655 restore_use_mesa_option = config.APPLICATION.properties.use_mesa
657 # activate mesa property, and generate a mesa launcher
658 src.activate_mesa_property(config) #activate use_mesa property
659 launcher_mesa_name="mesa_"+launcher_name
660 launcher_package_mesa = produce_relative_launcher(config,
665 d_products["launcher (mesa)"] = (launcher_package_mesa, launcher_mesa_name)
667 # if there was a use_mesa value, we restore it
668 # else we set it to the default value "no"
669 if restore_use_mesa_option != None:
670 config.APPLICATION.properties.use_mesa=restore_use_mesa_option
672 config.APPLICATION.properties.use_mesa="no"
675 # if we mix binaries and sources, we add a copy of the launcher,
676 # prefixed with "bin",in order to avoid clashes
677 d_products["launcher (copy)"] = (launcher_package, "bin"+launcher_name)
679 # Provide a script for the creation of an application EDF style
680 appli_script = product_appli_creation_script(config,
685 d_products["appli script"] = (appli_script, "create_appli.py")
687 # Put also the environment file
688 env_file = produce_relative_env_files(config,
693 if src.architecture.is_windows():
694 filename = "env_launch.bat"
696 filename = "env_launch.sh"
697 d_products["environment file"] = (env_file, filename)
700 def source_package(sat, config, logger, options, tmp_working_dir):
701 '''Prepare a dictionary that stores all the needed directories and files to
702 add in a source package.
704 :param config Config: The global configuration.
705 :param logger Logger: the logging instance
706 :param options OptResult: the options of the launched command
707 :param tmp_working_dir str: The temporary local directory containing some
708 specific directories or files needed in the
710 :return: the dictionary that stores all the needed directories and files to
711 add in a source package.
712 {label : (path_on_local_machine, path_in_archive)}
717 # Get all the products that are prepared using an archive
718 # unless ftp mode is specified (in this case the user of the
719 # archive will get the sources through the ftp mode of sat prepare
721 logger.write("Find archive products ... ")
722 d_archives, l_pinfo_vcs = get_archives(config, logger)
723 logger.write("Done\n")
726 if not options.with_vcs and len(l_pinfo_vcs) > 0:
727 # Make archives with the products that are not prepared using an archive
728 # (git, cvs, svn, etc)
729 logger.write("Construct archives for vcs products ... ")
730 d_archives_vcs = get_archives_vcs(l_pinfo_vcs,
735 logger.write("Done\n")
738 logger.write("Create the project ... ")
739 d_project = create_project_for_src_package(config,
743 logger.write("Done\n")
746 tmp_sat = add_salomeTools(config, tmp_working_dir)
747 d_sat = {"salomeTools" : (tmp_sat, "salomeTools")}
749 # Add a sat symbolic link if not win
750 if not src.architecture.is_windows():
751 tmp_satlink_path = os.path.join(tmp_working_dir, 'sat')
755 # In the jobs, os.getcwd() can fail
756 t = config.LOCAL.workdir
757 os.chdir(tmp_working_dir)
758 if os.path.lexists(tmp_satlink_path):
759 os.remove(tmp_satlink_path)
760 os.symlink(os.path.join('salomeTools', 'sat'), 'sat')
763 d_sat["sat link"] = (tmp_satlink_path, "sat")
765 d_source = src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
768 def get_archives(config, logger):
769 '''Find all the products that are get using an archive and all the products
770 that are get using a vcs (git, cvs, svn) repository.
772 :param config Config: The global configuration.
773 :param logger Logger: the logging instance
774 :return: the dictionary {name_product :
775 (local path of its archive, path in the package of its archive )}
776 and the list of specific configuration corresponding to the vcs
780 # Get the list of product informations
781 l_products_name = config.APPLICATION.products.keys()
782 l_product_info = src.product.get_products_infos(l_products_name,
786 for p_name, p_info in l_product_info:
787 # skip product with property not_in_package set to yes
788 if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
790 # ignore the native and fixed products
791 if (src.product.product_is_native(p_info)
792 or src.product.product_is_fixed(p_info)):
794 if p_info.get_source == "archive":
795 archive_path = p_info.archive_info.archive_name
796 archive_name = os.path.basename(archive_path)
797 d_archives[p_name] = (archive_path,
798 os.path.join(ARCHIVE_DIR, archive_name))
799 if (src.appli_test_property(config,"pip", "yes") and
800 src.product.product_test_property(p_info,"pip", "yes")):
801 # if pip mode is activated, and product is managed by pip
802 pip_wheels_dir=os.path.join(config.LOCAL.archive_dir,"wheels")
803 pip_wheel_pattern=os.path.join(pip_wheels_dir,
804 "%s-%s*" % (p_info.name, p_info.version))
805 pip_wheel_path=glob.glob(pip_wheel_pattern)
806 msg_pip_not_found="Error in get_archive, pip wheel for "\
807 "product %s-%s was not found in %s directory"
808 msg_pip_two_or_more="Error in get_archive, several pip wheels for "\
809 "product %s-%s were found in %s directory"
810 if len(pip_wheel_path)==0:
811 raise src.SatException(msg_pip_not_found %\
812 (p_info.name, p_info.version, pip_wheels_dir))
813 if len(pip_wheel_path)>1:
814 raise src.SatException(msg_pip_two_or_more %\
815 (p_info.name, p_info.version, pip_wheels_dir))
817 pip_wheel_name=os.path.basename(pip_wheel_path[0])
818 d_archives[p_name+" (pip wheel)"]=(pip_wheel_path[0],
819 os.path.join(ARCHIVE_DIR, "wheels", pip_wheel_name))
821 # this product is not managed by archive,
822 # an archive of the vcs directory will be created by get_archive_vcs
823 l_pinfo_vcs.append((p_name, p_info))
825 return d_archives, l_pinfo_vcs
827 def add_salomeTools(config, tmp_working_dir):
828 '''Prepare a version of salomeTools that has a specific local.pyconf file
829 configured for a source package.
831 :param config Config: The global configuration.
832 :param tmp_working_dir str: The temporary local directory containing some
833 specific directories or files needed in the
835 :return: The path to the local salomeTools directory to add in the package
838 # Copy sat in the temporary working directory
839 sat_tmp_path = src.Path(os.path.join(tmp_working_dir, "salomeTools"))
840 sat_running_path = src.Path(config.VARS.salometoolsway)
841 sat_running_path.copy(sat_tmp_path)
843 # Update the local.pyconf file that contains the path to the project
844 local_pyconf_name = "local.pyconf"
845 local_pyconf_dir = os.path.join(tmp_working_dir, "salomeTools", "data")
846 local_pyconf_file = os.path.join(local_pyconf_dir, local_pyconf_name)
847 # Remove the .pyconf file in the root directory of salomeTools if there is
848 # any. (For example when launching jobs, a pyconf file describing the jobs
849 # can be here and is not useful)
850 files_or_dir_SAT = os.listdir(os.path.join(tmp_working_dir, "salomeTools"))
851 for file_or_dir in files_or_dir_SAT:
852 if file_or_dir.endswith(".pyconf") or file_or_dir.endswith(".txt"):
853 file_path = os.path.join(tmp_working_dir,
858 ff = open(local_pyconf_file, "w")
859 ff.write(LOCAL_TEMPLATE)
862 return sat_tmp_path.path
864 def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir):
865 '''For sources package that require that all products are get using an
866 archive, one has to create some archive for the vcs products.
867 So this method calls the clean and source command of sat and then create
870 :param l_pinfo_vcs List: The list of specific configuration corresponding to
872 :param sat Sat: The Sat instance that can be called to clean and source the
874 :param config Config: The global configuration.
875 :param logger Logger: the logging instance
876 :param tmp_working_dir str: The temporary local directory containing some
877 specific directories or files needed in the
879 :return: the dictionary that stores all the archives to add in the source
880 package. {label : (path_on_local_machine, path_in_archive)}
883 # clean the source directory of all the vcs products, then use the source
884 # command and thus construct an archive that will not contain the patches
885 l_prod_names = [pn for pn, __ in l_pinfo_vcs]
886 if False: # clean is dangerous in user/SOURCES, fixed in tmp_local_working_dir
887 logger.write(_("\nclean sources\n"))
888 args_clean = config.VARS.application
889 args_clean += " --sources --products "
890 args_clean += ",".join(l_prod_names)
891 logger.write("WARNING: get_archives_vcs clean\n '%s'\n" % args_clean, 1)
892 sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)
895 logger.write(_("get sources\n"))
896 args_source = config.VARS.application
897 args_source += " --products "
898 args_source += ",".join(l_prod_names)
899 svgDir = sat.cfg.APPLICATION.workdir
900 tmp_local_working_dir = os.path.join(sat.cfg.APPLICATION.workdir, "tmp_package") # to avoid too much big files in /tmp
901 sat.cfg.APPLICATION.workdir = tmp_local_working_dir
902 # DBG.write("SSS sat config.APPLICATION.workdir", sat.cfg.APPLICATION, True)
903 # DBG.write("sat config id", id(sat.cfg), True)
904 # shit as config is not same id() as for sat.source()
905 # sat.source(args_source, batch=True, verbose=5, logger_add_link = logger)
907 source.run(args_source, sat, logger) #use this mode as runner.cfg reference
909 # make the new archives
911 for pn, pinfo in l_pinfo_vcs:
912 path_archive = make_archive(pn, pinfo, tmp_local_working_dir)
913 logger.write("make archive vcs '%s'\n" % path_archive)
914 d_archives_vcs[pn] = (path_archive,
915 os.path.join(ARCHIVE_DIR, pn + ".tgz"))
916 sat.cfg.APPLICATION.workdir = svgDir
917 # DBG.write("END sat config", sat.cfg.APPLICATION, True)
918 return d_archives_vcs
def make_archive(prod_name, prod_info, where):
    '''Create an archive of a product by searching its source directory.

    :param prod_name str: The name of the product.
    :param prod_info Config: The specific configuration corresponding to the
                             product.
    :param where str: The path of the repository where to put the resulting
                      archive.
    :return: The path of the resulting archive.
    :rtype: str
    '''
    path_targz_prod = os.path.join(where, prod_name + PACKAGE_EXT)
    # NOTE(review): TarFile.add's legacy 'exclude' parameter was removed in
    # Python 3.7; use the 'filter' callable instead (return None to drop an
    # entry whose name matches the VCS/extension exclusion rules).
    vcs_filter = (lambda tarinfo:
                  None if exclude_VCS_and_extensions(tarinfo.name) else tarinfo)
    # context manager guarantees the archive is flushed and closed, even if
    # tar_prod.add raises
    with tarfile.open(path_targz_prod, mode='w:gz') as tar_prod:
        tar_prod.add(prod_info.source_dir,
                     arcname=prod_name,
                     filter=vcs_filter)
    return path_targz_prod
def create_project_for_src_package(config, tmp_working_dir, with_vcs, with_ftp):
    '''Create a specific project for a source package.

    :param config Config: The global configuration.
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
                                source package.
    :param with_vcs boolean: True if the package is with vcs products (not
                             transformed into archive products)
    :param with_ftp boolean: True if the package use ftp servers to get archives
    :return: The dictionary
             {"project" : (produced project, project path in the archive)}
    :rtype: dict
    '''
    # Create in the working temporary directory the full project tree
    project_tmp_dir = os.path.join(tmp_working_dir, PROJECT_DIR)
    # Sub-directories of the temporary project: product pyconf files,
    # compilation scripts, environment scripts, patches and application pyconf.
    products_pyconf_tmp_dir = os.path.join(project_tmp_dir,
    compil_scripts_tmp_dir = os.path.join(project_tmp_dir,
    env_scripts_tmp_dir = os.path.join(project_tmp_dir,
    patches_tmp_dir = os.path.join(project_tmp_dir,
    application_tmp_dir = os.path.join(project_tmp_dir,
    for directory in [project_tmp_dir,
                      compil_scripts_tmp_dir,
                      application_tmp_dir]:
        src.ensure_path_exists(directory)

    # Create the pyconf that contains the information of the project
    project_pyconf_name = "project.pyconf"
    project_pyconf_file = os.path.join(project_tmp_dir, project_pyconf_name)
    ff = open(project_pyconf_file, "w")
    ff.write(PROJECT_TEMPLATE)
    # Optionally declare the ftp servers (joined with ":") where the
    # prerequisite archives can be searched for.
    if with_ftp and len(config.PATHS.ARCHIVEFTP) > 0:
        ftp_path='ARCHIVEFTP : "'+config.PATHS.ARCHIVEFTP[0]
        for ftpserver in config.PATHS.ARCHIVEFTP[1:]:
            ftp_path=ftp_path+":"+ftpserver
        ff.write("# ftp servers where to search for prerequisite archives\n")
    # add licence paths if any (same ":"-joined list format as above)
    if len(config.PATHS.LICENCEPATH) > 0:
        licence_path='LICENCEPATH : "'+config.PATHS.LICENCEPATH[0]
        for path in config.PATHS.LICENCEPATH[1:]:
            licence_path=licence_path+":"+path
        ff.write("\n# Where to search for licences\n")
        ff.write(licence_path)

    # Loop over the products to get there pyconf and all the scripts
    # (compilation, environment, patches)
    # and create the pyconf file to add to the project
    lproducts_name = config.APPLICATION.products.keys()
    l_products = src.product.get_products_infos(lproducts_name, config)
    for p_name, p_info in l_products:
        # skip product with property not_in_package set to yes
        if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
        find_product_scripts_and_pyconf(p_name,
                                        compil_scripts_tmp_dir,
                                        env_scripts_tmp_dir,
                                        products_pyconf_tmp_dir)

    # copy the application pyconf into the temporary project as well
    find_application_pyconf(config, application_tmp_dir)

    d_project = {"project" : (project_tmp_dir, PROJECT_DIR )}
def find_product_scripts_and_pyconf(p_name,
                                    compil_scripts_tmp_dir,
                                    env_scripts_tmp_dir,
                                    products_pyconf_tmp_dir):
    '''Create a specific pyconf file for a given product. Get its environment
    script, its compilation script and patches and put it in the temporary
    working directory. This method is used in the source package in order to
    construct the specific project.

    :param p_name str: The name of the product.
    :param p_info Config: The specific configuration corresponding to the
                          product.
    :param config Config: The global configuration.
    :param with_vcs boolean: True if the package is with vcs products (not
                             transformed into archive products)
    :param compil_scripts_tmp_dir str: The path to the temporary compilation
                                       scripts directory of the project.
    :param env_scripts_tmp_dir str: The path to the temporary environment script
                                    directory of the project.
    :param patches_tmp_dir str: The path to the temporary patch scripts
                                directory of the project.
    :param products_pyconf_tmp_dir str: The path to the temporary product
                                        scripts directory of the project.
    '''
    # read the pyconf of the product
    product_pyconf_cfg = src.pyconf.Config(p_info.from_file)

    # find the compilation script if any
    if src.product.product_has_script(p_info):
        compil_script_path = src.Path(p_info.compil_script)
        compil_script_path.copy(compil_scripts_tmp_dir)

    # find the environment script if any
    if src.product.product_has_env_script(p_info):
        env_script_path = src.Path(p_info.environ.env_script)
        env_script_path.copy(env_scripts_tmp_dir)

    # find the patches if any
    if src.product.product_has_patches(p_info):
        patches = src.pyconf.Sequence()
        for patch_path in p_info.patches:
            p_path = src.Path(patch_path)
            p_path.copy(patches_tmp_dir)
            # keep only the basename: patches live at the project root
            patches.append(os.path.basename(patch_path), "")

    if (not with_vcs) and src.product.product_is_vcs(p_info):
        # in non vcs mode, if the product is not archive, then make it become archive.

        # depending upon the incremental mode, select impacted sections
        if "properties" in p_info and "incremental" in p_info.properties and\
           p_info.properties.incremental == "yes":
            sections = ["default", "default_win", p_info.section, p_info.section+"_win"]
            sections = [p_info.section]
        for section in sections:
            # only rewrite sections that actually define how sources are fetched
            if section in product_pyconf_cfg and "get_source" in product_pyconf_cfg[section]:
                DBG.write("sat package set archive mode to archive for product %s and section %s" %\
                product_pyconf_cfg[section].get_source = "archive"
                if not "archive_info" in product_pyconf_cfg[section]:
                    product_pyconf_cfg[section].addMapping("archive_info",
                                                           src.pyconf.Mapping(product_pyconf_cfg),
                product_pyconf_cfg[section].archive_info.archive_name =\
                                                          p_info.name + ".tgz"

    # write the pyconf file to the temporary project location
    product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir,
    ff = open(product_tmp_pyconf_path, 'w')
    ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
    product_pyconf_cfg.__save__(ff, 1)
def find_application_pyconf(config, application_tmp_dir):
    '''Find the application pyconf file and put it in the specific temporary
    directory containing the specific project of a source package.

    :param config Config: The global configuration.
    :param application_tmp_dir str: The path to the temporary application
                                    scripts directory of the project.
    '''
    # read the pyconf of the application
    application_name = config.VARS.application
    application_pyconf_path = src.find_file_in_lpath(
                                        application_name + ".pyconf",
                                        config.PATHS.APPLICATIONPATH)
    application_pyconf_cfg = src.pyconf.Config(application_pyconf_path)

    # Change the workdir so that, once unpacked, the application builds
    # relative to the salomeTools installation shipped in the archive
    application_pyconf_cfg.APPLICATION.workdir = src.pyconf.Reference(
                                    application_pyconf_cfg,
                                    'VARS.salometoolsway + $VARS.sep + ".."')

    # Prevent from compilation in base
    application_pyconf_cfg.APPLICATION.no_base = "yes"

    #remove products that are not in config (which were filtered by --without_properties)
    for product_name in application_pyconf_cfg.APPLICATION.products.keys():
        if product_name not in config.APPLICATION.products.keys():
            application_pyconf_cfg.APPLICATION.products.__delitem__(product_name)

    # write the pyconf file to the temporary application location
    application_tmp_pyconf_path = os.path.join(application_tmp_dir,
                                               application_name + ".pyconf")
    ff = open(application_tmp_pyconf_path, 'w')
    ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
    application_pyconf_cfg.__save__(ff, 1)
def sat_package(config, tmp_working_dir, options, logger):
    '''Prepare a dictionary that stores all the needed directories and files to
    add in a salomeTool package.

    :param config Config: The global configuration.
    :param tmp_working_dir str: The temporary local working directory
    :param options OptResult: the options of the launched command
    :param logger Logger: the logger instance used for the command output
    :return: the dictionary that stores all the needed directories and files to
             add in a salomeTool package.
             {label : (path_on_local_machine, path_in_archive)}
    '''
    # we include sat himself
    d_project["all_sat"]=(config.VARS.salometoolsway, "")

    # and we overwrite local.pyconf with a clean version.
    local_pyconf_tmp_path = os.path.join(tmp_working_dir, "local.pyconf")
    local_file_path = os.path.join(config.VARS.datadir, "local.pyconf")
    local_cfg = src.pyconf.Config(local_file_path)
    # reset user-specific settings so the shipped local.pyconf is neutral
    local_cfg.PROJECTS.project_file_paths=src.pyconf.Sequence(local_cfg.PROJECTS)
    local_cfg.LOCAL["base"] = "default"
    local_cfg.LOCAL["workdir"] = "default"
    local_cfg.LOCAL["log_dir"] = "default"
    local_cfg.LOCAL["archive_dir"] = "default"
    local_cfg.LOCAL["VCS"] = "None"
    local_cfg.LOCAL["tag"] = src.get_salometool_version(config)

    # if the archive contains a project, we write its relative path in local.pyconf
    project_arch_path = os.path.join("projects", options.project,
                                     os.path.basename(options.project_file_path))
    local_cfg.PROJECTS.project_file_paths.append(project_arch_path, "")

    ff = open(local_pyconf_tmp_path, 'w')
    local_cfg.__save__(ff, 1)

    # the cleaned local.pyconf goes into the archive's data directory
    d_project["local.pyconf"]=(local_pyconf_tmp_path, "data/local.pyconf")
def project_package(config, name_project, project_file_path, ftp_mode, tmp_working_dir, embedded_in_sat, logger):
    '''Prepare a dictionary that stores all the needed directories and files to
    add in a project package.

    :param config Config: The global configuration.
    :param name_project str: The name of the project to package.
    :param project_file_path str: The path to the local project.
    :param ftp_mode boolean: Do not embed archives, the archive will rely on ftp mode to retrieve them.
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
                                project package.
    :param embedded_in_sat boolean : the project package is embedded in a sat package
    :param logger Logger: the logger instance used for the command output
    :return: the dictionary that stores all the needed directories and files to
             add in a project package.
             {label : (path_on_local_machine, path_in_archive)}
    '''
    # Read the project file and get the directories to add to the package
    # (first try the already-loaded config entry, fall back to the file itself)
    project_pyconf_cfg = config.PROJECTS.projects.__getattr__(name_project)
WARNING: inexisting config.PROJECTS.projects.%s, try to read now from:\n%s\n""" % (name_project, project_file_path))
    project_pyconf_cfg = src.pyconf.Config(project_file_path)
    project_pyconf_cfg.PWD = os.path.dirname(project_file_path)

    # project sub-directories and their destination names inside the archive
    paths = {"APPLICATIONPATH" : "applications",
             "PRODUCTPATH" : "products",
             "MACHINEPATH" : "machines"}
    paths["ARCHIVEPATH"] = "archives"

    # Loop over the project paths and add it
    project_file_name = os.path.basename(project_file_path)
    if path not in project_pyconf_cfg:
        dest_path = os.path.join("projects", name_project, paths[path])
        project_file_dest = os.path.join("projects", name_project, project_file_name)
        dest_path = paths[path]
        project_file_dest = project_file_name

    # Add the directory to the files to add in the package
    d_project[path] = (project_pyconf_cfg[path], dest_path)

    # Modify the value of the path in the package
    project_pyconf_cfg[path] = src.pyconf.Reference(
                                'project_path + "/' + paths[path] + '"')

    # Modify some values
    if "project_path" not in project_pyconf_cfg:
        project_pyconf_cfg.addMapping("project_path",
                                      src.pyconf.Mapping(project_pyconf_cfg),
    project_pyconf_cfg.project_path = src.pyconf.Reference(project_pyconf_cfg,

    # we don't want to export these two fields
    project_pyconf_cfg.__delitem__("file_path")
    project_pyconf_cfg.__delitem__("PWD")
    project_pyconf_cfg.__delitem__("ARCHIVEPATH")

    # Write the project pyconf file
    project_pyconf_tmp_path = os.path.join(tmp_working_dir, project_file_name)
    ff = open(project_pyconf_tmp_path, 'w')
    ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
    project_pyconf_cfg.__save__(ff, 1)

    d_project["Project hat file"] = (project_pyconf_tmp_path, project_file_dest)
def add_readme(config, options, where):
    '''Create the README file of the package from the internal templates.

    :param config Config: The global configuration.
    :param options OptResult: the options of the launched command
    :param where str: the directory in which the README file is written
    :return: the path of the produced README file
    '''
    readme_path = os.path.join(where, "README")
    with codecs.open(readme_path, "w", 'utf-8') as f:

        # templates for building the header
# This package was generated with sat $version
# Distribution : $dist

In the following, $$ROOT represents the directory where you have installed
SALOME (the directory where this file is located).

        # on Windows the root placeholder uses the %VAR% shell syntax
        if src.architecture.is_windows():
            readme_header = readme_header.replace('$$ROOT','%ROOT%')
        readme_compilation_with_binaries="""

compilation based on the binaries used as prerequisites
=======================================================

If you fail to compile the complete application (for example because
you are not root on your system and cannot install missing packages), you
may try a partial compilation based on the binaries.
For that it is necessary to copy the binaries from BINARIES to INSTALL,
and do some substitutions on cmake and .la files (replace the build directories
The procedure to do it is:
 1) Remove or rename INSTALL directory if it exists
 2) Execute the shell script install_bin.sh:
 3) Use SalomeTool (as explained in Sources section) and compile only the
    modules you need to (with -p option)
        readme_header_tpl=string.Template(readme_header)
        # per-section README templates shipped with salomeTools
        readme_template_path_bin = os.path.join(config.VARS.internal_dir,
                                                "README_BIN.template")
        readme_template_path_bin_launcher = os.path.join(config.VARS.internal_dir,
                                                "README_LAUNCHER.template")
        readme_template_path_bin_virtapp = os.path.join(config.VARS.internal_dir,
                                                "README_BIN_VIRTUAL_APP.template")
        readme_template_path_src = os.path.join(config.VARS.internal_dir,
                                                "README_SRC.template")
        readme_template_path_pro = os.path.join(config.VARS.internal_dir,
                                                "README_PROJECT.template")
        readme_template_path_sat = os.path.join(config.VARS.internal_dir,
                                                "README_SAT.template")

        # prepare substitution dictionary
        d['user'] = config.VARS.user
        d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
        d['version'] = src.get_salometool_version(config)
        d['dist'] = config.VARS.dist
        f.write(readme_header_tpl.substitute(d)) # write the general header (common)

        if options.binaries or options.sources:
            d['application'] = config.VARS.application
            d['BINARIES'] = config.INTERNAL.config.install_dir
            d['SEPARATOR'] = config.VARS.sep
            if src.architecture.is_windows():
                d['operatingSystem'] = 'Windows'
                d['PYTHON3'] = 'python3'
                d['ROOT'] = '%ROOT%'
                d['operatingSystem'] = 'Linux'
            f.write("# Application: " + d['application'] + "\n")
            if 'KERNEL' in config.APPLICATION.products:
                VersionSalome = src.get_salome_version(config)
                # Case where SALOME has the launcher that uses the SalomeContext API
                if VersionSalome >= 730:
                    d['launcher'] = config.APPLICATION.profile.launcher_name
                    d['virtual_app'] = 'runAppli' # this info is not used now)

        # write the specific sections
        if options.binaries:
            f.write(src.template.substitute(readme_template_path_bin, d))
            if "virtual_app" in d:
                f.write(src.template.substitute(readme_template_path_bin_virtapp, d))
            f.write(src.template.substitute(readme_template_path_bin_launcher, d))

        f.write(src.template.substitute(readme_template_path_src, d))

        # hybrid bin+src packages additionally ship the recompilation note
        if options.binaries and options.sources and not src.architecture.is_windows():
            f.write(readme_compilation_with_binaries)

        f.write(src.template.substitute(readme_template_path_pro, d))

        f.write(src.template.substitute(readme_template_path_sat, d))
def update_config(config, prop, value):
    '''Remove from config.APPLICATION.products the products that have the property given as input.

    :param config Config: The global config.
    :param prop str: The property to filter
    :param value str: The value of the property to filter
    '''
    # if there is no APPLICATION (ex sat package -t) : nothing to do
    if "APPLICATION" not in config:
        return
    # Collect the matching names first, then delete: never mutate the
    # products mapping while iterating over it.
    names_to_drop = [
        name
        for name in config.APPLICATION.products.keys()
        if src.get_property_in_product_cfg(
               src.product.get_product_config(config, name), prop) == value
    ]
    for name in names_to_drop:
        config.APPLICATION.products.__delitem__(name)
1380 '''method that is called when salomeTools is called with --help option.
1382 :return: The text to display for the package command description.
1386 The package command creates a tar file archive of a product.
1387 There are four kinds of archive, which can be mixed:
1389 1 - The binary archive.
1390 It contains the product installation directories plus a launcher.
1391 2 - The sources archive.
1392 It contains the product archives, a project (the application plus salomeTools).
1393 3 - The project archive.
1394 It contains a project (give the project file path as argument).
1395 4 - The salomeTools archive.
1396 It contains code utility salomeTools.
1399 >> sat package SALOME-master --binaries --sources""")
1401 def run(args, runner, logger):
1402 '''method that is called when salomeTools is called with package parameter.
1406 (options, args) = parser.parse_args(args)
1408 # Check that a type of package is called, and only one
1409 all_option_types = (options.binaries,
1411 options.project not in ["", None],
1414 # Check if no option for package type
1415 if all_option_types.count(True) == 0:
1416 msg = _("Error: Precise a type for the package\nUse one of the "
1417 "following options: --binaries, --sources, --project or"
1419 logger.write(src.printcolors.printcError(msg), 1)
1420 logger.write("\n", 1)
1423 # The repository where to put the package if not Binary or Source
1424 package_default_path = runner.cfg.LOCAL.workdir
1426 # if the package contains binaries or sources:
1427 if options.binaries or options.sources:
1428 # Check that the command has been called with an application
1429 src.check_config_has_application(runner.cfg)
1431 # Display information
1432 logger.write(_("Packaging application %s\n") % src.printcolors.printcLabel(
1433 runner.cfg.VARS.application), 1)
1435 # Get the default directory where to put the packages
1436 package_default_path = os.path.join(runner.cfg.APPLICATION.workdir, "PACKAGE")
1437 src.ensure_path_exists(package_default_path)
1439 # if the package contains a project:
1441 # check that the project is visible by SAT
1442 projectNameFile = options.project + ".pyconf"
1444 for i in runner.cfg.PROJECTS.project_file_paths:
1445 baseName = os.path.basename(i)
1446 if baseName == projectNameFile:
1450 if foundProject is None:
1451 local_path = os.path.join(runner.cfg.VARS.salometoolsway, "data", "local.pyconf")
1452 msg = _("""ERROR: the project %(1)s is not visible by salomeTools.
1456 Please add it in file:
1458 {"1": options.project, "2": "\n ".join(runner.cfg.PROJECTS.project_file_paths), "3": local_path})
1459 logger.write(src.printcolors.printcError(msg), 1)
1460 logger.write("\n", 1)
1463 options.project_file_path = foundProject
1464 src.printcolors.print_value(logger, "Project path", options.project_file_path, 2)
1466 # Remove the products that are filtered by the --without_properties option
1467 if options.without_properties:
1468 app = runner.cfg.APPLICATION
1469 logger.trace("without_properties all products:\n %s\n" % PP.pformat(sorted(app.products.keys())))
1470 prop, value = options.without_properties
1471 update_config(runner.cfg, prop, value)
1472 logger.warning("without_properties selected products:\n %s\n" % PP.pformat(sorted(app.products.keys())))
1474 # Remove from config the products that have the not_in_package property
1475 update_config(runner.cfg, "not_in_package", "yes")
1477 # get the name of the archive or build it
1479 if os.path.basename(options.name) == options.name:
1480 # only a name (not a path)
1481 archive_name = options.name
1482 dir_name = package_default_path
1484 archive_name = os.path.basename(options.name)
1485 dir_name = os.path.dirname(options.name)
1487 # suppress extension
1488 if archive_name[-len(".tgz"):] == ".tgz":
1489 archive_name = archive_name[:-len(".tgz")]
1490 if archive_name[-len(".tar.gz"):] == ".tar.gz":
1491 archive_name = archive_name[:-len(".tar.gz")]
1495 dir_name = package_default_path
1496 if options.binaries or options.sources:
1497 archive_name = runner.cfg.APPLICATION.name
1499 if options.binaries:
1500 archive_name += "-"+runner.cfg.VARS.dist
1503 archive_name += "-SRC"
1504 if options.with_vcs:
1505 archive_name += "-VCS"
1508 archive_name += ("salomeTools_" + src.get_salometool_version(runner.cfg))
1513 project_name = options.project
1514 archive_name += ("satproject_" + project_name)
1516 if len(archive_name)==0: # no option worked
1517 msg = _("Error: Cannot name the archive\n"
1518 " check if at least one of the following options was "
1519 "selected : --binaries, --sources, --project or"
1521 logger.write(src.printcolors.printcError(msg), 1)
1522 logger.write("\n", 1)
1525 path_targz = os.path.join(dir_name, archive_name + PACKAGE_EXT)
1527 src.printcolors.print_value(logger, "Package path", path_targz, 2)
1529 # Create a working directory for all files that are produced during the
1530 # package creation and that will be removed at the end of the command
1531 tmp_working_dir = os.path.join(runner.cfg.VARS.tmp_root, runner.cfg.VARS.datehour)
1532 src.ensure_path_exists(tmp_working_dir)
1533 logger.write("\n", 5)
1534 logger.write(_("The temporary working directory: %s\n" % tmp_working_dir),5)
1536 logger.write("\n", 3)
1538 msg = _("Preparation of files to add to the archive")
1539 logger.write(src.printcolors.printcLabel(msg), 2)
1540 logger.write("\n", 2)
1542 d_files_to_add={} # content of the archive
1544 # a dict to hold paths that will need to be substitute for users recompilations
1545 d_paths_to_substitute={}
1547 if options.binaries:
1548 d_bin_files_to_add = binary_package(runner.cfg,
1552 # for all binaries dir, store the substitution that will be required
1553 # for extra compilations
1554 for key in d_bin_files_to_add:
1555 if key.endswith("(bin)"):
1556 source_dir = d_bin_files_to_add[key][0]
1557 path_in_archive = d_bin_files_to_add[key][1].replace(
1558 runner.cfg.INTERNAL.config.binary_dir + runner.cfg.VARS.dist,
1559 runner.cfg.INTERNAL.config.install_dir)
1560 if os.path.basename(source_dir)==os.path.basename(path_in_archive):
1561 # if basename is the same we will just substitute the dirname
1562 d_paths_to_substitute[os.path.dirname(source_dir)]=\
1563 os.path.dirname(path_in_archive)
1565 d_paths_to_substitute[source_dir]=path_in_archive
1567 d_files_to_add.update(d_bin_files_to_add)
1569 d_files_to_add.update(source_package(runner,
1574 if options.binaries:
1575 # for archives with bin and sources we provide a shell script able to
1576 # install binaries for compilation
1577 file_install_bin=produce_install_bin_file(runner.cfg,logger,
1579 d_paths_to_substitute,
1581 d_files_to_add.update({"install_bin" : (file_install_bin, "install_bin.sh")})
1582 logger.write("substitutions that need to be done later : \n", 5)
1583 logger.write(str(d_paths_to_substitute), 5)
1584 logger.write("\n", 5)
1586 # --salomeTool option is not considered when --sources is selected, as this option
1587 # already brings salomeTool!
1589 d_files_to_add.update(sat_package(runner.cfg, tmp_working_dir,
1593 DBG.write("config for package %s" % project_name, runner.cfg)
1594 d_files_to_add.update(project_package(runner.cfg, project_name, options.project_file_path, options.ftp, tmp_working_dir, options.sat, logger))
1596 if not(d_files_to_add):
1597 msg = _("Error: Empty dictionnary to build the archive!\n")
1598 logger.write(src.printcolors.printcError(msg), 1)
1599 logger.write("\n", 1)
1602 # Add the README file in the package
1603 local_readme_tmp_path = add_readme(runner.cfg, options, tmp_working_dir)
1604 d_files_to_add["README"] = (local_readme_tmp_path, "README")
1606 # Add the additional files of option add_files
1607 if options.add_files:
1608 for file_path in options.add_files:
1609 if not os.path.exists(file_path):
1610 msg = _("WARNING: the file %s is not accessible.\n" % file_path)
1612 file_name = os.path.basename(file_path)
1613 d_files_to_add[file_name] = (file_path, file_name)
1615 logger.write("\n", 2)
1616 logger.write(src.printcolors.printcLabel(_("Actually do the package")), 2)
1617 logger.write("\n", 2)
1618 logger.write("\nfiles and directories to add:\n%s\n\n" % PP.pformat(d_files_to_add), 5)
1622 # Creating the object tarfile
1623 tar = tarfile.open(path_targz, mode='w:gz')
1625 # get the filtering function if needed
1626 filter_function = exclude_VCS_and_extensions
1628 # Add the files to the tarfile object
1629 res = add_files(tar, archive_name, d_files_to_add, logger, f_exclude=filter_function)
1631 except KeyboardInterrupt:
1632 logger.write(src.printcolors.printcError("\nERROR: forced interruption\n"), 1)
1633 logger.write(_("Removing the temporary working directory '%s'... ") % tmp_working_dir, 1)
1634 # remove the working directory
1635 shutil.rmtree(tmp_working_dir)
1636 logger.write(_("OK"), 1)
1637 logger.write(_("\n"), 1)
1640 # case if no application, only package sat as 'sat package -t'
1642 app = runner.cfg.APPLICATION
1646 # unconditionaly remove the tmp_local_working_dir
1648 tmp_local_working_dir = os.path.join(app.workdir, "tmp_package")
1649 if os.path.isdir(tmp_local_working_dir):
1650 shutil.rmtree(tmp_local_working_dir)
1652 # remove the tmp directory, unless user has registered as developer
1653 if os.path.isdir(tmp_working_dir) and (not DBG.isDeveloper()):
1654 shutil.rmtree(tmp_working_dir)
1656 # Print again the path of the package
1657 logger.write("\n", 2)
1658 src.printcolors.print_value(logger, "Package path", path_targz, 2)