3 # Copyright (C) 2010-2012 CEA/DEN
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 # Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
31 from application import get_SALOME_modules
32 import src.debug as DBG
# Directory names used inside the produced package archive:
# ARCHIVE_DIR holds product archives, PROJECT_DIR holds the generated project.
39 ARCHIVE_DIR = "ARCHIVES"
40 PROJECT_DIR = "PROJECT"
# VCS metadata directories/extensions filtered out of source archives
# (used by exclude_VCS_and_extensions); IGNORED_EXTENSIONS is empty by default.
42 IGNORED_DIRS = [".git", ".svn"]
43 IGNORED_EXTENSIONS = []
45 PACKAGE_EXT=".tar.gz" # the extension we use for the packages
# Pyconf template for the "project.pyconf" file written into a source package
# (consumed by create_project_for_src_package).  The $-variables are pyconf
# references resolved when the project is loaded, not Python substitutions.
# NOTE(review): the closing triple-quote of this template is on a line elided
# from this listing — confirm against the original file.
47 PROJECT_TEMPLATE = """#!/usr/bin/env python
50 # The path to the archive root directory
51 root_path : $PWD + "/../"
53 project_path : $PWD + "/"
55 # Where to search the archives of the products
56 ARCHIVEPATH : $root_path + "ARCHIVES"
57 # Where to search the pyconf of the applications
58 APPLICATIONPATH : $project_path + "applications/"
59 # Where to search the pyconf of the products
60 PRODUCTPATH : $project_path + "products/"
61 # Where to search the pyconf of the jobs of the project
62 JOBPATH : $project_path + "jobs/"
63 # Where to search the pyconf of the machines of the project
64 MACHINEPATH : $project_path + "machines/"
# Template for the "local.pyconf" shipped with the embedded salomeTools copy
# (written by add_salomeTools); points the packaged sat at the PROJECT_DIR
# project.pyconf relative to its own installation directory.
# NOTE(review): several template lines are elided from this listing.
67 LOCAL_TEMPLATE = ("""#!/usr/bin/env python
75 archive_dir : 'default'
82 project_file_paths : [$VARS.salometoolsway + $VARS.sep + \"..\" + $VARS.sep"""
83 """ + \"""" + PROJECT_DIR + """\" + $VARS.sep + "project.pyconf"]
# Command-line option declarations for "sat package".  Each add_option call
# registers (short flag, long flag, value type, destination attribute,
# help text, default).  NOTE(review): the default-value arguments of the
# 'with_vcs' and 'ftp' options are on lines elided from this listing.
87 # Define all possible option for the package command : sat package <options>
88 parser = src.options.Options()
89 parser.add_option('b', 'binaries', 'boolean', 'binaries',
90 _('Optional: Produce a binary package.'), False)
91 parser.add_option('f', 'force_creation', 'boolean', 'force_creation',
92 _('Optional: Only binary package: produce the archive even if '
93 'there are some missing products.'), False)
94 parser.add_option('s', 'sources', 'boolean', 'sources',
95 _('Optional: Produce a compilable archive of the sources of the '
96 'application.'), False)
97 parser.add_option('', 'with_vcs', 'boolean', 'with_vcs',
98 _('Optional: Do not make archive for products in VCS mode (git, cvs, svn). '
99 'Sat prepare will use VCS mode instead to retrieve them'),
101 parser.add_option('', 'ftp', 'boolean', 'ftp',
102 _('Optional: Do not embed archives for products in archive mode.'
103 'Sat prepare will use ftp instead to retrieve them'),
105 parser.add_option('p', 'project', 'string', 'project',
106 _('Optional: Produce an archive that contains a project.'), "")
107 parser.add_option('t', 'salometools', 'boolean', 'sat',
108 _('Optional: Produce an archive that contains salomeTools.'), False)
109 parser.add_option('n', 'name', 'string', 'name',
110 _('Optional: The name or full path of the archive.'), None)
111 parser.add_option('', 'add_files', 'list2', 'add_files',
112 _('Optional: The list of additional files to add to the archive.'), [])
113 parser.add_option('', 'without_properties', 'properties', 'without_properties',
114 _('Optional: Filter the products by their properties.\n\tSyntax: '
115 '--without_properties <property>:<value>'))
# Adds every (local path -> archive path) entry of d_content into the open
# tarfile, logging one aligned progress line per entry.
# NOTE(review): this listing elides the loop header over 'names', the
# 'already_added' set initialisation and the 'try:' statement — confirm
# the control flow against the original file.
118 def add_files(tar, name_archive, d_content, logger, f_exclude=None):
119 '''Create an archive containing all directories and files that are given in
120 the d_content argument.
122 :param tar tarfile: The tarfile instance used to make the archive.
123 :param name_archive str: The name of the archive to make.
124 :param d_content dict: The dictionary that contain all directories and files
125 to add in the archive.
127 (path_on_local_machine, path_in_archive)
128 :param logger Logger: the logging instance
129 :param f_exclude Function: the function that filters
130 :return: 0 if success, 1 if not.
133 # get the max length of the messages in order to make the display
134 max_len = len(max(d_content.keys(), key=len))
137 # loop over each directory or file stored in the d_content dictionary
138 names = sorted(d_content.keys())
139 DBG.write("add tar names", names)
141 # used to avoid duplications (for pip install in python, or single_install_dir cases)
144 # display information
145 len_points = max_len - len(name) + 3
146 local_path, archive_path = d_content[name]
147 in_archive = os.path.join(name_archive, archive_path)
148 logger.write(name + " " + len_points * "." + " "+ in_archive + " ", 3)
149 # Get the local path and the path in archive
150 # of the directory or file to add
151 # Add it in the archive
# de-duplicate: the same local-path/archive-path pair can occur for several
# products (pip wheels, single_install_dir); only the first one is added
153 key=local_path+"->"+in_archive
154 if key not in already_added:
155 tar.add(local_path, arcname=in_archive, exclude=f_exclude)
156 already_added.add(key)
157 logger.write(src.printcolors.printcSuccess(_("OK")), 3)
158 except Exception as e:
159 logger.write(src.printcolors.printcError(_("KO ")), 3)
160 logger.write(str(e), 3)
162 logger.write("\n", 3)
# Tarfile exclusion predicate used when building source archives.
# NOTE(review): the return statements are on lines elided from this listing;
# presumably each matching branch returns True and the fall-through returns
# False — confirm against the original file.  Also note the substring test
# 'dir_name in filename' matches anywhere in the path, not only whole path
# components.
165 def exclude_VCS_and_extensions(filename):
166 ''' The function that is used to exclude from package the link to the
167 VCS repositories (like .git)
169 :param filename Str: The filname to exclude (or not).
170 :return: True if the file has to be exclude
173 for dir_name in IGNORED_DIRS:
174 if dir_name in filename:
176 for extension in IGNORED_EXTENSIONS:
177 if filename.endswith(extension):
# Generates a relocatable SALOME launcher for the binary package: paths are
# written relative to an 'out_dir_Path' variable instead of absolute install
# paths.  NOTE(review): this listing elides several lines (full parameter
# list, FileEnvWriter arguments, os.chmod call and the return statement) —
# confirm against the original file.
181 def produce_relative_launcher(config,
186 '''Create a specific SALOME launcher for the binary package. This launcher
189 :param config Config: The global configuration.
190 :param logger Logger: the logging instance
191 :param file_dir str: the directory where to put the launcher
192 :param file_name str: The launcher name
193 :param binaries_dir_name str: the name of the repository where the binaries
195 :return: the path of the produced launcher
199 # get KERNEL installation path
200 kernel_info = src.product.get_product_config(config, "KERNEL")
201 kernel_base_name=os.path.basename(kernel_info.install_dir)
202 if kernel_base_name.startswith("config"):
203 # case of kernel installed in base. We remove "config-i"
204 kernel_base_name=os.path.basename(os.path.dirname(kernel_info.install_dir))
206 kernel_root_dir = os.path.join(binaries_dir_name, kernel_base_name)
208 # set kernel bin dir (considering fhs property)
209 kernel_cfg = src.product.get_product_config(config, "KERNEL")
210 if src.get_property_in_product_cfg(kernel_cfg, "fhs"):
211 bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin")
213 bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin","salome")
215 # check if the application contains an application module
216 # check also if the application has a distene product,
217 # in this case get its licence file name
218 l_product_info = src.product.get_products_infos(config.APPLICATION.products.keys(), config)
219 salome_application_name="Not defined"
220 distene_licence_file_name=False
221 for prod_name, prod_info in l_product_info:
222 # look for a "salome application" and a distene product
223 if src.get_property_in_product_cfg(prod_info, "is_distene") == "yes":
224 distene_licence_file_name = src.product.product_has_licence(prod_info,
225 config.PATHS.LICENCEPATH)
226 if src.get_property_in_product_cfg(prod_info, "is_salome_application") == "yes":
227 salome_application_name=prod_info.name
229 # if the application contains an application module, we set ABSOLUTE_APPLI_PATH to it
230 # if not we set it to KERNEL_INSTALL_DIR, which is sufficient, except for salome test
231 if salome_application_name == "Not defined":
232 app_root_dir=kernel_root_dir
234 app_root_dir=os.path.join(binaries_dir_name, salome_application_name)
237 additional_env['sat_bin_kernel_install_dir'] = "out_dir_Path + " +\
238 config.VARS.sep + bin_kernel_install_dir
239 if "python3" in config.APPLICATION and config.APPLICATION.python3 == "yes":
240 additional_env['sat_python_version'] = 3
242 additional_env['sat_python_version'] = 2
244 additional_env['ABSOLUTE_APPLI_PATH'] = "out_dir_Path" + config.VARS.sep + app_root_dir
246 # create an environment file writer
247 writer = src.environment.FileEnvWriter(config,
253 filepath = os.path.join(file_dir, file_name)
255 writer.write_env_file(filepath,
258 additional_env=additional_env,
259 no_path_init="False",
260 for_package = binaries_dir_name)
262 # Little hack to put out_dir_Path outside the strings
263 src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' )
264 src.replace_in_file(filepath, "r'out_dir_Path + ", "out_dir_Path + r'" )
266 # A hack to put a call to a file for distene licence.
267 # It does nothing to an application that has no distene product
268 if distene_licence_file_name:
269 logger.write("Application has a distene licence file! We use it in package launcher", 5)
270 hack_for_distene_licence(filepath, distene_licence_file_name)
272 # change the rights in order to make the file executable for everybody
# Rewrites the generated launcher so that the Distene licence environment is
# set by executing an external licence file at runtime instead of inlined
# variables.  NOTE(review): this listing elides the 'fileout' assignment, the
# early-return branch, and the lines that write the modified text back —
# confirm against the original file.  Also note fin/fout are opened without
# context managers, so the handles are presumably closed on elided lines.
284 def hack_for_distene_licence(filepath, licence_file):
285 '''Replace the distene licence env variable by a call to a file.
287 :param filepath Str: The path to the launcher to modify.
289 shutil.move(filepath, filepath + "_old")
291 filein = filepath + "_old"
292 fin = open(filein, "r")
293 fout = open(fileout, "w")
294 text = fin.readlines()
295 # Find the Distene section
297 for i,line in enumerate(text):
298 if "# Set DISTENE License" in line:
302 # No distene product, there is nothing to do
308 del text[num_line +1]
309 del text[num_line +1]
# The inserted snippet loads the licence file as a module: importlib on
# Python >= 3.5, the legacy 'imp' module otherwise, then calls
# set_distene_variables(context) from it.
310 text_to_insert =""" try:
311 distene_licence_file=r"%s"
312 if sys.version_info[0] >= 3 and sys.version_info[1] >= 5:
313 import importlib.util
314 spec_dist = importlib.util.spec_from_file_location("distene_licence", distene_licence_file)
315 distene=importlib.util.module_from_spec(spec_dist)
316 spec_dist.loader.exec_module(distene)
319 distene = imp.load_source('distene_licence', distene_licence_file)
320 distene.set_distene_variables(context)
322 pass\n""" % licence_file
323 text.insert(num_line + 1, text_to_insert)
# Writes a relocatable environment file (env_launch.bat on Windows,
# env_launch.sh elsewhere) for the binary package, then rewrites the
# 'out_dir_Path' placeholder into a shell/batch variable reference.
# NOTE(review): the FileEnvWriter arguments, the chmod call and the return
# statement are on lines elided from this listing.
330 def produce_relative_env_files(config,
334 '''Create some specific environment files for the binary package. These
335 files use relative paths.
337 :param config Config: The global configuration.
338 :param logger Logger: the logging instance
339 :param file_dir str: the directory where to put the files
340 :param binaries_dir_name str: the name of the repository where the binaries
342 :return: the list of path of the produced environment files
345 # create an environment file writer
346 writer = src.environment.FileEnvWriter(config,
351 if src.architecture.is_windows():
353 filename = "env_launch.bat"
356 filename = "env_launch.sh"
359 filepath = writer.write_env_file(filename,
362 for_package = binaries_dir_name)
364 # Little hack to put out_dir_Path as environment variable
365 if src.architecture.is_windows() :
366 src.replace_in_file(filepath, '"out_dir_Path', '"%out_dir_Path%' )
367 src.replace_in_file(filepath, '=out_dir_Path', '=%out_dir_Path%' )
369 src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )
371 # change the rights in order to make the file executable for everybody
# Generates an install script (from the INSTALL_BIN.template) that runs a
# grep/sed-style substitution loop over the binaries directory so the package
# can be re-rooted after extraction.
# NOTE(review): the loop header over d_sub keys, parts of the sed command and
# the chmod/return lines are elided from this listing.
383 def produce_install_bin_file(config,
388 '''Create a bash shell script which do substitutions in BIRARIES dir
389 in order to use it for extra compilations.
391 :param config Config: The global configuration.
392 :param logger Logger: the logging instance
393 :param file_dir str: the directory where to put the files
394 :param d_sub, dict: the dictionnary that contains the substitutions to be done
395 :param file_name str: the name of the install script file
396 :return: the produced file
400 filepath = os.path.join(file_dir, file_name)
401 # open the file and write into it
402 # use codec utf-8 as sat variables are in unicode
403 with codecs.open(filepath, "w", 'utf-8') as installbin_file:
404 installbin_template_path = os.path.join(config.VARS.internal_dir,
405 "INSTALL_BIN.template")
407 # build the name of the directory that will contain the binaries
408 binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
409 # build the substitution loop
410 loop_cmd = "for f in $(grep -RIl"
412 loop_cmd += " -e "+ key
413 loop_cmd += ' ' + config.INTERNAL.config.install_dir +\
416 loop_cmd += " s?" + key + "?$(pwd)/" + d_sub[key] + "?g\n"
417 loop_cmd += ' " $f\ndone'
# d holds the placeholder values substituted into the template
420 d["BINARIES_DIR"] = binaries_dir_name
421 d["SUBSTITUTION_LOOP"]=loop_cmd
422 d["INSTALL_DIR"]=config.INTERNAL.config.install_dir
424 # substitute the template and write it in file
425 content=src.template.substitute(installbin_template_path, d)
426 installbin_file.write(content)
427 # change the rights in order to make the file executable for everybody
# Fills the create_appli.py template with one <module .../> XML line per
# SALOME module of the application and writes the resulting script next to
# the package content.
# NOTE(review): several lines are elided from this listing (the loop filters,
# the module-name pieces of the generated XML, the chmod mode and the return
# statement).
439 def product_appli_creation_script(config,
443 '''Create a script that can produce an application (EDF style) in the binary
446 :param config Config: The global configuration.
447 :param logger Logger: the logging instance
448 :param file_dir str: the directory where to put the file
449 :param binaries_dir_name str: the name of the repository where the binaries
451 :return: the path of the produced script file
454 template_name = "create_appli.py.for_bin_packages.template"
455 template_path = os.path.join(config.VARS.internal_dir, template_name)
456 text_to_fill = open(template_path, "r").read()
457 text_to_fill = text_to_fill.replace("TO BE FILLED 1",
458 '"' + binaries_dir_name + '"')
461 for product_name in get_SALOME_modules(config):
462 product_info = src.product.get_product_config(config, product_name)
464 if src.product.product_is_smesh_plugin(product_info):
467 if 'install_dir' in product_info and bool(product_info.install_dir):
468 if src.product.product_is_cpp(product_info):
469 # cpp module
470 for cpp_name in src.product.get_product_components(product_info):
471 line_to_add = ("<module name=\"" +
473 "\" gui=\"yes\" path=\"''' + "
474 "os.path.join(dir_bin_name, \"" +
475 cpp_name + "\") + '''\"/>")
478 line_to_add = ("<module name=\"" +
480 "\" gui=\"yes\" path=\"''' + "
481 "os.path.join(dir_bin_name, \"" +
482 product_name + "\") + '''\"/>")
483 text_to_add += line_to_add + "\n"
485 filled_text = text_to_fill.replace("TO BE FILLED 2", text_to_add)
487 tmp_file_path = os.path.join(file_dir, "create_appli.py")
488 ff = open(tmp_file_path, "w")
489 ff.write(filled_text)
492 # change the rights in order to make the file executable for everybody
493 os.chmod(tmp_file_path,
# Builds the {label: (local_path, path_in_archive)} dictionary for a binary
# package: product install dirs, optional sources, launcher(s), appli script
# and environment file.
# NOTE(review): this listing elides many lines (continue statements, list
# initialisations, several call argument lists and the return statement) —
# confirm control flow against the original file before relying on details.
504 def binary_package(config, logger, options, tmp_working_dir):
505 '''Prepare a dictionary that stores all the needed directories and files to
506 add in a binary package.
508 :param config Config: The global configuration.
509 :param logger Logger: the logging instance
510 :param options OptResult: the options of the launched command
511 :param tmp_working_dir str: The temporary local directory containing some
512 specific directories or files needed in the
514 :return: the dictionary that stores all the needed directories and files to
515 add in a binary package.
516 {label : (path_on_local_machine, path_in_archive)}
520 # Get the list of product installation to add to the archive
521 l_products_name = sorted(config.APPLICATION.products.keys())
522 l_product_info = src.product.get_products_infos(l_products_name,
527 l_sources_not_present = []
528 generate_mesa_launcher = False # a flag to know if we generate a mesa launcher
529 if ("APPLICATION" in config and
530 "properties" in config.APPLICATION and
531 "mesa_launcher_in_package" in config.APPLICATION.properties and
532 config.APPLICATION.properties.mesa_launcher_in_package == "yes") :
533 generate_mesa_launcher=True
535 for prod_name, prod_info in l_product_info:
536 # skip product with property not_in_package set to yes
537 if src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes":
540 # Add the sources of the products that have the property
541 # sources_in_package : "yes"
542 if src.get_property_in_product_cfg(prod_info,
543 "sources_in_package") == "yes":
544 if os.path.exists(prod_info.source_dir):
545 l_source_dir.append((prod_name, prod_info.source_dir))
547 l_sources_not_present.append(prod_name)
549 # ignore the native and fixed products for install directories
550 if (src.product.product_is_native(prod_info)
551 or src.product.product_is_fixed(prod_info)
552 or not src.product.product_compiles(prod_info)):
554 if src.product.check_installation(config, prod_info):
555 l_install_dir.append((prod_name, prod_info.install_dir))
557 l_not_installed.append(prod_name)
559 # Add also the cpp generated modules (if any)
560 if src.product.product_is_cpp(prod_info):
562 for name_cpp in src.product.get_product_components(prod_info):
563 install_dir = os.path.join(config.APPLICATION.workdir,
564 config.INTERNAL.config.install_dir,
566 if os.path.exists(install_dir):
567 l_install_dir.append((name_cpp, install_dir))
569 l_not_installed.append(name_cpp)
571 # check the name of the directory that (could) contains the binaries
572 # from previous detar
573 binaries_from_detar = os.path.join(
574 config.APPLICATION.workdir,
575 config.INTERNAL.config.binary_dir + config.VARS.dist)
576 if os.path.exists(binaries_from_detar):
578 WARNING: existing binaries directory from previous detar installation:
580 To make new package from this, you have to:
581 1) install binaries in INSTALL directory with the script "install_bin.sh"
582 see README file for more details
583 2) or recompile everything in INSTALL with "sat compile" command
584 this step is long, and requires some linux packages to be installed
586 """ % binaries_from_detar)
588 # Print warning or error if there are some missing products
589 if len(l_not_installed) > 0:
590 text_missing_prods = ""
591 for p_name in l_not_installed:
592 text_missing_prods += " - " + p_name + "\n"
593 if not options.force_creation:
594 msg = _("ERROR: there are missing product installations:")
595 logger.write("%s\n%s" % (src.printcolors.printcError(msg),
598 raise src.SatException(msg)
600 msg = _("WARNING: there are missing products installations:")
601 logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
605 # Do the same for sources
606 if len(l_sources_not_present) > 0:
607 text_missing_prods = ""
608 for p_name in l_sources_not_present:
609 text_missing_prods += "-" + p_name + "\n"
610 if not options.force_creation:
611 msg = _("ERROR: there are missing product sources:")
612 logger.write("%s\n%s" % (src.printcolors.printcError(msg),
615 raise src.SatException(msg)
617 msg = _("WARNING: there are missing products sources:")
618 logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
622 # construct the name of the directory that will contain the binaries
623 if src.architecture.is_windows():
624 binaries_dir_name = config.INTERNAL.config.binary_dir
626 binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
627 # construct the correlation table between the product names, there
628 # actual install directories and there install directory in archive
630 for prod_name, install_dir in l_install_dir:
631 prod_base_name=os.path.basename(install_dir)
632 if prod_base_name.startswith("config"):
633 # case of a products installed in base. We remove "config-i"
634 prod_base_name=os.path.basename(os.path.dirname(install_dir))
635 path_in_archive = os.path.join(binaries_dir_name, prod_base_name)
636 d_products[prod_name + " (bin)"] = (install_dir, path_in_archive)
638 for prod_name, source_dir in l_source_dir:
639 path_in_archive = os.path.join("SOURCES", prod_name)
640 d_products[prod_name + " (sources)"] = (source_dir, path_in_archive)
642 # for packages of SALOME applications including KERNEL,
643 # we produce a salome launcher or a virtual application (depending on salome version)
644 if 'KERNEL' in config.APPLICATION.products:
645 VersionSalome = src.get_salome_version(config)
646 # Case where SALOME has the launcher that uses the SalomeContext API
647 if VersionSalome >= 730:
648 # create the relative launcher and add it to the files to add
649 launcher_name = src.get_launcher_name(config)
650 launcher_package = produce_relative_launcher(config,
655 d_products["launcher"] = (launcher_package, launcher_name)
657 # if the application contains mesa products, we generate in addition to the
658 # classical salome launcher a launcher using mesa and called mesa_salome
659 # (the mesa launcher will be used for remote usage through ssh).
660 if generate_mesa_launcher:
661 #if there is one : store the use_mesa property
662 restore_use_mesa_option=None
663 if ('properties' in config.APPLICATION and
664 'use_mesa' in config.APPLICATION.properties):
665 restore_use_mesa_option = config.APPLICATION.properties.use_mesa
667 # activate mesa property, and generate a mesa launcher
668 src.activate_mesa_property(config) #activate use_mesa property
669 launcher_mesa_name="mesa_"+launcher_name
670 launcher_package_mesa = produce_relative_launcher(config,
675 d_products["launcher (mesa)"] = (launcher_package_mesa, launcher_mesa_name)
677 # if there was a use_mesa value, we restore it
678 # else we set it to the default value "no"
679 if restore_use_mesa_option != None:
680 config.APPLICATION.properties.use_mesa=restore_use_mesa_option
682 config.APPLICATION.properties.use_mesa="no"
685 # if we mix binaries and sources, we add a copy of the launcher,
686 # prefixed with "bin",in order to avoid clashes
687 d_products["launcher (copy)"] = (launcher_package, "bin"+launcher_name)
689 # Provide a script for the creation of an application EDF style
690 appli_script = product_appli_creation_script(config,
695 d_products["appli script"] = (appli_script, "create_appli.py")
697 # Put also the environment file
698 env_file = produce_relative_env_files(config,
703 if src.architecture.is_windows():
704 filename = "env_launch.bat"
706 filename = "env_launch.sh"
707 d_products["environment file"] = (env_file, filename)
# Builds the {label: (local_path, path_in_archive)} dictionary for a source
# package: product archives, vcs-product archives, a generated project and an
# embedded salomeTools copy (plus a 'sat' symlink on non-Windows).
# NOTE(review): several lines are elided from this listing (try/finally
# around the symlink creation, call argument lists, return statement).
710 def source_package(sat, config, logger, options, tmp_working_dir):
711 '''Prepare a dictionary that stores all the needed directories and files to
712 add in a source package.
714 :param config Config: The global configuration.
715 :param logger Logger: the logging instance
716 :param options OptResult: the options of the launched command
717 :param tmp_working_dir str: The temporary local directory containing some
718 specific directories or files needed in the
720 :return: the dictionary that stores all the needed directories and files to
721 add in a source package.
722 {label : (path_on_local_machine, path_in_archive)}
727 # Get all the products that are prepared using an archive
728 # unless ftp mode is specified (in this case the user of the
729 # archive will get the sources through the ftp mode of sat prepare
731 logger.write("Find archive products ... ")
732 d_archives, l_pinfo_vcs = get_archives(config, logger)
733 logger.write("Done\n")
736 if not options.with_vcs and len(l_pinfo_vcs) > 0:
737 # Make archives with the products that are not prepared using an archive
738 # (git, cvs, svn, etc)
739 logger.write("Construct archives for vcs products ... ")
740 d_archives_vcs = get_archives_vcs(l_pinfo_vcs,
745 logger.write("Done\n")
748 logger.write("Create the project ... ")
749 d_project = create_project_for_src_package(config,
753 logger.write("Done\n")
# embed a private copy of salomeTools so the package is self-contained
756 tmp_sat = add_salomeTools(config, tmp_working_dir)
757 d_sat = {"salomeTools" : (tmp_sat, "salomeTools")}
759 # Add a sat symbolic link if not win
760 if not src.architecture.is_windows():
761 tmp_satlink_path = os.path.join(tmp_working_dir, 'sat')
765 # In the jobs, os.getcwd() can fail
766 t = config.LOCAL.workdir
767 os.chdir(tmp_working_dir)
768 if os.path.lexists(tmp_satlink_path):
769 os.remove(tmp_satlink_path)
770 os.symlink(os.path.join('salomeTools', 'sat'), 'sat')
773 d_sat["sat link"] = (tmp_satlink_path, "sat")
775 d_source = src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
# Splits the application products into archive-based ones (returned as a
# {name: (archive path, path in package)} dict, including pip wheels when pip
# mode is active) and vcs-based ones (returned as a list of (name, info)).
# NOTE(review): continue statements and dict/list initialisations are on
# lines elided from this listing.
778 def get_archives(config, logger):
779 '''Find all the products that are get using an archive and all the products
780 that are get using a vcs (git, cvs, svn) repository.
782 :param config Config: The global configuration.
783 :param logger Logger: the logging instance
784 :return: the dictionary {name_product :
785 (local path of its archive, path in the package of its archive )}
786 and the list of specific configuration corresponding to the vcs
790 # Get the list of product informations
791 l_products_name = config.APPLICATION.products.keys()
792 l_product_info = src.product.get_products_infos(l_products_name,
796 for p_name, p_info in l_product_info:
797 # skip product with property not_in_package set to yes
798 if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
800 # ignore the native and fixed products
801 if (src.product.product_is_native(p_info)
802 or src.product.product_is_fixed(p_info)):
804 if p_info.get_source == "archive":
805 archive_path = p_info.archive_info.archive_name
806 archive_name = os.path.basename(archive_path)
807 d_archives[p_name] = (archive_path,
808 os.path.join(ARCHIVE_DIR, archive_name))
809 if (src.appli_test_property(config,"pip", "yes") and
810 src.product.product_test_property(p_info,"pip", "yes")):
811 # if pip mode is activated, and product is managed by pip
812 pip_wheels_dir=os.path.join(config.LOCAL.archive_dir,"wheels")
# the wheel is located by glob pattern <name>-<version>*; exactly one
# match is required, otherwise a SatException is raised below
813 pip_wheel_pattern=os.path.join(pip_wheels_dir,
814 "%s-%s*" % (p_info.name, p_info.version))
815 pip_wheel_path=glob.glob(pip_wheel_pattern)
816 msg_pip_not_found="Error in get_archive, pip wheel for "\
817 "product %s-%s was not found in %s directory"
818 msg_pip_two_or_more="Error in get_archive, several pip wheels for "\
819 "product %s-%s were found in %s directory"
820 if len(pip_wheel_path)==0:
821 raise src.SatException(msg_pip_not_found %\
822 (p_info.name, p_info.version, pip_wheels_dir))
823 if len(pip_wheel_path)>1:
824 raise src.SatException(msg_pip_two_or_more %\
825 (p_info.name, p_info.version, pip_wheels_dir))
827 pip_wheel_name=os.path.basename(pip_wheel_path[0])
828 d_archives[p_name+" (pip wheel)"]=(pip_wheel_path[0],
829 os.path.join(ARCHIVE_DIR, "wheels", pip_wheel_name))
831 # this product is not managed by archive,
832 # an archive of the vcs directory will be created by get_archive_vcs
833 l_pinfo_vcs.append((p_name, p_info))
835 return d_archives, l_pinfo_vcs
# Copies the running salomeTools into the temporary package tree, strips
# stray .pyconf/.txt files from its root, and rewrites data/local.pyconf from
# LOCAL_TEMPLATE so the embedded sat points at the packaged project.
# NOTE(review): the file-removal call and the ff.close() are on lines elided
# from this listing.
837 def add_salomeTools(config, tmp_working_dir):
838 '''Prepare a version of salomeTools that has a specific local.pyconf file
839 configured for a source package.
841 :param config Config: The global configuration.
842 :param tmp_working_dir str: The temporary local directory containing some
843 specific directories or files needed in the
845 :return: The path to the local salomeTools directory to add in the package
848 # Copy sat in the temporary working directory
849 sat_tmp_path = src.Path(os.path.join(tmp_working_dir, "salomeTools"))
850 sat_running_path = src.Path(config.VARS.salometoolsway)
851 sat_running_path.copy(sat_tmp_path)
853 # Update the local.pyconf file that contains the path to the project
854 local_pyconf_name = "local.pyconf"
855 local_pyconf_dir = os.path.join(tmp_working_dir, "salomeTools", "data")
856 local_pyconf_file = os.path.join(local_pyconf_dir, local_pyconf_name)
857 # Remove the .pyconf file in the root directory of salomeTools if there is
858 # any. (For example when launching jobs, a pyconf file describing the jobs
859 # can be here and is not useful)
860 files_or_dir_SAT = os.listdir(os.path.join(tmp_working_dir, "salomeTools"))
861 for file_or_dir in files_or_dir_SAT:
862 if file_or_dir.endswith(".pyconf") or file_or_dir.endswith(".txt"):
863 file_path = os.path.join(tmp_working_dir,
868 ff = open(local_pyconf_file, "w")
869 ff.write(LOCAL_TEMPLATE)
872 return sat_tmp_path.path
# Re-fetches the vcs products into a temporary workdir (via the source
# command) and tars each one, returning {name: (archive path, path in
# package)}.  Temporarily repoints sat.cfg.APPLICATION.workdir and restores
# it afterwards.
# NOTE(review): the d_archives_vcs initialisation and the import/indirection
# used to call source.run are partially elided from this listing.
874 def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir):
875 '''For sources package that require that all products are get using an
876 archive, one has to create some archive for the vcs products.
877 So this method calls the clean and source command of sat and then create
880 :param l_pinfo_vcs List: The list of specific configuration corresponding to
882 :param sat Sat: The Sat instance that can be called to clean and source the
884 :param config Config: The global configuration.
885 :param logger Logger: the logging instance
886 :param tmp_working_dir str: The temporary local directory containing some
887 specific directories or files needed in the
889 :return: the dictionary that stores all the archives to add in the source
890 package. {label : (path_on_local_machine, path_in_archive)}
893 # clean the source directory of all the vcs products, then use the source
894 # command and thus construct an archive that will not contain the patches
895 l_prod_names = [pn for pn, __ in l_pinfo_vcs]
# the clean step is deliberately disabled: cleaning user/SOURCES was judged
# too dangerous, sources are fetched into tmp_local_working_dir instead
896 if False: # clean is dangerous in user/SOURCES, fixed in tmp_local_working_dir
897 logger.write(_("\nclean sources\n"))
898 args_clean = config.VARS.application
899 args_clean += " --sources --products "
900 args_clean += ",".join(l_prod_names)
901 logger.write("WARNING: get_archives_vcs clean\n '%s'\n" % args_clean, 1)
902 sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)
905 logger.write(_("get sources\n"))
906 args_source = config.VARS.application
907 args_source += " --products "
908 args_source += ",".join(l_prod_names)
909 svgDir = sat.cfg.APPLICATION.workdir
910 tmp_local_working_dir = os.path.join(sat.cfg.APPLICATION.workdir, "tmp_package") # to avoid too much big files in /tmp
911 sat.cfg.APPLICATION.workdir = tmp_local_working_dir
912 # DBG.write("SSS sat config.APPLICATION.workdir", sat.cfg.APPLICATION, True)
913 # DBG.write("sat config id", id(sat.cfg), True)
914 # shit as config is not same id() as for sat.source()
915 # sat.source(args_source, batch=True, verbose=5, logger_add_link = logger)
917 source.run(args_source, sat, logger) #use this mode as runner.cfg reference
919 # make the new archives
921 for pn, pinfo in l_pinfo_vcs:
922 path_archive = make_archive(pn, pinfo, tmp_local_working_dir)
923 logger.write("make archive vcs '%s'\n" % path_archive)
924 d_archives_vcs[pn] = (path_archive,
925 os.path.join(ARCHIVE_DIR, pn + ".tgz"))
# restore the real application workdir before returning
926 sat.cfg.APPLICATION.workdir = svgDir
927 # DBG.write("END sat config", sat.cfg.APPLICATION, True)
928 return d_archives_vcs
# Tars a single product's source directory into <where>/<prod_name>.tar.gz,
# excluding VCS metadata via exclude_VCS_and_extensions.
# NOTE(review): the 'arcname=' argument of tar_prod.add and the tar_prod
# close call are on lines elided from this listing; the 'exclude=' keyword
# of tarfile.TarFile.add was removed in Python 3.7 — confirm the targeted
# Python version.
930 def make_archive(prod_name, prod_info, where):
931 '''Create an archive of a product by searching its source directory.
933 :param prod_name str: The name of the product.
934 :param prod_info Config: The specific configuration corresponding to the
936 :param where str: The path of the repository where to put the resulting
938 :return: The path of the resulting archive
941 path_targz_prod = os.path.join(where, prod_name + PACKAGE_EXT)
942 tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
943 local_path = prod_info.source_dir
944 tar_prod.add(local_path,
946 exclude=exclude_VCS_and_extensions)
948 return path_targz_prod
# Builds the PROJECT_DIR tree inside the temporary working directory
# (products/compil scripts/env scripts/patches/applications), writes its
# project.pyconf from PROJECT_TEMPLATE (optionally with ARCHIVEFTP and
# LICENCEPATH entries), then collects each product's pyconf and scripts.
# NOTE(review): several lines are elided from this listing (subdirectory
# name arguments, ff.write/close calls, continue statement, the full
# find_product_scripts_and_pyconf argument list and the return statement).
950 def create_project_for_src_package(config, tmp_working_dir, with_vcs, with_ftp):
951 '''Create a specific project for a source package.
953 :param config Config: The global configuration.
954 :param tmp_working_dir str: The temporary local directory containing some
955 specific directories or files needed in the
957 :param with_vcs boolean: True if the package is with vcs products (not
958 transformed into archive products)
959 :param with_ftp boolean: True if the package use ftp servers to get archives
960 :return: The dictionary
961 {"project" : (produced project, project path in the archive)}
965 # Create in the working temporary directory the full project tree
966 project_tmp_dir = os.path.join(tmp_working_dir, PROJECT_DIR)
967 products_pyconf_tmp_dir = os.path.join(project_tmp_dir,
969 compil_scripts_tmp_dir = os.path.join(project_tmp_dir,
972 env_scripts_tmp_dir = os.path.join(project_tmp_dir,
975 patches_tmp_dir = os.path.join(project_tmp_dir,
978 application_tmp_dir = os.path.join(project_tmp_dir,
980 for directory in [project_tmp_dir,
981 compil_scripts_tmp_dir,
984 application_tmp_dir]:
985 src.ensure_path_exists(directory)
987 # Create the pyconf that contains the information of the project
988 project_pyconf_name = "project.pyconf"
989 project_pyconf_file = os.path.join(project_tmp_dir, project_pyconf_name)
990 ff = open(project_pyconf_file, "w")
991 ff.write(PROJECT_TEMPLATE)
# append colon-separated ftp server list when ftp mode is requested
992 if with_ftp and len(config.PATHS.ARCHIVEFTP) > 0:
993 ftp_path='ARCHIVEFTP : "'+config.PATHS.ARCHIVEFTP[0]
994 for ftpserver in config.PATHS.ARCHIVEFTP[1:]:
995 ftp_path=ftp_path+":"+ftpserver
997 ff.write("# ftp servers where to search for prerequisite archives\n")
999 # add licence paths if any
1000 if len(config.PATHS.LICENCEPATH) > 0:
1001 licence_path='LICENCEPATH : "'+config.PATHS.LICENCEPATH[0]
1002 for path in config.PATHS.LICENCEPATH[1:]:
1003 licence_path=licence_path+":"+path
1005 ff.write("\n# Where to search for licences\n")
1006 ff.write(licence_path)
1011 # Loop over the products to get there pyconf and all the scripts
1012 # (compilation, environment, patches)
1013 # and create the pyconf file to add to the project
1014 lproducts_name = config.APPLICATION.products.keys()
1015 l_products = src.product.get_products_infos(lproducts_name, config)
1016 for p_name, p_info in l_products:
1017 # skip product with property not_in_package set to yes
1018 if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
1020 find_product_scripts_and_pyconf(p_name,
1024 compil_scripts_tmp_dir,
1025 env_scripts_tmp_dir,
1027 products_pyconf_tmp_dir)
1029 find_application_pyconf(config, application_tmp_dir)
1031 d_project = {"project" : (project_tmp_dir, PROJECT_DIR )}
# NOTE(review): several signature continuation lines (p_info, config,
# with_vcs, patches_tmp_dir -- all referenced in the body) are missing
# from this extract of the file.
def find_product_scripts_and_pyconf(p_name,
                                    compil_scripts_tmp_dir,
                                    env_scripts_tmp_dir,
                                    products_pyconf_tmp_dir):
    '''Create a specific pyconf file for a given product. Get its environment
    script, its compilation script and patches and put it in the temporary
    working directory. This method is used in the source package in order to
    construct the specific project.

    :param p_name str: The name of the product.
    :param p_info Config: The specific configuration corresponding to the
                          product.
    :param config Config: The global configuration.
    :param with_vcs boolean: True if the package is with vcs products (not
                             transformed into archive products)
    :param compil_scripts_tmp_dir str: The path to the temporary compilation
                                       scripts directory of the project.
    :param env_scripts_tmp_dir str: The path to the temporary environment script
                                    directory of the project.
    :param patches_tmp_dir str: The path to the temporary patch scripts
                                directory of the project.
    :param products_pyconf_tmp_dir str: The path to the temporary product
                                        scripts directory of the project.
    '''
    # read the pyconf of the product
    product_pyconf_cfg = src.pyconf.Config(p_info.from_file)

    # find the compilation script if any
    if src.product.product_has_script(p_info):
        compil_script_path = src.Path(p_info.compil_script)
        compil_script_path.copy(compil_scripts_tmp_dir)

    # find the environment script if any
    if src.product.product_has_env_script(p_info):
        env_script_path = src.Path(p_info.environ.env_script)
        env_script_path.copy(env_scripts_tmp_dir)

    # find the patches if any
    if src.product.product_has_patches(p_info):
        patches = src.pyconf.Sequence()
        for patch_path in p_info.patches:
            p_path = src.Path(patch_path)
            p_path.copy(patches_tmp_dir)
            patches.append(os.path.basename(patch_path), "")

    if (not with_vcs) and src.product.product_is_vcs(p_info):
        # in non vcs mode, if the product is not archive, then make it become archive.

        # depending upon the incremental mode, select impacted sections
        if "properties" in p_info and "incremental" in p_info.properties and\
           p_info.properties.incremental == "yes":
            sections = ["default", "default_win", p_info.section, p_info.section+"_win"]
            # NOTE(review): the 'else:' line preceding the fallback below is
            # missing from this extract.
            sections = [p_info.section]
        for section in sections:
            if section in product_pyconf_cfg and "get_source" in product_pyconf_cfg[section]:
                # force the product to be fetched as an archive
                # (continuation args of this DBG.write call are missing here)
                DBG.write("sat package set archive mode to archive for product %s and section %s" %\
                product_pyconf_cfg[section].get_source = "archive"
                if not "archive_info" in product_pyconf_cfg[section]:
                    product_pyconf_cfg[section].addMapping("archive_info",
                                                           src.pyconf.Mapping(product_pyconf_cfg),
                product_pyconf_cfg[section].archive_info.archive_name =\
                    p_info.name + ".tgz"

    # write the pyconf file to the temporary project location
    # (the filename continuation argument is missing in this extract)
    product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir,
    ff = open(product_tmp_pyconf_path, 'w')
    ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
    product_pyconf_cfg.__save__(ff, 1)
    # NOTE(review): ff.close() is not visible in this extract.
def find_application_pyconf(config, application_tmp_dir):
    '''Find the application pyconf file and put it in the specific temporary
    directory containing the specific project of a source package.

    :param config Config: The global configuration.
    :param application_tmp_dir str: The path to the temporary application
                                    scripts directory of the project.
    '''
    # read the pyconf of the application
    application_name = config.VARS.application
    application_pyconf_path = src.find_file_in_lpath(
                                        application_name + ".pyconf",
                                        config.PATHS.APPLICATIONPATH)
    application_pyconf_cfg = src.pyconf.Config(application_pyconf_path)

    # Change the workdir
    # (one continuation argument of this Reference call is missing here)
    application_pyconf_cfg.APPLICATION.workdir = src.pyconf.Reference(
                                    application_pyconf_cfg,
                                    'VARS.salometoolsway + $VARS.sep + ".."')

    # Prevent from compilation in base
    application_pyconf_cfg.APPLICATION.base = "no"

    #remove products that are not in config (which were filtered by --without_properties)
    # NOTE(review): deleting entries while iterating .keys() is only safe on
    # Python 2, where keys() returns a list snapshot -- confirm interpreter.
    for product_name in application_pyconf_cfg.APPLICATION.products.keys():
        if product_name not in config.APPLICATION.products.keys():
            application_pyconf_cfg.APPLICATION.products.__delitem__(product_name)

    # write the pyconf file to the temporary application location
    application_tmp_pyconf_path = os.path.join(application_tmp_dir,
                                               application_name + ".pyconf")
    ff = open(application_tmp_pyconf_path, 'w')
    ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
    application_pyconf_cfg.__save__(ff, 1)
    # NOTE(review): ff.close() is not visible in this extract.
def sat_package(config, tmp_working_dir, options, logger):
    '''Prepare a dictionary that stores all the needed directories and files to
    add in a salomeTool package.

    :param config Config: The global configuration.
    :param tmp_working_dir str: The temporary local working directory
    :param options OptResult: the options of the launched command
    :param logger Logger: the logging instance
    :return: the dictionary that stores all the needed directories and files to
             add in a salomeTool package.
             {label : (path_on_local_machine, path_in_archive)}
    '''
    # NOTE(review): the 'd_project = {}' initialization is not visible in
    # this extract, although d_project is used immediately below.

    # we include sat himself
    d_project["all_sat"]=(config.VARS.salometoolsway, "")

    # and we overwrite local.pyconf with a clean version.
    local_pyconf_tmp_path = os.path.join(tmp_working_dir, "local.pyconf")
    local_file_path = os.path.join(config.VARS.datadir, "local.pyconf")
    local_cfg = src.pyconf.Config(local_file_path)
    local_cfg.PROJECTS.project_file_paths=src.pyconf.Sequence(local_cfg.PROJECTS)
    # reset all machine-specific local settings to neutral defaults so the
    # packaged sat is not tied to the build machine
    local_cfg.LOCAL["base"] = "default"
    local_cfg.LOCAL["workdir"] = "default"
    local_cfg.LOCAL["log_dir"] = "default"
    local_cfg.LOCAL["archive_dir"] = "default"
    local_cfg.LOCAL["VCS"] = "None"
    local_cfg.LOCAL["tag"] = src.get_salometool_version(config)

    # if the archive contains a project, we write its relative path in local.pyconf
    # NOTE(review): the guard line (presumably 'if options.project:') above
    # this assignment is missing from this extract.
    project_arch_path = os.path.join("projects", options.project,
                         os.path.basename(options.project_file_path))
    local_cfg.PROJECTS.project_file_paths.append(project_arch_path, "")

    ff = open(local_pyconf_tmp_path, 'w')
    local_cfg.__save__(ff, 1)
    # NOTE(review): ff.close() is not visible in this extract.

    d_project["local.pyconf"]=(local_pyconf_tmp_path, "data/local.pyconf")
    # NOTE(review): the 'return d_project' line is not visible in this extract.
def project_package(config, name_project, project_file_path, ftp_mode, tmp_working_dir, embedded_in_sat, logger):
    '''Prepare a dictionary that stores all the needed directories and files to
    add in a project package.

    :param config Config: The global configuration.
    :param name_project str: The name of the project.
    :param project_file_path str: The path to the local project.
    :param ftp_mode boolean: Do not embed archives, the archive will rely on ftp mode to retrieve them.
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
                                project package.
    :param embedded_in_sat boolean : the project package is embedded in a sat package
    :param logger Logger: the logging instance
    :return: the dictionary that stores all the needed directories and files to
             add in a project package.
             {label : (path_on_local_machine, path_in_archive)}
    '''
    # Read the project file and get the directories to add to the package
    # NOTE(review): the try/except wrapping of this attribute access (and
    # the d_project initialization) is missing from this extract; the
    # orphaned WARNING string fragment below belongs to the except branch.
    project_pyconf_cfg = config.PROJECTS.projects.__getattr__(name_project)
        WARNING: inexisting config.PROJECTS.projects.%s, try to read now from:\n%s\n""" % (name_project, project_file_path))
    project_pyconf_cfg = src.pyconf.Config(project_file_path)
    project_pyconf_cfg.PWD = os.path.dirname(project_file_path)

    # mapping: project path key -> destination directory name in the archive
    # (the "JOBPATH" entry appears to be missing from this extract)
    paths = {"APPLICATIONPATH" : "applications",
             "PRODUCTPATH" : "products",
             "MACHINEPATH" : "machines"}
    # NOTE(review): a guard (presumably 'if not ftp_mode:') before this
    # archive-path entry is missing from this extract.
    paths["ARCHIVEPATH"] = "archives"

    # Loop over the project paths and add it
    # NOTE(review): the 'for path in paths:' loop header, the 'continue'
    # body of the membership guard, and the embedded_in_sat if/else lines
    # are missing from this extract; indentation below is reconstructed.
    project_file_name = os.path.basename(project_file_path)
    if path not in project_pyconf_cfg:
    # destination inside a sat package: under projects/<name>/
    dest_path = os.path.join("projects", name_project, paths[path])
    project_file_dest = os.path.join("projects", name_project, project_file_name)
    # destination for a standalone project package: at archive root
    dest_path = paths[path]
    project_file_dest = project_file_name

    # Add the directory to the files to add in the package
    d_project[path] = (project_pyconf_cfg[path], dest_path)

    # Modify the value of the path in the package
    # (continuation arguments of this Reference call are missing here)
    project_pyconf_cfg[path] = src.pyconf.Reference(
                                'project_path + "/' + paths[path] + '"')

    # Modify some values
    if "project_path" not in project_pyconf_cfg:
        project_pyconf_cfg.addMapping("project_path",
                                      src.pyconf.Mapping(project_pyconf_cfg),
    project_pyconf_cfg.project_path = src.pyconf.Reference(project_pyconf_cfg,

    # we don't want to export these two fields
    project_pyconf_cfg.__delitem__("file_path")
    project_pyconf_cfg.__delitem__("PWD")
    # NOTE(review): a guard (presumably 'if ftp_mode:') before this delete
    # is missing from this extract.
    project_pyconf_cfg.__delitem__("ARCHIVEPATH")

    # Write the project pyconf file
    project_pyconf_tmp_path = os.path.join(tmp_working_dir, project_file_name)
    ff = open(project_pyconf_tmp_path, 'w')
    ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
    project_pyconf_cfg.__save__(ff, 1)
    # NOTE(review): ff.close() is not visible in this extract.
    d_project["Project hat file"] = (project_pyconf_tmp_path, project_file_dest)
    # NOTE(review): the 'return d_project' line is not visible in this extract.
def add_readme(config, options, where):
    '''Create the README file of the package from the internal templates and
    return its path.

    :param config Config: The global configuration.
    :param options OptResult: the options of the launched command (drives
                              which sections are written).
    :param where str: the directory in which to create the README file.
    :return: the path of the produced README file (return line not visible
             in this extract).
    '''
    readme_path = os.path.join(where, "README")
    with codecs.open(readme_path, "w", 'utf-8') as f:

        # templates for building the header
        # NOTE(review): the opening of the readme_header string literal is
        # missing from this extract; the orphaned lines below are fragments
        # of that template's content and are preserved verbatim.
# This package was generated with sat $version
# Distribution : $dist
In the following, $$ROOT represents the directory where you have installed
SALOME (the directory where this file is located).

        # on Windows the ROOT placeholder uses the %VAR% convention
        if src.architecture.is_windows():
            readme_header = readme_header.replace('$$ROOT','%ROOT%')
        readme_compilation_with_binaries="""

compilation based on the binaries used as prerequisites
=======================================================

If you fail to compile the complete application (for example because
you are not root on your system and cannot install missing packages), you
may try a partial compilation based on the binaries.
For that it is necessary to copy the binaries from BINARIES to INSTALL,
and do some substitutions on cmake and .la files (replace the build directories
The procedure to do it is:
 1) Remove or rename INSTALL directory if it exists
 2) Execute the shell script install_bin.sh:
 3) Use SalomeTool (as explained in Sources section) and compile only the
    modules you need to (with -p option)

        # one template per section of the README, all shipped with sat
        readme_header_tpl=string.Template(readme_header)
        readme_template_path_bin = os.path.join(config.VARS.internal_dir,
                                                "README_BIN.template")
        readme_template_path_bin_launcher = os.path.join(config.VARS.internal_dir,
                                                         "README_LAUNCHER.template")
        readme_template_path_bin_virtapp = os.path.join(config.VARS.internal_dir,
                                                        "README_BIN_VIRTUAL_APP.template")
        readme_template_path_src = os.path.join(config.VARS.internal_dir,
                                                "README_SRC.template")
        readme_template_path_pro = os.path.join(config.VARS.internal_dir,
                                                "README_PROJECT.template")
        readme_template_path_sat = os.path.join(config.VARS.internal_dir,
                                                "README_SAT.template")

        # prepare substitution dictionary
        # NOTE(review): the 'd = dict()' initialization is not visible here.
        d['user'] = config.VARS.user
        d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
        d['version'] = src.get_salometool_version(config)
        d['dist'] = config.VARS.dist
        f.write(readme_header_tpl.substitute(d)) # write the general header (common)

        if options.binaries or options.sources:
            d['application'] = config.VARS.application
            d['BINARIES'] = config.INTERNAL.config.install_dir
            d['SEPARATOR'] = config.VARS.sep
            if src.architecture.is_windows():
                d['operatingSystem'] = 'Windows'
                d['PYTHON3'] = 'python3'
                d['ROOT'] = '%ROOT%'
                # NOTE(review): the 'else:' branch lines around the Linux
                # settings below are missing from this extract.
                d['operatingSystem'] = 'Linux'
            f.write("# Application: " + d['application'] + "\n")
            if 'KERNEL' in config.APPLICATION.products:
                VersionSalome = src.get_salome_version(config)
                # Case where SALOME has the launcher that uses the SalomeContext API
                if VersionSalome >= 730:
                    d['launcher'] = config.APPLICATION.profile.launcher_name
                    # NOTE(review): presumably under an 'else:' (missing here)
                    d['virtual_app'] = 'runAppli' # this info is not used now)

        # write the specific sections
        if options.binaries:
            f.write(src.template.substitute(readme_template_path_bin, d))
            if "virtual_app" in d:
                f.write(src.template.substitute(readme_template_path_bin_virtapp, d))
                # NOTE(review): presumably under an 'else:' (missing here)
                f.write(src.template.substitute(readme_template_path_bin_launcher, d))
            # NOTE(review): the 'if options.sources:' guard above this line
            # is missing from this extract.
            f.write(src.template.substitute(readme_template_path_src, d))

        if options.binaries and options.sources and not src.architecture.is_windows():
            f.write(readme_compilation_with_binaries)

            # NOTE(review): the 'if options.project:' and 'if options.sat:'
            # guards around the two writes below are missing here.
            f.write(src.template.substitute(readme_template_path_pro, d))
            f.write(src.template.substitute(readme_template_path_sat, d))
def update_config(config, prop, value):
    '''Remove from config.APPLICATION.products the products that have the
    property given as input.

    :param config Config: The global config.
    :param prop str: The property to filter
    :param value str: The value of the property to filter
    '''
    # if there is no APPLICATION (ex sat package -t) : nothing to do
    if "APPLICATION" in config:
        # collect first, delete after: never mutate the mapping while
        # iterating over its keys
        l_product_to_remove = []
        for product_name in config.APPLICATION.products.keys():
            prod_cfg = src.product.get_product_config(config, product_name)
            if src.get_property_in_product_cfg(prod_cfg, prop) == value:
                l_product_to_remove.append(product_name)
        for product_name in l_product_to_remove:
            config.APPLICATION.products.__delitem__(product_name)
# NOTE(review): the enclosing 'def description():' header line and the
# opening of the returned _("""...""") help-text literal are missing from
# this extract; the lines below are the docstring and the verbatim content
# of that help string.
    '''method that is called when salomeTools is called with --help option.

    :return: The text to display for the package command description.
    '''
The package command creates a tar file archive of a product.
There are four kinds of archive, which can be mixed:

 1 - The binary archive.
     It contains the product installation directories plus a launcher.
 2 - The sources archive.
     It contains the product archives, a project (the application plus salomeTools).
 3 - The project archive.
     It contains a project (give the project file path as argument).
 4 - The salomeTools archive.
     It contains code utility salomeTools.

>> sat package SALOME-master --binaries --sources""")
def run(args, runner, logger):
    '''method that is called when salomeTools is called with package parameter.

    :param args list: the command-line arguments of the package command.
    :param runner: the sat runner instance (provides runner.cfg).
    :param logger Logger: the logging instance.
    '''
    # parse the command line options of the package command
    (options, args) = parser.parse_args(args)

    # Check that a type of package is called, and only one
    # NOTE(review): some tuple members (presumably options.sources and
    # options.sat) are missing from this extract.
    all_option_types = (options.binaries,
                        options.project not in ["", None],

    # Check if no option for package type
    if all_option_types.count(True) == 0:
        # (the closing of this message and the early 'return 1' are missing)
        msg = _("Error: Precise a type for the package\nUse one of the "
                "following options: --binaries, --sources, --project or"
        logger.write(src.printcolors.printcError(msg), 1)
        logger.write("\n", 1)

    # The repository where to put the package if not Binary or Source
    package_default_path = runner.cfg.LOCAL.workdir

    # if the package contains binaries or sources:
    if options.binaries or options.sources:
        # Check that the command has been called with an application
        src.check_config_has_application(runner.cfg)

        # Display information
        logger.write(_("Packaging application %s\n") % src.printcolors.printcLabel(
                                                runner.cfg.VARS.application), 1)

        # Get the default directory where to put the packages
        package_default_path = os.path.join(runner.cfg.APPLICATION.workdir, "PACKAGE")
        src.ensure_path_exists(package_default_path)

    # if the package contains a project:
    # NOTE(review): the 'if options.project:' guard line is missing from
    # this extract; the following lines belong to its body.
        # check that the project is visible by SAT
        projectNameFile = options.project + ".pyconf"
        # (the 'foundProject = None' initialization is missing here)
        for i in runner.cfg.PROJECTS.project_file_paths:
            baseName = os.path.basename(i)
            # (the 'foundProject = i' assignment / break are missing here)
            if baseName == projectNameFile:
        if foundProject is None:
            local_path = os.path.join(runner.cfg.VARS.salometoolsway, "data", "local.pyconf")
            # (interior lines of this multi-line message are missing here)
            msg = _("""ERROR: the project %(1)s is not visible by salomeTools.
Please add it in file:
    {"1": options.project, "2": "\n ".join(runner.cfg.PROJECTS.project_file_paths), "3": local_path})
            logger.write(src.printcolors.printcError(msg), 1)
            logger.write("\n", 1)
            # (an early 'return 1' is presumably missing here)
        options.project_file_path = foundProject
        src.printcolors.print_value(logger, "Project path", options.project_file_path, 2)

    # Remove the products that are filtered by the --without_properties option
    if options.without_properties:
        app = runner.cfg.APPLICATION
        logger.trace("without_properties all products:\n %s\n" % PP.pformat(sorted(app.products.keys())))
        prop, value = options.without_properties
        update_config(runner.cfg, prop, value)
        logger.warning("without_properties selected products:\n %s\n" % PP.pformat(sorted(app.products.keys())))

    # Remove from config the products that have the not_in_package property
    update_config(runner.cfg, "not_in_package", "yes")

    # get the name of the archive or build it
    # NOTE(review): the 'if options.name:' guard and its 'else:' lines are
    # missing from this extract; indentation below is reconstructed.
        if os.path.basename(options.name) == options.name:
            # only a name (not a path)
            archive_name = options.name
            dir_name = package_default_path
            # (presumably under an 'else:' missing here: a full path given)
            archive_name = os.path.basename(options.name)
            dir_name = os.path.dirname(options.name)

        # suppress extension
        if archive_name[-len(".tgz"):] == ".tgz":
            archive_name = archive_name[:-len(".tgz")]
        if archive_name[-len(".tar.gz"):] == ".tar.gz":
            archive_name = archive_name[:-len(".tar.gz")]

        # (presumably under an 'else:' missing here: build a default name)
        dir_name = package_default_path
        if options.binaries or options.sources:
            archive_name = runner.cfg.APPLICATION.name

        if options.binaries:
            archive_name += "-"+runner.cfg.VARS.dist

            # (the 'if options.sources:' guard is missing here)
            archive_name += "-SRC"
            if options.with_vcs:
                archive_name += "-VCS"

        # (the 'if options.sat:' guard is missing here)
        archive_name += ("salomeTools_" + src.get_salometool_version(runner.cfg))

        # (the project guard is missing here)
        archive_name += ("satproject_" + options.project)

    if len(archive_name)==0: # no option worked
        # (the closing of this message and the 'return 1' are missing here)
        msg = _("Error: Cannot name the archive\n"
                " check if at least one of the following options was "
                "selected : --binaries, --sources, --project or"
        logger.write(src.printcolors.printcError(msg), 1)
        logger.write("\n", 1)

    path_targz = os.path.join(dir_name, archive_name + PACKAGE_EXT)

    src.printcolors.print_value(logger, "Package path", path_targz, 2)

    # Create a working directory for all files that are produced during the
    # package creation and that will be removed at the end of the command
    tmp_working_dir = os.path.join(runner.cfg.VARS.tmp_root, runner.cfg.VARS.datehour)
    src.ensure_path_exists(tmp_working_dir)
    logger.write("\n", 5)
    logger.write(_("The temporary working directory: %s\n" % tmp_working_dir),5)

    logger.write("\n", 3)

    msg = _("Preparation of files to add to the archive")
    logger.write(src.printcolors.printcLabel(msg), 2)
    logger.write("\n", 2)

    d_files_to_add={} # content of the archive

    # a dict to hold paths that will need to be substitute for users recompilations
    d_paths_to_substitute={}

    if options.binaries:
        # (continuation arguments of this binary_package call are missing)
        d_bin_files_to_add = binary_package(runner.cfg,
        # for all binaries dir, store the substitution that will be required
        # for extra compilations
        for key in d_bin_files_to_add:
            if key.endswith("(bin)"):
                source_dir = d_bin_files_to_add[key][0]
                path_in_archive = d_bin_files_to_add[key][1].replace(
                    runner.cfg.INTERNAL.config.binary_dir + runner.cfg.VARS.dist,
                    runner.cfg.INTERNAL.config.install_dir)
                if os.path.basename(source_dir)==os.path.basename(path_in_archive):
                    # if basename is the same we will just substitute the dirname
                    d_paths_to_substitute[os.path.dirname(source_dir)]=\
                        os.path.dirname(path_in_archive)
                    # (presumably under an 'else:' missing here)
                    d_paths_to_substitute[source_dir]=path_in_archive

        d_files_to_add.update(d_bin_files_to_add)

    # NOTE(review): the 'if options.sources:' guard and the continuation
    # arguments of this source_package call are missing from this extract.
        d_files_to_add.update(source_package(runner,

        if options.binaries:
            # for archives with bin and sources we provide a shell script able to
            # install binaries for compilation
            # (continuation arguments of this call are missing here)
            file_install_bin=produce_install_bin_file(runner.cfg,logger,
                                                      d_paths_to_substitute,
            d_files_to_add.update({"install_bin" : (file_install_bin, "install_bin.sh")})
            logger.write("substitutions that need to be done later : \n", 5)
            logger.write(str(d_paths_to_substitute), 5)
            logger.write("\n", 5)

        # --salomeTool option is not considered when --sources is selected, as this option
        # already brings salomeTool!
        # (guard and continuation arguments are missing here)
        d_files_to_add.update(sat_package(runner.cfg, tmp_working_dir,

        # (the 'if options.project:' guard is missing here)
        DBG.write("config for package %s" % options.project, runner.cfg)
        d_files_to_add.update(project_package(runner.cfg, options.project, options.project_file_path, options.ftp, tmp_working_dir, options.sat, logger))

    if not(d_files_to_add):
        # (the early 'return 1' after this message is missing here)
        msg = _("Error: Empty dictionnary to build the archive!\n")
        logger.write(src.printcolors.printcError(msg), 1)
        logger.write("\n", 1)

    # Add the README file in the package
    local_readme_tmp_path = add_readme(runner.cfg, options, tmp_working_dir)
    d_files_to_add["README"] = (local_readme_tmp_path, "README")

    # Add the additional files of option add_files
    if options.add_files:
        for file_path in options.add_files:
            if not os.path.exists(file_path):
                # (the 'continue' after this warning is missing here)
                msg = _("WARNING: the file %s is not accessible.\n" % file_path)
            file_name = os.path.basename(file_path)
            d_files_to_add[file_name] = (file_path, file_name)

    logger.write("\n", 2)
    logger.write(src.printcolors.printcLabel(_("Actually do the package")), 2)
    logger.write("\n", 2)
    logger.write("\nfiles and directories to add:\n%s\n\n" % PP.pformat(d_files_to_add), 5)

    # NOTE(review): the 'try:' line opening this block is missing from
    # this extract (the matching 'except KeyboardInterrupt:' is below).
        # Creating the object tarfile
        tar = tarfile.open(path_targz, mode='w:gz')

        # get the filtering function if needed
        filter_function = exclude_VCS_and_extensions

        # Add the files to the tarfile object
        res = add_files(tar, archive_name, d_files_to_add, logger, f_exclude=filter_function)
        # (tar.close() is not visible in this extract)
    except KeyboardInterrupt:
        logger.write(src.printcolors.printcError("\nERROR: forced interruption\n"), 1)
        logger.write(_("Removing the temporary working directory '%s'... ") % tmp_working_dir, 1)
        # remove the working directory
        shutil.rmtree(tmp_working_dir)
        logger.write(_("OK"), 1)
        logger.write(_("\n"), 1)
        # (an early 'return 1' is presumably missing here)

    # case if no application, only package sat as 'sat package -t'
    # (the try/except wrapping of this access is missing from this extract)
    app = runner.cfg.APPLICATION

    # unconditionaly remove the tmp_local_working_dir
    tmp_local_working_dir = os.path.join(app.workdir, "tmp_package")
    if os.path.isdir(tmp_local_working_dir):
        shutil.rmtree(tmp_local_working_dir)

    # remove the tmp directory, unless user has registered as developer
    if os.path.isdir(tmp_working_dir) and (not DBG.isDeveloper()):
        shutil.rmtree(tmp_working_dir)

    # Print again the path of the package
    logger.write("\n", 2)
    src.printcolors.print_value(logger, "Package path", path_targz, 2)
    # NOTE(review): the final 'return res' line is not visible in this extract.