3 # Copyright (C) 2010-2012 CEA/DEN
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 # Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
31 from application import get_SALOME_modules
32 import src.debug as DBG
# Top-level directory names used inside produced package archives.
39 ARCHIVE_DIR = "ARCHIVES"
40 PROJECT_DIR = "PROJECT"
# VCS bookkeeping directories filtered out when archiving product sources.
42 IGNORED_DIRS = [".git", ".svn"]
# File extensions excluded from source archives (none by default).
43 IGNORED_EXTENSIONS = []
45 PACKAGE_EXT=".tar.gz" # the extension we use for the packages
47 PROJECT_TEMPLATE = """#!/usr/bin/env python
50 # The path to the archive root directory
51 root_path : $PWD + "/../"
53 project_path : $PWD + "/"
55 # Where to search the archives of the products
56 ARCHIVEPATH : $root_path + "ARCHIVES"
57 # Where to search the pyconf of the applications
58 APPLICATIONPATH : $project_path + "applications/"
59 # Where to search the pyconf of the products
60 PRODUCTPATH : $project_path + "products/"
61 # Where to search the pyconf of the jobs of the project
62 JOBPATH : $project_path + "jobs/"
63 # Where to search the pyconf of the machines of the project
64 MACHINEPATH : $project_path + "machines/"
67 LOCAL_TEMPLATE = ("""#!/usr/bin/env python
75 archive_dir : 'default'
82 project_file_paths : [$VARS.salometoolsway + $VARS.sep + \"..\" + $VARS.sep"""
83 """ + \"""" + PROJECT_DIR + """\" + $VARS.sep + "project.pyconf"]
87 # Define all possible option for the package command :  sat package <options>
88 parser = src.options.Options()
# Each add_option registers: short flag, long flag, value type, result
# attribute name, help text, and (where visible) the default value.
89 parser.add_option('b', 'binaries', 'boolean', 'binaries',
90     _('Optional: Produce a binary package.'), False)
91 parser.add_option('f', 'force_creation', 'boolean', 'force_creation',
92     _('Optional: Only binary package: produce the archive even if '
93       'there are some missing products.'), False)
94 parser.add_option('s', 'sources', 'boolean', 'sources',
95     _('Optional: Produce a compilable archive of the sources of the '
96       'application.'), False)
97 parser.add_option('', 'with_vcs', 'boolean', 'with_vcs',
98     _('Optional: Do not make archive for products in VCS mode (git, cvs, svn). '
99       'Sat prepare will use VCS mode instead to retrieve them'),
# NOTE(review): the default-value argument for with_vcs is not visible in
# this view — confirm it is False
101 parser.add_option('', 'ftp', 'boolean', 'ftp',
102     _('Optional: Do not embed archives for products in archive mode.'
103     'Sat prepare will use ftp instead to retrieve them'),
# NOTE(review): the default-value argument for ftp is not visible in this
# view — confirm it is False
105 parser.add_option('p', 'project', 'string', 'project',
106     _('Optional: Produce an archive that contains a project.'), "")
107 parser.add_option('t', 'salometools', 'boolean', 'sat',
108     _('Optional: Produce an archive that contains salomeTools.'), False)
109 parser.add_option('n', 'name', 'string', 'name',
110     _('Optional: The name or full path of the archive.'), None)
111 parser.add_option('', 'add_files', 'list2', 'add_files',
112     _('Optional: The list of additional files to add to the archive.'), [])
113 parser.add_option('', 'without_properties', 'properties', 'without_properties',
114     _('Optional: Filter the products by their properties.\n\tSyntax: '
115       '--without_properties <property>:<value>'))
# Writes every (local_path, path_in_archive) pair of d_content into the open
# tarfile, logging one aligned OK/KO status line per entry.
118 def add_files(tar, name_archive, d_content, logger, f_exclude=None):
119     '''Create an archive containing all directories and files that are given in
120        the d_content argument.
122     :param tar tarfile: The tarfile instance used to make the archive.
123     :param name_archive str: The name of the archive to make.
124     :param d_content dict: The dictionary that contain all directories and files
125                            to add in the archive.
127                            (path_on_local_machine, path_in_archive)
128     :param logger Logger: the logging instance
129     :param f_exclude Function: the function that filters
130     :return: 0 if success, 1 if not.
133     # get the max length of the messages in order to make the display
134     max_len = len(max(d_content.keys(), key=len))
137     # loop over each directory or file stored in the d_content dictionary
138     names = sorted(d_content.keys())
139     DBG.write("add tar names", names)
141     # used to avoid duplications (for pip install in python, or single_install_dir cases)
# dotted padding so the OK/KO column lines up across entries
144         # display information
145         len_points = max_len - len(name) + 3
146         local_path, archive_path = d_content[name]
147         in_archive = os.path.join(name_archive, archive_path)
148         logger.write(name + " " + len_points * "." + " "+ in_archive + " ", 3)
149         # Get the local path and the path in archive
150         # of the directory or file to add
151         # Add it in the archive
# key encodes source->destination; entries archived once are not re-added
153             key=local_path+"->"+in_archive
154             if key not in already_added:
155                 tar.add(local_path, arcname=in_archive, exclude=f_exclude)
156                 already_added.add(key)
157             logger.write(src.printcolors.printcSuccess(_("OK")), 3)
158         except Exception as e:
# on failure, report KO plus the exception text instead of aborting the loop
159             logger.write(src.printcolors.printcError(_("KO ")), 3)
160             logger.write(str(e), 3)
162         logger.write("\n", 3)
# Predicate given to tarfile.add(exclude=...): returns True for paths that
# must be left out of the archive.
165 def exclude_VCS_and_extensions(filename):
166     ''' The function that is used to exclude from package the link to the
167         VCS repositories (like .git)
169     :param filename Str: The filname to exclude (or not).
170     :return: True if the file has to be exclude
173     for dir_name in IGNORED_DIRS:
# substring match: any path containing an ignored VCS directory is excluded
174         if dir_name in filename:
176     for extension in IGNORED_EXTENSIONS:
# suffix match on the configured extensions (IGNORED_EXTENSIONS is empty by default)
177         if filename.endswith(extension):
# Builds the package-relative "salome" launcher that ships with a binary
# package, patching paths so they resolve against out_dir_Path at runtime.
181 def produce_relative_launcher(config,
186     '''Create a specific SALOME launcher for the binary package. This launcher
189     :param config Config: The global configuration.
190     :param logger Logger: the logging instance
191     :param file_dir str: the directory where to put the launcher
192     :param file_name str: The launcher name
193     :param binaries_dir_name str: the name of the repository where the binaries
195     :return: the path of the produced launcher
199     # get KERNEL installation path
200     kernel_root_dir = os.path.join(binaries_dir_name, "KERNEL")
202     # set kernel bin dir (considering fhs property)
203     kernel_cfg = src.product.get_product_config(config, "KERNEL")
204     if src.get_property_in_product_cfg(kernel_cfg, "fhs"):
205         bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin")
207         bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin","salome")
209     # check if the application contains an application module
210     # check also if the application has a distene product,
211     # in this case get its licence file name
212     l_product_info = src.product.get_products_infos(config.APPLICATION.products.keys(), config)
# sentinel value: replaced below if a "salome application" product is found
213     salome_application_name="Not defined"
214     distene_licence_file_name=False
215     for prod_name, prod_info in l_product_info:
216         # look for a "salome application" and a distene product
217         if src.get_property_in_product_cfg(prod_info, "is_distene") == "yes":
218             distene_licence_file_name = src.product.product_has_licence(prod_info,
219                                             config.PATHS.LICENCEPATH)
220         if src.get_property_in_product_cfg(prod_info, "is_salome_application") == "yes":
221             salome_application_name=prod_info.name
223     # if the application contains an application module, we set ABSOLUTE_APPLI_PATH to it
224     # if not we set it to KERNEL_INSTALL_DIR, which is sufficient, except for salome test
225     if salome_application_name == "Not defined":
226         app_root_dir=kernel_root_dir
228         app_root_dir=os.path.join(binaries_dir_name, salome_application_name)
# NOTE(review): additional_env appears to be initialized in a line elided
# from this view — confirm before relying on it
231     additional_env['sat_bin_kernel_install_dir'] = "out_dir_Path + " +\
232                                                    config.VARS.sep + bin_kernel_install_dir
233     if "python3" in config.APPLICATION and config.APPLICATION.python3 == "yes":
234         additional_env['sat_python_version'] = 3
236         additional_env['sat_python_version'] = 2
238     additional_env['ABSOLUTE_APPLI_PATH'] = "out_dir_Path" + config.VARS.sep + app_root_dir
240     # create an environment file writer
241     writer = src.environment.FileEnvWriter(config,
247     filepath = os.path.join(file_dir, file_name)
249     writer.write_env_file(filepath,
252                           additional_env=additional_env,
253                           no_path_init="False",
254                           for_package = binaries_dir_name)
256     # Little hack to put out_dir_Path outside the strings
257     if src.architecture.is_windows():
# on Windows, out_dir_Path becomes a %...% environment-variable reference
258         src.replace_in_file(filepath, '=out_dir_Path', '=%out_dir_Path%' )
259         src.replace_in_file(filepath, ';out_dir_Path', ';%out_dir_Path%' )
260         src.replace_in_file(filepath, 'out_dir_Path;', '%out_dir_Path%;' )
261         src.replace_in_file(filepath, 'r"out_dir_Path', '%out_dir_Path% + r"' )
262         src.replace_in_file(filepath, "r'out_dir_Path + ", "%out_dir_Path% + r'" )
# on other platforms, out_dir_Path is hoisted out of the raw string literals
264         src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' )
265         src.replace_in_file(filepath, "r'out_dir_Path + ", "out_dir_Path + r'" )
267     # A hack to put a call to a file for distene licence.
268     # It does nothing to an application that has no distene product
269     if distene_licence_file_name:
270         logger.write("Application has a distene licence file! We use it in package launcher", 5)
271         hack_for_distene_licence(filepath, distene_licence_file_name)
273     # change the rights in order to make the file executable for everybody
# Rewrites a generated launcher so the DISTENE licence variables are set by
# dynamically loading an external licence file instead of being inlined.
285 def hack_for_distene_licence(filepath, licence_file):
286     '''Replace the distene licence env variable by a call to a file.
288     :param filepath Str: The path to the launcher to modify.
# keep the original launcher as <filepath>_old and rewrite the launcher itself
290     shutil.move(filepath, filepath + "_old")
292     filein = filepath + "_old"
293     fin = open(filein, "r")
# NOTE(review): fileout is assigned in a line elided from this view
# (presumably the original filepath) — confirm
294     fout = open(fileout, "w")
295     text = fin.readlines()
296     # Find the Distene section
298     for i,line in enumerate(text):
299         if "# Set DISTENE License" in line:
303         # No distene product, there is nothing to do
# drop the two lines that follow the marker (the inlined licence settings);
# num_line is presumably set by the marker search above — confirm
309     del text[num_line +1]
310     del text[num_line +1]
311     text_to_insert ="""    try:
312         distene_licence_file="%s"
313         if sys.version_info[0] >= 3 and sys.version_info[1] >= 5:
314             import importlib.util
315             spec_dist = importlib.util.spec_from_file_location("distene_licence", distene_licence_file)
316             distene=importlib.util.module_from_spec(spec_dist)
317             spec_dist.loader.exec_module(distene)
320             distene = imp.load_source('distene_licence', distene_licence_file)
321         distene.set_distene_variables(context)
323         pass\n""" % licence_file
324     text.insert(num_line + 1, text_to_insert)
# Generates the package-relative environment script (env_launch.bat/.sh)
# whose paths resolve against out_dir_Path at runtime.
331 def produce_relative_env_files(config,
335     '''Create some specific environment files for the binary package. These
336        files use relative paths.
338     :param config Config: The global configuration.
339     :param logger Logger: the logging instance
340     :param file_dir str: the directory where to put the files
341     :param binaries_dir_name str: the name of the repository where the binaries
343     :return: the list of path of the produced environment files
346     # create an environment file writer
347     writer = src.environment.FileEnvWriter(config,
# pick the platform-appropriate script name
352     if src.architecture.is_windows():
354       filename  = "env_launch.bat"
357       filename  = "env_launch.sh"
360     filepath = writer.write_env_file(filename,
363                           for_package = binaries_dir_name)
365     # Little hack to put out_dir_Path as environment variable
366     if src.architecture.is_windows() :
367       src.replace_in_file(filepath, '"out_dir_Path', '"%out_dir_Path%' )
368       src.replace_in_file(filepath, '=out_dir_Path', '=%out_dir_Path%' )
# on unix the variable is referenced with shell ${...} syntax
370       src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )
372     # change the rights in order to make the file executable for everybody
# Generates the install_bin shell script from INSTALL_BIN.template, filling
# in a grep/sed loop that rewrites hard-coded paths in the BINARIES dir.
384 def produce_install_bin_file(config,
389     '''Create a bash shell script which do substitutions in BIRARIES dir
390        in order to use it for extra compilations.
392     :param config Config: The global configuration.
393     :param logger Logger: the logging instance
394     :param file_dir str: the directory where to put the files
395     :param d_sub, dict: the dictionnary that contains the substitutions to be done
396     :param file_name str: the name of the install script file
397     :return: the produced file
401     filepath = os.path.join(file_dir, file_name)
402     # open the file and write into it
403     # use codec utf-8 as sat variables are in unicode
404     with codecs.open(filepath, "w", 'utf-8') as installbin_file:
405         installbin_template_path = os.path.join(config.VARS.internal_dir,
406                                         "INSTALL_BIN.template")
408         # build the name of the directory that will contain the binaries
409         binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
410         # build the substitution loop
# grep -RIl: recursive, skip binary files, list matching file names only
411         loop_cmd = "for f in $(grep -RIl"
413             loop_cmd += " -e "+ key
414         loop_cmd += ' ' + config.INTERNAL.config.install_dir +\
# sed expression: replace each key with $(pwd)/<substitution>
417             loop_cmd += "        s?" + key + "?$(pwd)/" + d_sub[key] + "?g\n"
418         loop_cmd += '    " $f\ndone'
# d holds the values substituted into the template placeholders
421         d["BINARIES_DIR"] = binaries_dir_name
422         d["SUBSTITUTION_LOOP"]=loop_cmd
423         d["INSTALL_DIR"]=config.INTERNAL.config.install_dir
425         # substitute the template and write it in file
426         content=src.template.substitute(installbin_template_path, d)
427         installbin_file.write(content)
428     # change the rights in order to make the file executable for everybody
# Generates create_appli.py from its template, adding one <module> XML line
# per GUI SALOME module found in the application.
440 def product_appli_creation_script(config,
444     '''Create a script that can produce an application (EDF style) in the binary
447     :param config Config: The global configuration.
448     :param logger Logger: the logging instance
449     :param file_dir str: the directory where to put the file
450     :param binaries_dir_name str: the name of the repository where the binaries
452     :return: the path of the produced script file
455     template_name = "create_appli.py.for_bin_packages.template"
456     template_path = os.path.join(config.VARS.internal_dir, template_name)
457     text_to_fill = open(template_path, "r").read()
458     text_to_fill = text_to_fill.replace("TO BE FILLED 1",
459                                         '"' + binaries_dir_name + '"')
462     for product_name in get_SALOME_modules(config):
463         product_info = src.product.get_product_config(config, product_name)
# smesh plugins are shipped inside SMESH and get no <module> entry of their own
465         if src.product.product_is_smesh_plugin(product_info):
468         if 'install_dir' in product_info and bool(product_info.install_dir):
469             if src.product.product_is_cpp(product_info):
# cpp products contribute one <module> line per generated component
471                 for cpp_name in src.product.get_product_components(product_info):
472                     line_to_add = ("<module name=\"" +
474                                    "\" gui=\"yes\" path=\"''' + "
475                                    "os.path.join(dir_bin_name, \"" +
476                                    cpp_name + "\") + '''\"/>")
479                 line_to_add = ("<module name=\"" +
481                                "\" gui=\"yes\" path=\"''' + "
482                                "os.path.join(dir_bin_name, \"" +
483                                product_name + "\") + '''\"/>")
484             text_to_add += line_to_add + "\n"
486     filled_text = text_to_fill.replace("TO BE FILLED 2", text_to_add)
488     tmp_file_path = os.path.join(file_dir, "create_appli.py")
489     ff = open(tmp_file_path, "w")
490     ff.write(filled_text)
493     # change the rights in order to make the file executable for everybody
494     os.chmod(tmp_file_path,
# Collects every directory/file needed for a binary package and returns the
# {label: (local_path, path_in_archive)} mapping consumed by add_files().
505 def binary_package(config, logger, options, tmp_working_dir):
506     '''Prepare a dictionary that stores all the needed directories and files to
507        add in a binary package.
509     :param config Config: The global configuration.
510     :param logger Logger: the logging instance
511     :param options OptResult: the options of the launched command
512     :param tmp_working_dir str: The temporary local directory containing some
513                                 specific directories or files needed in the
515     :return: the dictionary that stores all the needed directories and files to
516              add in a binary package.
517              {label : (path_on_local_machine, path_in_archive)}
521     # Get the list of product installation to add to the archive
522     l_products_name = sorted(config.APPLICATION.products.keys())
523     l_product_info = src.product.get_products_infos(l_products_name,
528     l_sources_not_present = []
529     generate_mesa_launcher = False  # a flag to know if we generate a mesa launcher
530     if ("APPLICATION" in config and
531         "properties" in config.APPLICATION and
532         "mesa_launcher_in_package" in config.APPLICATION.properties and
533         config.APPLICATION.properties.mesa_launcher_in_package == "yes") :
534             generate_mesa_launcher=True
536     for prod_name, prod_info in l_product_info:
537         # skip product with property not_in_package set to yes
538         if src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes":
541         # Add the sources of the products that have the property
542         # sources_in_package : "yes"
543         if src.get_property_in_product_cfg(prod_info,
544                                            "sources_in_package") == "yes":
545             if os.path.exists(prod_info.source_dir):
546                 l_source_dir.append((prod_name, prod_info.source_dir))
548                 l_sources_not_present.append(prod_name)
550         # ignore the native and fixed products for install directories
551         if (src.product.product_is_native(prod_info)
552                 or src.product.product_is_fixed(prod_info)
553                 or not src.product.product_compiles(prod_info)):
555         if src.product.check_installation(config, prod_info):
556             l_install_dir.append((prod_name, prod_info.install_dir))
558             l_not_installed.append(prod_name)
560         # Add also the cpp generated modules (if any)
561         if src.product.product_is_cpp(prod_info):
563             for name_cpp in src.product.get_product_components(prod_info):
564                 install_dir = os.path.join(config.APPLICATION.workdir,
565                                            config.INTERNAL.config.install_dir,
567                 if os.path.exists(install_dir):
568                     l_install_dir.append((name_cpp, install_dir))
570                     l_not_installed.append(name_cpp)
572     # check the name of the directory that (could) contains the binaries
573     # from previous detar
574     binaries_from_detar = os.path.join(
575                               config.APPLICATION.workdir,
576                               config.INTERNAL.config.binary_dir + config.VARS.dist)
577     if os.path.exists(binaries_from_detar):
579 WARNING: existing binaries directory from previous detar installation:
581 To make new package from this, you have to:
582 1) install binaries in INSTALL directory with the script "install_bin.sh"
583    see README file for more details
584 2) or recompile everything in INSTALL with "sat compile" command
585    this step is long, and requires some linux packages to be installed
587 """ % binaries_from_detar)
589     # Print warning or error if there are some missing products
590     if len(l_not_installed) > 0:
591         text_missing_prods = ""
592         for p_name in l_not_installed:
593             text_missing_prods += "-" + p_name + "\n"
# without --force_creation, missing installations abort the packaging
594         if not options.force_creation:
595             msg = _("ERROR: there are missing products installations:")
596             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
601             msg = _("WARNING: there are missing products installations:")
602             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
606     # Do the same for sources
607     if len(l_sources_not_present) > 0:
608         text_missing_prods = ""
609         for p_name in l_sources_not_present:
610             text_missing_prods += "-" + p_name + "\n"
611         if not options.force_creation:
612             msg = _("ERROR: there are missing products sources:")
613             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
618             msg = _("WARNING: there are missing products sources:")
619             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
623     # construct the name of the directory that will contain the binaries
624     if src.architecture.is_windows():
625         binaries_dir_name = config.INTERNAL.config.binary_dir
627         binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
628     # construct the correlation table between the product names, there
629     # actual install directories and there install directory in archive
631     for prod_name, install_dir in l_install_dir:
632         path_in_archive = os.path.join(binaries_dir_name, os.path.basename(install_dir))
633         d_products[prod_name + " (bin)"] = (install_dir, path_in_archive)
635     for prod_name, source_dir in l_source_dir:
636         path_in_archive = os.path.join("SOURCES", prod_name)
637         d_products[prod_name + " (sources)"] = (source_dir, path_in_archive)
639     # for packages of SALOME applications including KERNEL,
640     # we produce a salome launcher or a virtual application (depending on salome version)
641     if 'KERNEL' in config.APPLICATION.products:
642         VersionSalome = src.get_salome_version(config)
643         # Case where SALOME has the launcher that uses the SalomeContext API
644         if VersionSalome >= 730:
645             # create the relative launcher and add it to the files to add
646             launcher_name = src.get_launcher_name(config)
647             launcher_package = produce_relative_launcher(config,
652             d_products["launcher"] = (launcher_package, launcher_name)
654             # if the application contains mesa products, we generate in addition to the
655             # classical salome launcher a launcher using mesa and called mesa_salome
656             # (the mesa launcher will be used for remote usage through ssh).
657             if generate_mesa_launcher:
658                 #if there  is one : store the use_mesa property
659                 restore_use_mesa_option=None
660                 if ('properties' in config.APPLICATION and
661                     'use_mesa' in config.APPLICATION.properties):
662                     restore_use_mesa_option = config.APPLICATION.properties.use_mesa
664                 # activate mesa property, and generate a mesa launcher
665                 src.activate_mesa_property(config)  #activate use_mesa property
666                 launcher_mesa_name="mesa_"+launcher_name
667                 launcher_package_mesa = produce_relative_launcher(config,
672                 d_products["launcher (mesa)"] = (launcher_package_mesa, launcher_mesa_name)
674                 # if there was a use_mesa value, we restore it
675                 # else we set it to the default value "no"
676                 if restore_use_mesa_option != None:
677                     config.APPLICATION.properties.use_mesa=restore_use_mesa_option
679                     config.APPLICATION.properties.use_mesa="no"
682                 # if we mix binaries and sources, we add a copy of the launcher,
683                 # prefixed with "bin",in order to avoid clashes
684                 d_products["launcher (copy)"] = (launcher_package, "bin"+launcher_name)
686             # Provide a script for the creation of an application EDF style
687             appli_script = product_appli_creation_script(config,
692             d_products["appli script"] = (appli_script, "create_appli.py")
694     # Put also the environment file
695     env_file = produce_relative_env_files(config,
700     if src.architecture.is_windows():
701       filename  = "env_launch.bat"
703       filename  = "env_launch.sh"
704     d_products["environment file"] = (env_file, filename)
# Collects product archives, vcs-derived archives, the generated PROJECT
# tree and an embedded salomeTools copy into the {label: (local_path,
# path_in_archive)} mapping for a source package.
708 def source_package(sat, config, logger, options, tmp_working_dir):
709     '''Prepare a dictionary that stores all the needed directories and files to
710        add in a source package.
712     :param config Config: The global configuration.
713     :param logger Logger: the logging instance
714     :param options OptResult: the options of the launched command
715     :param tmp_working_dir str: The temporary local directory containing some
716                                 specific directories or files needed in the
718     :return: the dictionary that stores all the needed directories and files to
719              add in a source package.
720              {label : (path_on_local_machine, path_in_archive)}
725     # Get all the products that are prepared using an archive
726     # unless ftp mode is specified (in this case the user of the
727     # archive will get the sources through the ftp mode of sat prepare
729         logger.write("Find archive products ... ")
730         d_archives, l_pinfo_vcs = get_archives(config, logger)
731         logger.write("Done\n")
734     if not options.with_vcs and len(l_pinfo_vcs) > 0:
735         # Make archives with the products that are not prepared using an archive
736         # (git, cvs, svn, etc)
737         logger.write("Construct archives for vcs products ... ")
738         d_archives_vcs = get_archives_vcs(l_pinfo_vcs,
743         logger.write("Done\n")
746     logger.write("Create the project ... ")
747     d_project = create_project_for_src_package(config,
751     logger.write("Done\n")
# embed a self-contained copy of salomeTools in the package
754     tmp_sat = add_salomeTools(config, tmp_working_dir)
755     d_sat = {"salomeTools" : (tmp_sat, "salomeTools")}
757     # Add a sat symbolic link if not win
758     if not src.architecture.is_windows():
759         tmp_satlink_path = os.path.join(tmp_working_dir, 'sat')
763             # In the jobs, os.getcwd() can fail
764             t = config.LOCAL.workdir
765             os.chdir(tmp_working_dir)
# replace any stale link before creating the relative symlink
766             if os.path.lexists(tmp_satlink_path):
767                 os.remove(tmp_satlink_path)
768             os.symlink(os.path.join('salomeTools', 'sat'), 'sat')
771         d_sat["sat link"] = (tmp_satlink_path, "sat")
773     d_source = src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
# Splits the application's products into archive-provided ones (returned as
# a mapping into ARCHIVE_DIR) and vcs-provided ones (returned as a list of
# (name, info) pairs for get_archives_vcs).
776 def get_archives(config, logger):
777     '''Find all the products that are get using an archive and all the products
778        that are get using a vcs (git, cvs, svn) repository.
780     :param config Config: The global configuration.
781     :param logger Logger: the logging instance
782     :return: the dictionary {name_product :
783              (local path of its archive, path in the package of its archive )}
784              and the list of specific configuration corresponding to the vcs
788     # Get the list of product informations
789     l_products_name = config.APPLICATION.products.keys()
790     l_product_info = src.product.get_products_infos(l_products_name,
794     for p_name, p_info in l_product_info:
795         # skip product with property not_in_package set to yes
796         if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
798         # ignore the native and fixed products
799         if (src.product.product_is_native(p_info)
800                 or src.product.product_is_fixed(p_info)):
802         if p_info.get_source == "archive":
803             archive_path = p_info.archive_info.archive_name
804             archive_name = os.path.basename(archive_path)
805             d_archives[p_name] = (archive_path,
806                                   os.path.join(ARCHIVE_DIR, archive_name))
807         if (src.appli_test_property(config,"pip", "yes") and
808             src.product.product_test_property(p_info,"pip", "yes")):
809             # if pip mode is activated, and product is managed by pip
# locate the product wheel in the local wheels cache by a name-version glob
810             pip_wheels_dir=os.path.join(config.LOCAL.archive_dir,"wheels")
811             pip_wheel_pattern=os.path.join(pip_wheels_dir,
812                                            "%s-%s*" % (p_info.name, p_info.version))
813             pip_wheel_path=glob.glob(pip_wheel_pattern)
814             msg_pip_not_found="Error in get_archive, pip wheel for "\
815                               "product %s-%s was not found in %s directory"
816             msg_pip_two_or_more="Error in get_archive, several pip wheels for "\
817                               "product %s-%s were found in %s directory"
# exactly one wheel must match; zero or several is a configuration error
818             if len(pip_wheel_path)==0:
819                 raise src.SatException(msg_pip_not_found %\
820                     (p_info.name, p_info.version, pip_wheels_dir))
821             if len(pip_wheel_path)>1:
822                 raise src.SatException(msg_pip_two_or_more %\
823                     (p_info.name, p_info.version, pip_wheels_dir))
825             pip_wheel_name=os.path.basename(pip_wheel_path[0])
826             d_archives[p_name+" (pip wheel)"]=(pip_wheel_path[0],
827                 os.path.join(ARCHIVE_DIR, "wheels", pip_wheel_name))
829             # this product is not managed by archive,
830             # an archive of the vcs directory will be created by get_archive_vcs
831             l_pinfo_vcs.append((p_name, p_info))
833     return d_archives, l_pinfo_vcs
# Copies the running salomeTools into the temporary package tree and rewrites
# its local.pyconf so it points at the embedded PROJECT.
835 def add_salomeTools(config, tmp_working_dir):
836     '''Prepare a version of salomeTools that has a specific local.pyconf file
837        configured for a source package.
839     :param config Config: The global configuration.
840     :param tmp_working_dir str: The temporary local directory containing some
841                                 specific directories or files needed in the
843     :return: The path to the local salomeTools directory to add in the package
846     # Copy sat in the temporary working directory
847     sat_tmp_path = src.Path(os.path.join(tmp_working_dir, "salomeTools"))
848     sat_running_path = src.Path(config.VARS.salometoolsway)
849     sat_running_path.copy(sat_tmp_path)
851     # Update the local.pyconf file that contains the path to the project
852     local_pyconf_name = "local.pyconf"
853     local_pyconf_dir = os.path.join(tmp_working_dir, "salomeTools", "data")
854     local_pyconf_file = os.path.join(local_pyconf_dir, local_pyconf_name)
855     # Remove the .pyconf file in the root directory of salomeTools if there is
856     # any. (For example when launching jobs, a pyconf file describing the jobs
857     # can be here and is not useful)
858     files_or_dir_SAT = os.listdir(os.path.join(tmp_working_dir, "salomeTools"))
859     for file_or_dir in files_or_dir_SAT:
860         if file_or_dir.endswith(".pyconf") or file_or_dir.endswith(".txt"):
861             file_path = os.path.join(tmp_working_dir,
# overwrite the copied local.pyconf with the package-specific template
866     ff = open(local_pyconf_file, "w")
867     ff.write(LOCAL_TEMPLATE)
870     return sat_tmp_path.path
# Runs "sat source" into a temporary workdir and tars each vcs product's
# sources so the package only contains plain archives.
872 def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir):
873     '''For sources package that require that all products are get using an
874        archive, one has to create some archive for the vcs products.
875        So this method calls the clean and source command of sat and then create
878     :param l_pinfo_vcs List: The list of specific configuration corresponding to
880     :param sat Sat: The Sat instance that can be called to clean and source the
882     :param config Config: The global configuration.
883     :param logger Logger: the logging instance
884     :param tmp_working_dir str: The temporary local directory containing some
885                                 specific directories or files needed in the
887     :return: the dictionary that stores all the archives to add in the source
888              package. {label : (path_on_local_machine, path_in_archive)}
891     # clean the source directory of all the vcs products, then use the source
892     # command and thus construct an archive that will not contain the patches
893     l_prod_names = [pn for pn, __ in l_pinfo_vcs]
# deliberately disabled: cleaning user SOURCES was judged too dangerous;
# sources are fetched into tmp_local_working_dir instead
894     if False: # clean is dangerous in user/SOURCES, fixed in tmp_local_working_dir
895       logger.write(_("\nclean sources\n"))
896       args_clean = config.VARS.application
897       args_clean += " --sources --products "
898       args_clean += ",".join(l_prod_names)
899       logger.write("WARNING: get_archives_vcs clean\n         '%s'\n" % args_clean, 1)
900       sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)
903       logger.write(_("get sources\n"))
904       args_source = config.VARS.application
905       args_source += " --products "
906       args_source += ",".join(l_prod_names)
# temporarily redirect the application workdir so fetched sources land in
# tmp_package; the original value is restored below
907       svgDir = sat.cfg.APPLICATION.workdir
908       tmp_local_working_dir = os.path.join(sat.cfg.APPLICATION.workdir, "tmp_package")  # to avoid too much big files in /tmp
909       sat.cfg.APPLICATION.workdir = tmp_local_working_dir
910       # DBG.write("SSS sat config.APPLICATION.workdir", sat.cfg.APPLICATION, True)
911       # DBG.write("sat config id", id(sat.cfg), True)
912       # shit as config is not same id() as for sat.source()
913       # sat.source(args_source, batch=True, verbose=5, logger_add_link = logger)
915       source.run(args_source, sat, logger) #use this mode as runner.cfg reference
917       # make the new archives
919       for pn, pinfo in l_pinfo_vcs:
920         path_archive = make_archive(pn, pinfo, tmp_local_working_dir)
921         logger.write("make archive vcs '%s'\n" % path_archive)
922         d_archives_vcs[pn] = (path_archive,
923                               os.path.join(ARCHIVE_DIR, pn + ".tgz"))
# restore the original application workdir
924       sat.cfg.APPLICATION.workdir = svgDir
925       # DBG.write("END sat config", sat.cfg.APPLICATION, True)
926     return d_archives_vcs
# Tars one product's source directory into <where>/<prod_name>.tar.gz.
928 def make_archive(prod_name, prod_info, where):
929     '''Create an archive of a product by searching its source directory.
931     :param prod_name str: The name of the product.
932     :param prod_info Config: The specific configuration corresponding to the
934     :param where str: The path of the repository where to put the resulting
936     :return: The path of the resulting archive
939     path_targz_prod = os.path.join(where, prod_name + PACKAGE_EXT)
940     tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
941     local_path = prod_info.source_dir
# VCS metadata and ignored extensions are filtered out via the exclude hook
942     tar_prod.add(local_path,
944                  exclude=exclude_VCS_and_extensions)
946     return path_targz_prod
# Builds the embedded PROJECT tree (project.pyconf, products/applications
# pyconf dirs, compile/env scripts, patches) for a source package.
948 def create_project_for_src_package(config, tmp_working_dir, with_vcs, with_ftp):
949     '''Create a specific project for a source package.
951     :param config Config: The global configuration.
952     :param tmp_working_dir str: The temporary local directory containing some
953                                 specific directories or files needed in the
955     :param with_vcs boolean: True if the package is with vcs products (not
956                              transformed into archive products)
957     :param with_ftp boolean: True if the package use ftp servers to get archives
958     :return: The dictionary
959              {"project" : (produced project, project path in the archive)}
963     # Create in the working temporary directory the full project tree
964     project_tmp_dir = os.path.join(tmp_working_dir, PROJECT_DIR)
965     products_pyconf_tmp_dir = os.path.join(project_tmp_dir,
967     compil_scripts_tmp_dir = os.path.join(project_tmp_dir,
970     env_scripts_tmp_dir = os.path.join(project_tmp_dir,
973     patches_tmp_dir = os.path.join(project_tmp_dir,
976     application_tmp_dir = os.path.join(project_tmp_dir,
978     for directory in [project_tmp_dir,
979                       compil_scripts_tmp_dir,
982                       application_tmp_dir]:
983         src.ensure_path_exists(directory)
985     # Create the pyconf that contains the information of the project
986     project_pyconf_name = "project.pyconf"
987     project_pyconf_file = os.path.join(project_tmp_dir, project_pyconf_name)
988     ff = open(project_pyconf_file, "w")
989     ff.write(PROJECT_TEMPLATE)
# append the ftp servers, colon-separated, when ftp mode is requested
990     if with_ftp and len(config.PATHS.ARCHIVEFTP) > 0:
991         ftp_path='ARCHIVEFTP : "'+config.PATHS.ARCHIVEFTP[0]
992         for ftpserver in config.PATHS.ARCHIVEFTP[1:]:
993             ftp_path=ftp_path+":"+ftpserver
995         ff.write("# ftp servers where to search for prerequisite archives\n")
997     # add licence paths if any
998     if len(config.PATHS.LICENCEPATH) > 0:
999         licence_path='LICENCEPATH : "'+config.PATHS.LICENCEPATH[0]
1000         for path in config.PATHS.LICENCEPATH[1:]:
1001             licence_path=licence_path+":"+path
1003         ff.write("\n# Where to search for licences\n")
1004         ff.write(licence_path)
1009     # Loop over the products to get there pyconf and all the scripts
1010     # (compilation, environment, patches)
1011     # and create the pyconf file to add to the project
1012     lproducts_name = config.APPLICATION.products.keys()
1013     l_products = src.product.get_products_infos(lproducts_name, config)
1014     for p_name, p_info in l_products:
1015         # skip product with property not_in_package set to yes
1016         if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
1018         find_product_scripts_and_pyconf(p_name,
1022                                         compil_scripts_tmp_dir,
1023                                         env_scripts_tmp_dir,
1025                                         products_pyconf_tmp_dir)
1027     find_application_pyconf(config, application_tmp_dir)
1029     d_project = {"project" : (project_tmp_dir, PROJECT_DIR )}
def find_product_scripts_and_pyconf(p_name,
                                    p_info,
                                    config,
                                    with_vcs,
                                    compil_scripts_tmp_dir,
                                    env_scripts_tmp_dir,
                                    patches_tmp_dir,
                                    products_pyconf_tmp_dir):
    '''Create a specific pyconf file for a given product. Get its environment
    script, its compilation script and patches and put it in the temporary
    working directory. This method is used in the source package in order to
    construct the specific project.

    :param p_name str: The name of the product.
    :param p_info Config: The specific configuration corresponding to the
                          product.
    :param config Config: The global configuration.
    :param with_vcs boolean: True if the package is with vcs products (not
                             transformed into archive products)
    :param compil_scripts_tmp_dir str: The path to the temporary compilation
                                       scripts directory of the project.
    :param env_scripts_tmp_dir str: The path to the temporary environment script
                                    directory of the project.
    :param patches_tmp_dir str: The path to the temporary patch scripts
                                directory of the project.
    :param products_pyconf_tmp_dir str: The path to the temporary product
                                        scripts directory of the project.
    '''
    # read the pyconf of the product
    product_pyconf_cfg = src.pyconf.Config(p_info.from_file)

    # find the compilation script if any
    if src.product.product_has_script(p_info):
        compil_script_path = src.Path(p_info.compil_script)
        compil_script_path.copy(compil_scripts_tmp_dir)

    # find the environment script if any
    if src.product.product_has_env_script(p_info):
        env_script_path = src.Path(p_info.environ.env_script)
        env_script_path.copy(env_scripts_tmp_dir)

    # find the patches if any
    if src.product.product_has_patches(p_info):
        patches = src.pyconf.Sequence()
        for patch_path in p_info.patches:
            p_path = src.Path(patch_path)
            p_path.copy(patches_tmp_dir)
            patches.append(os.path.basename(patch_path), "")
        # NOTE(review): 'patches' is collected but not written back to the
        # product pyconf here; only the copied patch files are used — confirm.

    if (not with_vcs) and src.product.product_is_vcs(p_info):
        # in non vcs mode, if the product is not archive, then make it become archive.

        # depending upon the incremental mode, select impacted sections
        if "properties" in p_info and "incremental" in p_info.properties and\
           p_info.properties.incremental == "yes":
            sections = ["default", "default_win", p_info.section, p_info.section+"_win"]
        else:
            sections = [p_info.section]
        for section in sections:
            if section in product_pyconf_cfg and "get_source" in product_pyconf_cfg[section]:
                DBG.write("sat package set archive mode to archive for product %s and section %s" %\
                          (p_name, section))
                product_pyconf_cfg[section].get_source = "archive"
                if not "archive_info" in product_pyconf_cfg[section]:
                    product_pyconf_cfg[section].addMapping("archive_info",
                                                           src.pyconf.Mapping(product_pyconf_cfg),
                                                           "")
                # the vcs sources were archived under <name>.tgz by get_archives_vcs
                product_pyconf_cfg[section].archive_info.archive_name =\
                    p_info.name + ".tgz"

    # write the pyconf file to the temporary project location
    product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir,
                                           p_name + ".pyconf")
    ff = open(product_tmp_pyconf_path, 'w')
    ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
    product_pyconf_cfg.__save__(ff, 1)
    ff.close()
def find_application_pyconf(config, application_tmp_dir):
    '''Find the application pyconf file and put it in the specific temporary
    directory containing the specific project of a source package.

    :param config Config: The global configuration.
    :param application_tmp_dir str: The path to the temporary application
                                    scripts directory of the project.
    '''
    # read the pyconf of the application
    application_name = config.VARS.application
    application_pyconf_path = src.find_file_in_lpath(
                                            application_name + ".pyconf",
                                            config.PATHS.APPLICATIONPATH)
    application_pyconf_cfg = src.pyconf.Config(application_pyconf_path)

    # Change the workdir so the unpacked application is self-contained,
    # relative to the packaged salomeTools location
    application_pyconf_cfg.APPLICATION.workdir = src.pyconf.Reference(
                                    application_pyconf_cfg,
                                    src.pyconf.DOLLAR,
                                    'VARS.salometoolsway + $VARS.sep + ".."')

    # Prevent from compilation in base
    application_pyconf_cfg.APPLICATION.no_base = "yes"

    # remove products that are not in config (which were filtered by --without_properties)
    # iterate over a snapshot of the keys: deleting while iterating the live
    # view raises RuntimeError on Python 3
    for product_name in list(application_pyconf_cfg.APPLICATION.products.keys()):
        if product_name not in config.APPLICATION.products.keys():
            application_pyconf_cfg.APPLICATION.products.__delitem__(product_name)

    # write the pyconf file to the temporary application location
    application_tmp_pyconf_path = os.path.join(application_tmp_dir,
                                               application_name + ".pyconf")
    ff = open(application_tmp_pyconf_path, 'w')
    ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
    application_pyconf_cfg.__save__(ff, 1)
    ff.close()
def sat_package(config, tmp_working_dir, options, logger):
    '''Prepare a dictionary that stores all the needed directories and files to
    add in a salomeTool package.

    :param config Config: The global configuration.
    :param tmp_working_dir str: The temporary local working directory
    :param options OptResult: the options of the launched command
    :param logger Logger: the logging instance used to display or log messages
    :return: the dictionary that stores all the needed directories and files to
             add in a salomeTool package.
             {label : (path_on_local_machine, path_in_archive)}
    :rtype: dict
    '''
    d_project = {}

    # we include sat himself
    d_project["all_sat"] = (config.VARS.salometoolsway, "")

    # and we overwrite local.pyconf with a clean version.
    local_pyconf_tmp_path = os.path.join(tmp_working_dir, "local.pyconf")
    local_file_path = os.path.join(config.VARS.datadir, "local.pyconf")
    local_cfg = src.pyconf.Config(local_file_path)
    local_cfg.PROJECTS.project_file_paths = src.pyconf.Sequence(local_cfg.PROJECTS)
    # reset user-specific settings so the packaged sat starts pristine
    local_cfg.LOCAL["base"] = "default"
    local_cfg.LOCAL["workdir"] = "default"
    local_cfg.LOCAL["log_dir"] = "default"
    local_cfg.LOCAL["archive_dir"] = "default"
    local_cfg.LOCAL["VCS"] = "None"
    local_cfg.LOCAL["tag"] = src.get_salometool_version(config)

    # if the archive contains a project, we write its relative path in local.pyconf
    if options.project:
        project_arch_path = os.path.join("projects", options.project,
                                         os.path.basename(options.project_file_path))
        local_cfg.PROJECTS.project_file_paths.append(project_arch_path, "")

    ff = open(local_pyconf_tmp_path, 'w')
    local_cfg.__save__(ff, 1)
    ff.close()
    d_project["local.pyconf"] = (local_pyconf_tmp_path, "data/local.pyconf")

    return d_project
def project_package(config, name_project, project_file_path, ftp_mode, tmp_working_dir, embedded_in_sat, logger):
    '''Prepare a dictionary that stores all the needed directories and files to
    add in a project package.

    :param config Config: The global configuration.
    :param name_project str: The name of the project to package.
    :param project_file_path str: The path to the local project.
    :param ftp_mode boolean: Do not embed archives, the archive will rely on ftp mode to retrieve them.
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
                                project package.
    :param embedded_in_sat boolean : the project package is embedded in a sat package
    :param logger Logger: the logging instance used to display or log messages
    :return: the dictionary that stores all the needed directories and files to
             add in a project package.
             {label : (path_on_local_machine, path_in_archive)}
    :rtype: dict
    '''
    d_project = {}
    # Read the project file and get the directories to add to the package
    try:
        project_pyconf_cfg = config.PROJECTS.projects.__getattr__(name_project)
    except Exception:
        # project not registered in config: read the project file directly
        logger.write("""
WARNING: inexisting config.PROJECTS.projects.%s, try to read now from:\n%s\n""" % (name_project, project_file_path))
        project_pyconf_cfg = src.pyconf.Config(project_file_path)
        project_pyconf_cfg.PWD = os.path.dirname(project_file_path)

    paths = {"APPLICATIONPATH" : "applications",
             "PRODUCTPATH" : "products",
             "JOBPATH" : "jobs",
             "MACHINEPATH" : "machines"}
    if not ftp_mode:
        # archives are embedded only when ftp mode is off
        paths["ARCHIVEPATH"] = "archives"

    # Loop over the project paths and add it
    project_file_name = os.path.basename(project_file_path)
    # NOTE(review): project_file_dest is only bound inside the loop below; a
    # project defining none of the standard paths would leave it unset.
    for path in paths:
        if path not in project_pyconf_cfg:
            continue
        if embedded_in_sat:
            dest_path = os.path.join("projects", name_project, paths[path])
            project_file_dest = os.path.join("projects", name_project, project_file_name)
        else:
            dest_path = paths[path]
            project_file_dest = project_file_name

        # Add the directory to the files to add in the package
        d_project[path] = (project_pyconf_cfg[path], dest_path)

        # Modify the value of the path in the package
        project_pyconf_cfg[path] = src.pyconf.Reference(
                                            project_pyconf_cfg,
                                            src.pyconf.DOLLAR,
                                            'project_path + "/' + paths[path] + '"')

    # Modify some values
    if "project_path" not in project_pyconf_cfg:
        project_pyconf_cfg.addMapping("project_path",
                                      src.pyconf.Mapping(project_pyconf_cfg),
                                      "")
    project_pyconf_cfg.project_path = src.pyconf.Reference(project_pyconf_cfg,
                                                           src.pyconf.DOLLAR,
                                                           'PWD')

    # we don't want to export these two fields
    project_pyconf_cfg.__delitem__("file_path")
    project_pyconf_cfg.__delitem__("PWD")
    if ftp_mode:
        project_pyconf_cfg.__delitem__("ARCHIVEPATH")

    # Write the project pyconf file
    project_pyconf_tmp_path = os.path.join(tmp_working_dir, project_file_name)
    ff = open(project_pyconf_tmp_path, 'w')
    ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
    project_pyconf_cfg.__save__(ff, 1)
    ff.close()
    d_project["Project hat file"] = (project_pyconf_tmp_path, project_file_dest)

    return d_project
def add_readme(config, options, where):
    '''Generate the README file of the package from the internal templates.

    :param config Config: The global configuration.
    :param options OptResult: the options of the launched command
    :param where str: The directory in which the README is written.
    :return: The path of the produced README file.
    :rtype: str
    '''
    readme_path = os.path.join(where, "README")
    with codecs.open(readme_path, "w", 'utf-8') as f:

        # templates for building the header
        readme_header = """
# This package was generated with sat $version
# Date: $date
# User: $user
# Distribution : $dist

In the following, $$ROOT represents the directory where you have installed
SALOME (the directory where this file is located).
"""
        if src.architecture.is_windows():
            readme_header = readme_header.replace('$$ROOT', '%ROOT%')
        readme_compilation_with_binaries = """

compilation based on the binaries used as prerequisites
=======================================================

If you fail to compile the complete application (for example because
you are not root on your system and cannot install missing packages), you
may try a partial compilation based on the binaries.
For that it is necessary to copy the binaries from BINARIES to INSTALL,
and do some substitutions on cmake and .la files (replace the build directories
with local paths).
The procedure to do it is:
 1) Remove or rename INSTALL directory if it exists
 2) Execute the shell script install_bin.sh:
 > cd $ROOT
 > ./install_bin.sh
 3) Use SalomeTool (as explained in Sources section) and compile only the
    modules you need to (with -p option)
"""
        readme_header_tpl = string.Template(readme_header)
        readme_template_path_bin = os.path.join(config.VARS.internal_dir,
                                                "README_BIN.template")
        readme_template_path_bin_launcher = os.path.join(config.VARS.internal_dir,
                                                         "README_LAUNCHER.template")
        readme_template_path_bin_virtapp = os.path.join(config.VARS.internal_dir,
                                                        "README_BIN_VIRTUAL_APP.template")
        readme_template_path_src = os.path.join(config.VARS.internal_dir,
                                                "README_SRC.template")
        readme_template_path_pro = os.path.join(config.VARS.internal_dir,
                                                "README_PROJECT.template")
        readme_template_path_sat = os.path.join(config.VARS.internal_dir,
                                                "README_SAT.template")

        # prepare substitution dictionary
        d = dict()
        d['user'] = config.VARS.user
        d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
        d['version'] = src.get_salometool_version(config)
        d['dist'] = config.VARS.dist
        f.write(readme_header_tpl.substitute(d))  # write the general header (common)

        if options.binaries or options.sources:
            d['application'] = config.VARS.application
            d['BINARIES'] = config.INTERNAL.config.install_dir
            d['SEPARATOR'] = config.VARS.sep
            if src.architecture.is_windows():
                d['operatingSystem'] = 'Windows'
                d['PYTHON3'] = 'python3'
                d['ROOT'] = '%ROOT%'
            else:
                # NOTE(review): Linux-branch values reconstructed — confirm
                # '$ROOT'/'python3' against the README templates
                d['operatingSystem'] = 'Linux'
                d['PYTHON3'] = 'python3'
                d['ROOT'] = '$ROOT'
            f.write("# Application: " + d['application'] + "\n")
            if 'KERNEL' in config.APPLICATION.products:
                VersionSalome = src.get_salome_version(config)
                # Case where SALOME has the launcher that uses the SalomeContext API
                if VersionSalome >= 730:
                    d['launcher'] = config.APPLICATION.profile.launcher_name
                else:
                    d['virtual_app'] = 'runAppli' # this info is not used now)

        # write the specific sections
        if options.binaries:
            f.write(src.template.substitute(readme_template_path_bin, d))
            if "virtual_app" in d:
                f.write(src.template.substitute(readme_template_path_bin_virtapp, d))
            else:
                f.write(src.template.substitute(readme_template_path_bin_launcher, d))

        if options.sources:
            f.write(src.template.substitute(readme_template_path_src, d))

        if options.binaries and options.sources and not src.architecture.is_windows():
            f.write(readme_compilation_with_binaries)

        if options.project:
            f.write(src.template.substitute(readme_template_path_pro, d))

        if options.sat:
            f.write(src.template.substitute(readme_template_path_sat, d))

    return readme_path
def update_config(config, prop, value):
    '''Remove from config.APPLICATION.products the products that have the property given as input.

    :param config Config: The global config.
    :param prop str: The property to filter
    :param value str: The value of the property to filter
    '''
    # if there is no APPLICATION (ex sat package -t) : nothing to do
    if "APPLICATION" in config:
        # two-pass removal: collect first so we never delete from the
        # products mapping while iterating over it
        l_product_to_remove = []
        for product_name in config.APPLICATION.products.keys():
            prod_cfg = src.product.get_product_config(config, product_name)
            if src.get_property_in_product_cfg(prod_cfg, prop) == value:
                l_product_to_remove.append(product_name)
        for product_name in l_product_to_remove:
            config.APPLICATION.products.__delitem__(product_name)
def description():
    '''method that is called when salomeTools is called with --help option.

    :return: The text to display for the package command description.
    :rtype: str
    '''
    # NOTE(review): the 'def description():' header and 'return _("""' opener
    # were missing in this extract and have been restored conventionally.
    return _("""
The package command creates a tar file archive of a product.
There are four kinds of archive, which can be mixed:

 1 - The binary archive.
     It contains the product installation directories plus a launcher.
 2 - The sources archive.
     It contains the product archives, a project (the application plus salomeTools).
 3 - The project archive.
     It contains a project (give the project file path as argument).
 4 - The salomeTools archive.
     It contains code utility salomeTools.

example:
 >> sat package SALOME-master --binaries --sources""")
1409 def run(args, runner, logger):
1410 '''method that is called when salomeTools is called with package parameter.
1414 (options, args) = parser.parse_args(args)
1416 # Check that a type of package is called, and only one
1417 all_option_types = (options.binaries,
1419 options.project not in ["", None],
1422 # Check if no option for package type
1423 if all_option_types.count(True) == 0:
1424 msg = _("Error: Precise a type for the package\nUse one of the "
1425 "following options: --binaries, --sources, --project or"
1427 logger.write(src.printcolors.printcError(msg), 1)
1428 logger.write("\n", 1)
1431 # The repository where to put the package if not Binary or Source
1432 package_default_path = runner.cfg.LOCAL.workdir
1434 # if the package contains binaries or sources:
1435 if options.binaries or options.sources:
1436 # Check that the command has been called with an application
1437 src.check_config_has_application(runner.cfg)
1439 # Display information
1440 logger.write(_("Packaging application %s\n") % src.printcolors.printcLabel(
1441 runner.cfg.VARS.application), 1)
1443 # Get the default directory where to put the packages
1444 package_default_path = os.path.join(runner.cfg.APPLICATION.workdir, "PACKAGE")
1445 src.ensure_path_exists(package_default_path)
1447 # if the package contains a project:
1449 # check that the project is visible by SAT
1450 projectNameFile = options.project + ".pyconf"
1452 for i in runner.cfg.PROJECTS.project_file_paths:
1453 baseName = os.path.basename(i)
1454 if baseName == projectNameFile:
1458 if foundProject is None:
1459 local_path = os.path.join(runner.cfg.VARS.salometoolsway, "data", "local.pyconf")
1460 msg = _("""ERROR: the project %(1)s is not visible by salomeTools.
1464 Please add it in file:
1466 {"1": options.project, "2": "\n ".join(runner.cfg.PROJECTS.project_file_paths), "3": local_path})
1467 logger.write(src.printcolors.printcError(msg), 1)
1468 logger.write("\n", 1)
1471 options.project_file_path = foundProject
1472 src.printcolors.print_value(logger, "Project path", options.project_file_path, 2)
1474 # Remove the products that are filtered by the --without_properties option
1475 if options.without_properties:
1476 app = runner.cfg.APPLICATION
1477 logger.trace("without_properties all products:\n %s\n" % PP.pformat(sorted(app.products.keys())))
1478 prop, value = options.without_properties
1479 update_config(runner.cfg, prop, value)
1480 logger.warning("without_properties selected products:\n %s\n" % PP.pformat(sorted(app.products.keys())))
1482 # Remove from config the products that have the not_in_package property
1483 update_config(runner.cfg, "not_in_package", "yes")
1485 # get the name of the archive or build it
1487 if os.path.basename(options.name) == options.name:
1488 # only a name (not a path)
1489 archive_name = options.name
1490 dir_name = package_default_path
1492 archive_name = os.path.basename(options.name)
1493 dir_name = os.path.dirname(options.name)
1495 # suppress extension
1496 if archive_name[-len(".tgz"):] == ".tgz":
1497 archive_name = archive_name[:-len(".tgz")]
1498 if archive_name[-len(".tar.gz"):] == ".tar.gz":
1499 archive_name = archive_name[:-len(".tar.gz")]
1503 dir_name = package_default_path
1504 if options.binaries or options.sources:
1505 archive_name = runner.cfg.APPLICATION.name
1507 if options.binaries:
1508 archive_name += "-"+runner.cfg.VARS.dist
1511 archive_name += "-SRC"
1512 if options.with_vcs:
1513 archive_name += "-VCS"
1516 archive_name += ("salomeTools_" + src.get_salometool_version(runner.cfg))
1521 project_name = options.project
1522 archive_name += ("satproject_" + project_name)
1524 if len(archive_name)==0: # no option worked
1525 msg = _("Error: Cannot name the archive\n"
1526 " check if at least one of the following options was "
1527 "selected : --binaries, --sources, --project or"
1529 logger.write(src.printcolors.printcError(msg), 1)
1530 logger.write("\n", 1)
1533 path_targz = os.path.join(dir_name, archive_name + PACKAGE_EXT)
1535 src.printcolors.print_value(logger, "Package path", path_targz, 2)
1537 # Create a working directory for all files that are produced during the
1538 # package creation and that will be removed at the end of the command
1539 tmp_working_dir = os.path.join(runner.cfg.VARS.tmp_root, runner.cfg.VARS.datehour)
1540 src.ensure_path_exists(tmp_working_dir)
1541 logger.write("\n", 5)
1542 logger.write(_("The temporary working directory: %s\n" % tmp_working_dir),5)
1544 logger.write("\n", 3)
1546 msg = _("Preparation of files to add to the archive")
1547 logger.write(src.printcolors.printcLabel(msg), 2)
1548 logger.write("\n", 2)
1550 d_files_to_add={} # content of the archive
1552 # a dict to hold paths that will need to be substitute for users recompilations
1553 d_paths_to_substitute={}
1555 if options.binaries:
1556 d_bin_files_to_add = binary_package(runner.cfg,
1560 # for all binaries dir, store the substitution that will be required
1561 # for extra compilations
1562 for key in d_bin_files_to_add:
1563 if key.endswith("(bin)"):
1564 source_dir = d_bin_files_to_add[key][0]
1565 path_in_archive = d_bin_files_to_add[key][1].replace(
1566 runner.cfg.INTERNAL.config.binary_dir + runner.cfg.VARS.dist,
1567 runner.cfg.INTERNAL.config.install_dir)
1568 if os.path.basename(source_dir)==os.path.basename(path_in_archive):
1569 # if basename is the same we will just substitute the dirname
1570 d_paths_to_substitute[os.path.dirname(source_dir)]=\
1571 os.path.dirname(path_in_archive)
1573 d_paths_to_substitute[source_dir]=path_in_archive
1575 d_files_to_add.update(d_bin_files_to_add)
1578 d_files_to_add.update(source_package(runner,
1583 if options.binaries:
1584 # for archives with bin and sources we provide a shell script able to
1585 # install binaries for compilation
1586 file_install_bin=produce_install_bin_file(runner.cfg,logger,
1588 d_paths_to_substitute,
1590 d_files_to_add.update({"install_bin" : (file_install_bin, "install_bin.sh")})
1591 logger.write("substitutions that need to be done later : \n", 5)
1592 logger.write(str(d_paths_to_substitute), 5)
1593 logger.write("\n", 5)
1595 # --salomeTool option is not considered when --sources is selected, as this option
1596 # already brings salomeTool!
1598 d_files_to_add.update(sat_package(runner.cfg, tmp_working_dir,
1602 DBG.write("config for package %s" % project_name, runner.cfg)
1603 d_files_to_add.update(project_package(runner.cfg, project_name, options.project_file_path, options.ftp, tmp_working_dir, options.sat, logger))
1605 if not(d_files_to_add):
1606 msg = _("Error: Empty dictionnary to build the archive!\n")
1607 logger.write(src.printcolors.printcError(msg), 1)
1608 logger.write("\n", 1)
1611 # Add the README file in the package
1612 local_readme_tmp_path = add_readme(runner.cfg, options, tmp_working_dir)
1613 d_files_to_add["README"] = (local_readme_tmp_path, "README")
1615 # Add the additional files of option add_files
1616 if options.add_files:
1617 for file_path in options.add_files:
1618 if not os.path.exists(file_path):
1619 msg = _("WARNING: the file %s is not accessible.\n" % file_path)
1621 file_name = os.path.basename(file_path)
1622 d_files_to_add[file_name] = (file_path, file_name)
1624 logger.write("\n", 2)
1625 logger.write(src.printcolors.printcLabel(_("Actually do the package")), 2)
1626 logger.write("\n", 2)
1627 logger.write("\nfiles and directories to add:\n%s\n\n" % PP.pformat(d_files_to_add), 5)
1631 # Creating the object tarfile
1632 tar = tarfile.open(path_targz, mode='w:gz')
1634 # get the filtering function if needed
1635 filter_function = exclude_VCS_and_extensions
1637 # Add the files to the tarfile object
1638 res = add_files(tar, archive_name, d_files_to_add, logger, f_exclude=filter_function)
1640 except KeyboardInterrupt:
1641 logger.write(src.printcolors.printcError("\nERROR: forced interruption\n"), 1)
1642 logger.write(_("Removing the temporary working directory '%s'... ") % tmp_working_dir, 1)
1643 # remove the working directory
1644 shutil.rmtree(tmp_working_dir)
1645 logger.write(_("OK"), 1)
1646 logger.write(_("\n"), 1)
1649 # case if no application, only package sat as 'sat package -t'
1651 app = runner.cfg.APPLICATION
1655 # unconditionaly remove the tmp_local_working_dir
1657 tmp_local_working_dir = os.path.join(app.workdir, "tmp_package")
1658 if os.path.isdir(tmp_local_working_dir):
1659 shutil.rmtree(tmp_local_working_dir)
1661 # remove the tmp directory, unless user has registered as developer
1662 if os.path.isdir(tmp_working_dir) and (not DBG.isDeveloper()):
1663 shutil.rmtree(tmp_working_dir)
1665 # Print again the path of the package
1666 logger.write("\n", 2)
1667 src.printcolors.print_value(logger, "Package path", path_targz, 2)