3 # Copyright (C) 2010-2012 CEA/DEN
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 # Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
31 from application import get_SALOME_modules
32 import src.debug as DBG
# True on Python <= 2.6, where tarfile.TarFile.add() only supports the
# deprecated "exclude" callback instead of "filter" (see add_files / make_archive).
34 old_python = sys.version_info[0] == 2 and sys.version_info[1] <= 6
# Directory name, inside a source package, that holds the product archives.
41 ARCHIVE_DIR = "ARCHIVES"
# Directory name, inside a source package, that holds the embedded project.
42 PROJECT_DIR = "PROJECT"
# VCS bookkeeping directories that are never copied into a package.
44 IGNORED_DIRS = [".git", ".svn"]
# File extensions excluded from packages (empty by default).
45 IGNORED_EXTENSIONS = []
47 PACKAGE_EXT=".tar.gz" # the extension we use for the packages
# Pyconf templates written verbatim into generated source packages:
# PROJECT_TEMPLATE becomes the embedded project's project.pyconf, and
# LOCAL_TEMPLATE becomes the packaged salomeTools' data/local.pyconf
# (it points the copied sat at the embedded PROJECT_DIR).
49 PROJECT_TEMPLATE = """#!/usr/bin/env python
52 # The path to the archive root directory
53 root_path : $PWD + "/../"
55 project_path : $PWD + "/"
57 # Where to search the archives of the products
58 ARCHIVEPATH : $root_path + "ARCHIVES"
59 # Where to search the pyconf of the applications
60 APPLICATIONPATH : $project_path + "applications/"
61 # Where to search the pyconf of the products
62 PRODUCTPATH : $project_path + "products/"
63 # Where to search the pyconf of the jobs of the project
64 JOBPATH : $project_path + "jobs/"
65 # Where to search the pyconf of the machines of the project
66 MACHINEPATH : $project_path + "machines/"
69 LOCAL_TEMPLATE = ("""#!/usr/bin/env python
77 archive_dir : 'default'
84 project_file_paths : [$VARS.salometoolsway + $VARS.sep + \"..\" + $VARS.sep"""
85 """ + \"""" + PROJECT_DIR + """\" + $VARS.sep + "project.pyconf"]
89 # Define all possible option for the package command :  sat package <options>
# NOTE(review): "_" is presumably the gettext translation function bound
# elsewhere in sat — confirm before relying on it in isolation.
90 parser = src.options.Options()
91 parser.add_option('b', 'binaries', 'boolean', 'binaries',
92 _('Optional: Produce a binary package.'), False)
93 parser.add_option('f', 'force_creation', 'boolean', 'force_creation',
94 _('Optional: Only binary package: produce the archive even if '
95 'there are some missing products.'), False)
96 parser.add_option('s', 'sources', 'boolean', 'sources',
97 _('Optional: Produce a compilable archive of the sources of the '
98 'application.'), False)
99 parser.add_option('', 'with_vcs', 'boolean', 'with_vcs',
100 _('Optional: Do not make archive for products in VCS mode (git, cvs, svn). '
101 'Sat prepare will use VCS mode instead to retrieve them'),
103 parser.add_option('', 'ftp', 'boolean', 'ftp',
104 _('Optional: Do not embed archives for products in archive mode.'
105 'Sat prepare will use ftp instead to retrieve them'),
107 parser.add_option('p', 'project', 'string', 'project',
108 _('Optional: Produce an archive that contains a project.'), "")
109 parser.add_option('t', 'salometools', 'boolean', 'sat',
110 _('Optional: Produce an archive that contains salomeTools.'), False)
111 parser.add_option('n', 'name', 'string', 'name',
112 _('Optional: The name or full path of the archive.'), None)
113 parser.add_option('', 'add_files', 'list2', 'add_files',
114 _('Optional: The list of additional files to add to the archive.'), [])
115 parser.add_option('', 'without_properties', 'properties', 'without_properties',
116 _('Optional: Filter the products by their properties.\n\tSyntax: '
117 '--without_properties <property>:<value>'))
120 def add_files(tar, name_archive, d_content, logger, f_exclude=None):
121 '''Create an archive containing all directories and files that are given in
122 the d_content argument.
124 :param tar tarfile: The tarfile instance used to make the archive.
125 :param name_archive str: The name of the archive to make.
126 :param d_content dict: The dictionary that contain all directories and files
127 to add in the archive.
129 (path_on_local_machine, path_in_archive)
130 :param logger Logger: the logging instance
131 :param f_exclude Function: the function that filters
132 :return: 0 if success, 1 if not.
135 # get the max length of the messages in order to make the display
# max_len is used below to pad each entry name with dots so the
# per-entry status log lines up in columns.
136 max_len = len(max(d_content.keys(), key=len))
139 # loop over each directory or file stored in the d_content dictionary
140 names = sorted(d_content.keys())
141 DBG.write("add tar names", names)
143 # used to avoid duplications (for pip install in python, or single_install_dir cases)
146 # display information
147 len_points = max_len - len(name) + 3
148 local_path, archive_path = d_content[name]
# The entry's path inside the archive is rooted at the archive name itself.
149 in_archive = os.path.join(name_archive, archive_path)
150 logger.write(name + " " + len_points * "." + " "+ in_archive + " ", 3)
151 # Get the local path and the path in archive
152 # of the directory or file to add
153 # Add it in the archive
# Deduplicate identical (source -> destination) additions; the actual
# tar.add() call uses exclude= on Python <= 2.6 and filter= otherwise.
# Failures are logged ("KO") per entry rather than aborting the whole archive.
155 key=local_path+"->"+in_archive
156 if key not in already_added:
160 exclude=exclude_VCS_and_extensions_26)
164 filter=exclude_VCS_and_extensions)
165 already_added.add(key)
166 logger.write(src.printcolors.printcSuccess(_("OK")), 3)
167 except Exception as e:
168 logger.write(src.printcolors.printcError(_("KO ")), 3)
169 logger.write(str(e), 3)
171 logger.write("\n", 3)
175 def exclude_VCS_and_extensions_26(filename):
176 ''' The function that is used to exclude from package the link to the
177 VCS repositories (like .git) (only for python 2.6)
179 :param filename Str: The filname to exclude (or not).
180 :return: True if the file has to be exclude
# Exclude any path containing a VCS directory name (substring match on
# the whole path) or ending with an ignored extension.
183 for dir_name in IGNORED_DIRS:
184 if dir_name in filename:
186 for extension in IGNORED_EXTENSIONS:
187 if filename.endswith(extension):
191 def exclude_VCS_and_extensions(tarinfo):
192 ''' The function that is used to exclude from package the link to the
193 VCS repositories (like .git)
195 :param filename Str: The filname to exclude (or not).
196 :return: None if the file has to be exclude
197 :rtype: tarinfo or None
# Modern tarfile "filter" callback: same policy as the 2.6 variant above,
# but returns None (exclude) / tarinfo (keep) instead of True/False.
199 filename = tarinfo.name
200 for dir_name in IGNORED_DIRS:
201 if dir_name in filename:
203 for extension in IGNORED_EXTENSIONS:
204 if filename.endswith(extension):
208 def produce_relative_launcher(config,
213 '''Create a specific SALOME launcher for the binary package. This launcher
216 :param config Config: The global configuration.
217 :param logger Logger: the logging instance
218 :param file_dir str: the directory where to put the launcher
219 :param file_name str: The launcher name
220 :param binaries_dir_name str: the name of the repository where the binaries
222 :return: the path of the produced launcher
226 # get KERNEL installation path
227 kernel_info = src.product.get_product_config(config, "KERNEL")
228 kernel_base_name=os.path.basename(kernel_info.install_dir)
229 if kernel_base_name.startswith("config"):
230 # case of kernel installed in base. We remove "config-i"
231 kernel_base_name=os.path.basename(os.path.dirname(kernel_info.install_dir))
# KERNEL path expressed relative to the binaries directory inside the package.
233 kernel_root_dir = os.path.join(binaries_dir_name, kernel_base_name)
235 # set kernel bin dir (considering fhs property)
236 kernel_cfg = src.product.get_product_config(config, "KERNEL")
237 if src.get_property_in_product_cfg(kernel_cfg, "fhs"):
238 bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin")
240 bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin","salome")
242 # check if the application contains an application module
243 # check also if the application has a distene product,
244 # in this case get its licence file name
245 l_product_info = src.product.get_products_infos(config.APPLICATION.products.keys(), config)
246 salome_application_name="Not defined"
247 distene_licence_file_name=False
248 for prod_name, prod_info in l_product_info:
249 # look for a "salome application" and a distene product
250 if src.get_property_in_product_cfg(prod_info, "is_distene") == "yes":
251 distene_licence_file_name = src.product.product_has_licence(prod_info,
252 config.PATHS.LICENCEPATH)
253 if src.get_property_in_product_cfg(prod_info, "is_salome_application") == "yes":
254 salome_application_name=prod_info.name
256 # if the application contains an application module, we set ABSOLUTE_APPLI_PATH to it
257 # if not we set it to KERNEL_INSTALL_DIR, which is sufficient, except for salome test
258 if salome_application_name == "Not defined":
259 app_root_dir=kernel_root_dir
261 app_root_dir=os.path.join(binaries_dir_name, salome_application_name)
# Paths recorded below are relative to the literal token "out_dir_Path",
# which the generated launcher resolves at run time (see the hack on the
# replace_in_file calls further down) -- TODO confirm against FileEnvWriter.
264 additional_env['sat_bin_kernel_install_dir'] = "out_dir_Path + " +\
265 config.VARS.sep + bin_kernel_install_dir
266 if "python3" in config.APPLICATION and config.APPLICATION.python3 == "yes":
267 additional_env['sat_python_version'] = 3
269 additional_env['sat_python_version'] = 2
271 additional_env['ABSOLUTE_APPLI_PATH'] = "out_dir_Path" + config.VARS.sep + app_root_dir
273 # create an environment file writer
274 writer = src.environment.FileEnvWriter(config,
280 filepath = os.path.join(file_dir, file_name)
282 writer.write_env_file(filepath,
285 additional_env=additional_env,
286 no_path_init="False",
287 for_package = binaries_dir_name)
289 # Little hack to put out_dir_Path outside the strings
# Rewrites e.g. r"out_dir_Path/... into out_dir_Path + r"/... so the
# generated launcher concatenates the variable instead of a literal string.
290 src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' )
291 src.replace_in_file(filepath, "r'out_dir_Path + ", "out_dir_Path + r'" )
293 # A hack to put a call to a file for distene licence.
294 # It does nothing to an application that has no distene product
295 if distene_licence_file_name:
296 logger.write("Application has a distene licence file! We use it in package launcher", 5)
297 hack_for_distene_licence(filepath, distene_licence_file_name)
299 # change the rights in order to make the file executable for everybody
311 def hack_for_distene_licence(filepath, licence_file):
312 '''Replace the distene licence env variable by a call to a file.
314 :param filepath Str: The path to the launcher to modify.
# Keep the original launcher as "<filepath>_old" and rewrite it in place.
316 shutil.move(filepath, filepath + "_old")
318 filein = filepath + "_old"
319 fin = open(filein, "r")
320 fout = open(fileout, "w")
321 text = fin.readlines()
322 # Find the Distene section
# Scan for the marker comment; if absent there is no distene product
# and the launcher is written back unchanged.
324 for i,line in enumerate(text):
325 if "# Set DISTENE License" in line:
329 # No distene product, there is nothing to do
# Drop the two lines following the marker (the original env-var settings),
# then splice in a runtime import of the licence file instead.
335 del text[num_line +1]
336 del text[num_line +1]
# Injected snippet: load the licence file as a Python module at launcher
# startup (importlib on Python >= 3.5, imp.load_source otherwise) and let it
# set the distene variables; any failure is silently ignored (best-effort).
337 text_to_insert =""" try:
338 distene_licence_file=r"%s"
339 if sys.version_info[0] >= 3 and sys.version_info[1] >= 5:
340 import importlib.util
341 spec_dist = importlib.util.spec_from_file_location("distene_licence", distene_licence_file)
342 distene=importlib.util.module_from_spec(spec_dist)
343 spec_dist.loader.exec_module(distene)
346 distene = imp.load_source('distene_licence', distene_licence_file)
347 distene.set_distene_variables(context)
349 pass\n""" % licence_file
350 text.insert(num_line + 1, text_to_insert)
357 def produce_relative_env_files(config,
361 '''Create some specific environment files for the binary package. These
362 files use relative paths.
364 :param config Config: The global configuration.
365 :param logger Logger: the logging instance
366 :param file_dir str: the directory where to put the files
367 :param binaries_dir_name str: the name of the repository where the binaries
369 :return: the list of path of the produced environment files
372 # create an environment file writer
373 writer = src.environment.FileEnvWriter(config,
# Platform-specific file name: batch script on Windows, shell script elsewhere.
378 if src.architecture.is_windows():
380 filename = "env_launch.bat"
383 filename = "env_launch.sh"
386 filepath = writer.write_env_file(filename,
389 for_package = binaries_dir_name)
391 # Little hack to put out_dir_Path as environment variable
# Rewrites the literal token out_dir_Path into the platform's variable
# expansion syntax: %out_dir_Path% on Windows, ${out_dir_Path} on unix.
392 if src.architecture.is_windows() :
393 src.replace_in_file(filepath, '"out_dir_Path', '"%out_dir_Path%' )
394 src.replace_in_file(filepath, '=out_dir_Path', '=%out_dir_Path%' )
395 src.replace_in_file(filepath, ';out_dir_Path', ';%out_dir_Path%' )
397 src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )
398 src.replace_in_file(filepath, ':out_dir_Path', ':${out_dir_Path}' )
400 # change the rights in order to make the file executable for everybody
412 def produce_install_bin_file(config,
417 '''Create a bash shell script which do substitutions in BIRARIES dir
418 in order to use it for extra compilations.
420 :param config Config: The global configuration.
421 :param logger Logger: the logging instance
422 :param file_dir str: the directory where to put the files
423 :param d_sub, dict: the dictionnary that contains the substitutions to be done
424 :param file_name str: the name of the install script file
425 :return: the produced file
429 filepath = os.path.join(file_dir, file_name)
430 # open the file and write into it
431 # use codec utf-8 as sat variables are in unicode
432 with codecs.open(filepath, "w", 'utf-8') as installbin_file:
433 installbin_template_path = os.path.join(config.VARS.internal_dir,
434 "INSTALL_BIN.template")
436 # build the name of the directory that will contain the binaries
437 binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
438 # build the substitution loop
# Assembles a shell loop: grep finds text files under the install dir
# containing each d_sub key, then each occurrence is rewritten to
# "$(pwd)/<d_sub[key]>" (sed-style s?old?new?g with '?' as delimiter,
# since paths contain '/').
439 loop_cmd = "for f in $(grep -RIl"
441 loop_cmd += " -e "+ key
442 loop_cmd += ' ' + config.INTERNAL.config.install_dir +\
445 loop_cmd += " s?" + key + "?$(pwd)/" + d_sub[key] + "?g\n"
446 loop_cmd += ' " $f\ndone'
# d maps the template placeholders of INSTALL_BIN.template to their values.
449 d["BINARIES_DIR"] = binaries_dir_name
450 d["SUBSTITUTION_LOOP"]=loop_cmd
451 d["INSTALL_DIR"]=config.INTERNAL.config.install_dir
453 # substitute the template and write it in file
454 content=src.template.substitute(installbin_template_path, d)
455 installbin_file.write(content)
456 # change the rights in order to make the file executable for everybody
468 def product_appli_creation_script(config,
472 '''Create a script that can produce an application (EDF style) in the binary
475 :param config Config: The global configuration.
476 :param logger Logger: the logging instance
477 :param file_dir str: the directory where to put the file
478 :param binaries_dir_name str: the name of the repository where the binaries
480 :return: the path of the produced script file
# Load the script template and substitute the binaries directory name.
483 template_name = "create_appli.py.for_bin_packages.template"
484 template_path = os.path.join(config.VARS.internal_dir, template_name)
# NOTE(review): the template file handle is never closed explicitly.
485 text_to_fill = open(template_path, "r").read()
486 text_to_fill = text_to_fill.replace("TO BE FILLED 1",
487 '"' + binaries_dir_name + '"')
# Build one <module .../> XML line per SALOME module (per component for
# cpp-generated products), referencing its directory under dir_bin_name.
490 for product_name in get_SALOME_modules(config):
491 product_info = src.product.get_product_config(config, product_name)
493 if src.product.product_is_smesh_plugin(product_info):
496 if 'install_dir' in product_info and bool(product_info.install_dir):
497 if src.product.product_is_cpp(product_info):
499 for cpp_name in src.product.get_product_components(product_info):
500 line_to_add = ("<module name=\"" +
502 "\" gui=\"yes\" path=\"''' + "
503 "os.path.join(dir_bin_name, \"" +
504 cpp_name + "\") + '''\"/>")
507 line_to_add = ("<module name=\"" +
509 "\" gui=\"yes\" path=\"''' + "
510 "os.path.join(dir_bin_name, \"" +
511 product_name + "\") + '''\"/>")
512 text_to_add += line_to_add + "\n"
# Substitute the accumulated module lines and write the final script.
514 filled_text = text_to_fill.replace("TO BE FILLED 2", text_to_add)
516 tmp_file_path = os.path.join(file_dir, "create_appli.py")
517 ff = open(tmp_file_path, "w")
518 ff.write(filled_text)
521 # change the rights in order to make the file executable for everybody
522 os.chmod(tmp_file_path,
533 def binary_package(config, logger, options, tmp_working_dir):
534 '''Prepare a dictionary that stores all the needed directories and files to
535 add in a binary package.
537 :param config Config: The global configuration.
538 :param logger Logger: the logging instance
539 :param options OptResult: the options of the launched command
540 :param tmp_working_dir str: The temporary local directory containing some
541 specific directories or files needed in the
543 :return: the dictionary that stores all the needed directories and files to
544 add in a binary package.
545 {label : (path_on_local_machine, path_in_archive)}
549 # Get the list of product installation to add to the archive
550 l_products_name = sorted(config.APPLICATION.products.keys())
551 l_product_info = src.product.get_products_infos(l_products_name,
556 l_sources_not_present = []
557 generate_mesa_launcher = False  # a flag to know if we generate a mesa launcher
# The mesa launcher is only generated when the application explicitly
# requests it via the mesa_launcher_in_package property.
558 if ("APPLICATION" in config and
559 "properties" in config.APPLICATION and
560 "mesa_launcher_in_package" in config.APPLICATION.properties and
561 config.APPLICATION.properties.mesa_launcher_in_package == "yes") :
562 generate_mesa_launcher=True
564 for prod_name, prod_info in l_product_info:
565 # skip product with property not_in_package set to yes
566 if src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes":
569 # Add the sources of the products that have the property
570 # sources_in_package : "yes"
571 if src.get_property_in_product_cfg(prod_info,
572 "sources_in_package") == "yes":
573 if os.path.exists(prod_info.source_dir):
574 l_source_dir.append((prod_name, prod_info.source_dir))
576 l_sources_not_present.append(prod_name)
578 # ignore the native and fixed products for install directories
579 if (src.product.product_is_native(prod_info)
580 or src.product.product_is_fixed(prod_info)
581 or not src.product.product_compiles(prod_info)):
583 if src.product.check_installation(config, prod_info):
584 l_install_dir.append((prod_name, prod_info.install_dir))
586 l_not_installed.append(prod_name)
588 # Add also the cpp generated modules (if any)
589 if src.product.product_is_cpp(prod_info):
591 for name_cpp in src.product.get_product_components(prod_info):
592 install_dir = os.path.join(config.APPLICATION.workdir,
593 config.INTERNAL.config.install_dir,
595 if os.path.exists(install_dir):
596 l_install_dir.append((name_cpp, install_dir))
598 l_not_installed.append(name_cpp)
600 # check the name of the directory that (could) contains the binaries
601 # from previous detar
602 binaries_from_detar = os.path.join(
603 config.APPLICATION.workdir,
604 config.INTERNAL.config.binary_dir + config.VARS.dist)
605 if os.path.exists(binaries_from_detar):
607 WARNING: existing binaries directory from previous detar installation:
609 To make new package from this, you have to:
610 1) install binaries in INSTALL directory with the script "install_bin.sh"
611 see README file for more details
612 2) or recompile everything in INSTALL with "sat compile" command
613 this step is long, and requires some linux packages to be installed
615 """ % binaries_from_detar)
617 # Print warning or error if there are some missing products
# Missing installations abort the package unless --force_creation was given,
# in which case only a warning is emitted.
618 if len(l_not_installed) > 0:
619 text_missing_prods = ""
620 for p_name in l_not_installed:
621 text_missing_prods += " - " + p_name + "\n"
622 if not options.force_creation:
623 msg = _("ERROR: there are missing product installations:")
624 logger.write("%s\n%s" % (src.printcolors.printcError(msg),
627 raise src.SatException(msg)
629 msg = _("WARNING: there are missing products installations:")
630 logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
634 # Do the same for sources
635 if len(l_sources_not_present) > 0:
636 text_missing_prods = ""
637 for p_name in l_sources_not_present:
638 text_missing_prods += "-" + p_name + "\n"
639 if not options.force_creation:
640 msg = _("ERROR: there are missing product sources:")
641 logger.write("%s\n%s" % (src.printcolors.printcError(msg),
644 raise src.SatException(msg)
646 msg = _("WARNING: there are missing products sources:")
647 logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
651 # construct the name of the directory that will contain the binaries
652 if src.architecture.is_windows():
653 binaries_dir_name = config.INTERNAL.config.binary_dir
655 binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
656 # construct the correlation table between the product names, there
657 # actual install directories and there install directory in archive
659 for prod_name, install_dir in l_install_dir:
660 prod_base_name=os.path.basename(install_dir)
661 if prod_base_name.startswith("config"):
662 # case of a products installed in base. We remove "config-i"
663 prod_base_name=os.path.basename(os.path.dirname(install_dir))
664 path_in_archive = os.path.join(binaries_dir_name, prod_base_name)
665 d_products[prod_name + " (bin)"] = (install_dir, path_in_archive)
667 for prod_name, source_dir in l_source_dir:
668 path_in_archive = os.path.join("SOURCES", prod_name)
669 d_products[prod_name + " (sources)"] = (source_dir, path_in_archive)
671 # for packages of SALOME applications including KERNEL,
672 # we produce a salome launcher or a virtual application (depending on salome version)
673 if 'KERNEL' in config.APPLICATION.products:
674 VersionSalome = src.get_salome_version(config)
675 # Case where SALOME has the launcher that uses the SalomeContext API
# 730 presumably encodes SALOME version 7.3.0 as an integer -- TODO confirm
# against src.get_salome_version.
676 if VersionSalome >= 730:
677 # create the relative launcher and add it to the files to add
678 launcher_name = src.get_launcher_name(config)
679 launcher_package = produce_relative_launcher(config,
684 d_products["launcher"] = (launcher_package, launcher_name)
686 # if the application contains mesa products, we generate in addition to the
687 # classical salome launcher a launcher using mesa and called mesa_salome
688 # (the mesa launcher will be used for remote usage through ssh).
689 if generate_mesa_launcher:
690 #if there is one : store the use_mesa property
691 restore_use_mesa_option=None
692 if ('properties' in config.APPLICATION and
693 'use_mesa' in config.APPLICATION.properties):
694 restore_use_mesa_option = config.APPLICATION.properties.use_mesa
696 # activate mesa property, and generate a mesa launcher
697 src.activate_mesa_property(config)  #activate use_mesa property
698 launcher_mesa_name="mesa_"+launcher_name
699 launcher_package_mesa = produce_relative_launcher(config,
704 d_products["launcher (mesa)"] = (launcher_package_mesa, launcher_mesa_name)
706 # if there was a use_mesa value, we restore it
707 # else we set it to the default value "no"
708 if restore_use_mesa_option != None:
709 config.APPLICATION.properties.use_mesa=restore_use_mesa_option
711 config.APPLICATION.properties.use_mesa="no"
714 # if we mix binaries and sources, we add a copy of the launcher,
715 # prefixed with "bin",in order to avoid clashes
716 d_products["launcher (copy)"] = (launcher_package, "bin"+launcher_name)
718 # Provide a script for the creation of an application EDF style
719 appli_script = product_appli_creation_script(config,
724 d_products["appli script"] = (appli_script, "create_appli.py")
726 # Put also the environment file
727 env_file = produce_relative_env_files(config,
732 if src.architecture.is_windows():
733 filename = "env_launch.bat"
735 filename = "env_launch.sh"
736 d_products["environment file"] = (env_file, filename)
739 def source_package(sat, config, logger, options, tmp_working_dir):
740 '''Prepare a dictionary that stores all the needed directories and files to
741 add in a source package.
743 :param config Config: The global configuration.
744 :param logger Logger: the logging instance
745 :param options OptResult: the options of the launched command
746 :param tmp_working_dir str: The temporary local directory containing some
747 specific directories or files needed in the
749 :return: the dictionary that stores all the needed directories and files to
750 add in a source package.
751 {label : (path_on_local_machine, path_in_archive)}
756 # Get all the products that are prepared using an archive
757 # unless ftp mode is specified (in this case the user of the
758 # archive will get the sources through the ftp mode of sat prepare
760 logger.write("Find archive products ... ")
761 d_archives, l_pinfo_vcs = get_archives(config, logger)
762 logger.write("Done\n")
765 if not options.with_vcs and len(l_pinfo_vcs) > 0:
766 # Make archives with the products that are not prepared using an archive
767 # (git, cvs, svn, etc)
768 logger.write("Construct archives for vcs products ... ")
769 d_archives_vcs = get_archives_vcs(l_pinfo_vcs,
774 logger.write("Done\n")
# Generate the embedded project (pyconf tree) describing the packaged sources.
777 logger.write("Create the project ... ")
778 d_project = create_project_for_src_package(config,
782 logger.write("Done\n")
# Embed a copy of salomeTools itself so the package is self-contained.
785 tmp_sat = add_salomeTools(config, tmp_working_dir)
786 d_sat = {"salomeTools" : (tmp_sat, "salomeTools")}
788 # Add a sat symbolic link if not win
789 if not src.architecture.is_windows():
790 tmp_satlink_path = os.path.join(tmp_working_dir, 'sat')
794 # In the jobs, os.getcwd() can fail
# Remember the configured working dir before chdir-ing into the temp dir
# to create the relative symlink -- presumably restored afterwards (TODO confirm).
795 t = config.LOCAL.workdir
796 os.chdir(tmp_working_dir)
797 if os.path.lexists(tmp_satlink_path):
798 os.remove(tmp_satlink_path)
799 os.symlink(os.path.join('salomeTools', 'sat'), 'sat')
802 d_sat["sat link"] = (tmp_satlink_path, "sat")
# Merge every contribution (archives, vcs archives, project, sat copy)
# into the final {label: (local_path, path_in_archive)} dictionary.
804 d_source = src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
807 def get_archives(config, logger):
808 '''Find all the products that are get using an archive and all the products
809 that are get using a vcs (git, cvs, svn) repository.
811 :param config Config: The global configuration.
812 :param logger Logger: the logging instance
813 :return: the dictionary {name_product :
814 (local path of its archive, path in the package of its archive )}
815 and the list of specific configuration corresponding to the vcs
819 # Get the list of product informations
820 l_products_name = config.APPLICATION.products.keys()
821 l_product_info = src.product.get_products_infos(l_products_name,
825 for p_name, p_info in l_product_info:
826 # skip product with property not_in_package set to yes
827 if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
829 # ignore the native and fixed products
830 if (src.product.product_is_native(p_info)
831 or src.product.product_is_fixed(p_info)):
# Archive-mode product: record its local tarball, to be stored under
# ARCHIVE_DIR inside the package.
833 if p_info.get_source == "archive":
834 archive_path = p_info.archive_info.archive_name
835 archive_name = os.path.basename(archive_path)
836 d_archives[p_name] = (archive_path,
837 os.path.join(ARCHIVE_DIR, archive_name))
838 if (src.appli_test_property(config,"pip", "yes") and
839 src.product.product_test_property(p_info,"pip", "yes")):
840 # if pip mode is activated, and product is managed by pip
841 pip_wheels_dir=os.path.join(config.LOCAL.archive_dir,"wheels")
842 pip_wheel_pattern=os.path.join(pip_wheels_dir,
843 "%s-%s*" % (p_info.name, p_info.version))
844 pip_wheel_path=glob.glob(pip_wheel_pattern)
845 msg_pip_not_found="Error in get_archive, pip wheel for "\
846 "product %s-%s was not found in %s directory"
847 msg_pip_two_or_more="Error in get_archive, several pip wheels for "\
848 "product %s-%s were found in %s directory"
# Exactly one wheel must match the name-version pattern; zero or
# several matches are both fatal.
849 if len(pip_wheel_path)==0:
850 raise src.SatException(msg_pip_not_found %\
851 (p_info.name, p_info.version, pip_wheels_dir))
852 if len(pip_wheel_path)>1:
853 raise src.SatException(msg_pip_two_or_more %\
854 (p_info.name, p_info.version, pip_wheels_dir))
856 pip_wheel_name=os.path.basename(pip_wheel_path[0])
857 d_archives[p_name+" (pip wheel)"]=(pip_wheel_path[0],
858 os.path.join(ARCHIVE_DIR, "wheels", pip_wheel_name))
860 # this product is not managed by archive,
861 # an archive of the vcs directory will be created by get_archive_vcs
862 l_pinfo_vcs.append((p_name, p_info))
864 return d_archives, l_pinfo_vcs
866 def add_salomeTools(config, tmp_working_dir):
867 '''Prepare a version of salomeTools that has a specific local.pyconf file
868 configured for a source package.
870 :param config Config: The global configuration.
871 :param tmp_working_dir str: The temporary local directory containing some
872 specific directories or files needed in the
874 :return: The path to the local salomeTools directory to add in the package
877 # Copy sat in the temporary working directory
878 sat_tmp_path = src.Path(os.path.join(tmp_working_dir, "salomeTools"))
879 sat_running_path = src.Path(config.VARS.salometoolsway)
880 sat_running_path.copy(sat_tmp_path)
882 # Update the local.pyconf file that contains the path to the project
883 local_pyconf_name = "local.pyconf"
884 local_pyconf_dir = os.path.join(tmp_working_dir, "salomeTools", "data")
885 local_pyconf_file = os.path.join(local_pyconf_dir, local_pyconf_name)
886 # Remove the .pyconf file in the root directory of salomeTools if there is
887 # any. (For example when launching jobs, a pyconf file describing the jobs
888 # can be here and is not useful)
889 files_or_dir_SAT = os.listdir(os.path.join(tmp_working_dir, "salomeTools"))
890 for file_or_dir in files_or_dir_SAT:
891 if file_or_dir.endswith(".pyconf") or file_or_dir.endswith(".txt"):
892 file_path = os.path.join(tmp_working_dir,
# Overwrite the copied local.pyconf with the package-specific template
# so the embedded sat points at the embedded project.
897 ff = open(local_pyconf_file, "w")
898 ff.write(LOCAL_TEMPLATE)
901 return sat_tmp_path.path
903 def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir):
904 '''For sources package that require that all products are get using an
905 archive, one has to create some archive for the vcs products.
906 So this method calls the clean and source command of sat and then create
909 :param l_pinfo_vcs List: The list of specific configuration corresponding to
911 :param sat Sat: The Sat instance that can be called to clean and source the
913 :param config Config: The global configuration.
914 :param logger Logger: the logging instance
915 :param tmp_working_dir str: The temporary local directory containing some
916 specific directories or files needed in the
918 :return: the dictionary that stores all the archives to add in the source
919 package. {label : (path_on_local_machine, path_in_archive)}
922 # clean the source directory of all the vcs products, then use the source
923 # command and thus construct an archive that will not contain the patches
924 l_prod_names = [pn for pn, __ in l_pinfo_vcs]
# Deliberately disabled clean step (kept for reference): cleaning in the
# user's SOURCES directory was deemed dangerous; sources are now fetched
# into tmp_local_working_dir instead.
925 if False:  # clean is dangerous in user/SOURCES, fixed in tmp_local_working_dir
926 logger.write(_("\nclean sources\n"))
927 args_clean = config.VARS.application
928 args_clean += " --sources --products "
929 args_clean += ",".join(l_prod_names)
930 logger.write("WARNING: get_archives_vcs clean\n '%s'\n" % args_clean, 1)
931 sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)
934 logger.write(_("get sources\n"))
935 args_source = config.VARS.application
936 args_source += " --products "
937 args_source += ",".join(l_prod_names)
# Save the real workdir and temporarily redirect it so the fetched sources
# land in <workdir>/tmp_package; restored at the end of this function.
938 svgDir = sat.cfg.APPLICATION.workdir
939 tmp_local_working_dir = os.path.join(sat.cfg.APPLICATION.workdir, "tmp_package")  # to avoid too much big files in /tmp
940 sat.cfg.APPLICATION.workdir = tmp_local_working_dir
941 # DBG.write("SSS sat config.APPLICATION.workdir", sat.cfg.APPLICATION, True)
942 # DBG.write("sat config id", id(sat.cfg), True)
# NOTE: sat.source() would operate on a config object with a different id()
# than sat.cfg, so source.run is called directly with the runner's config.
944 # sat.source(args_source, batch=True, verbose=5, logger_add_link = logger)
946 source.run(args_source, sat, logger)  # use this mode as runner.cfg reference
948 # make the new archives
950 for pn, pinfo in l_pinfo_vcs:
951 path_archive = make_archive(pn, pinfo, tmp_local_working_dir)
952 logger.write("make archive vcs '%s'\n" % path_archive)
953 d_archives_vcs[pn] = (path_archive,
954 os.path.join(ARCHIVE_DIR, pn + ".tgz"))
955 sat.cfg.APPLICATION.workdir = svgDir
956 # DBG.write("END sat config", sat.cfg.APPLICATION, True)
957 return d_archives_vcs
959 def make_archive(prod_name, prod_info, where):
960 '''Create an archive of a product by searching its source directory.
962 :param prod_name str: The name of the product.
963 :param prod_info Config: The specific configuration corresponding to the
965 :param where str: The path of the repository where to put the resulting
967 :return: The path of the resulting archive
# Create <where>/<prod_name>.tar.gz from the product's source directory.
970 path_targz_prod = os.path.join(where, prod_name + PACKAGE_EXT)
971 tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
972 local_path = prod_info.source_dir
# Python <= 2.6 tarfile only accepts the deprecated exclude= callback;
# newer versions use the filter= callback (both skip VCS dirs/extensions).
974 tar_prod.add(local_path,
976 exclude=exclude_VCS_and_extensions_26)
978 tar_prod.add(local_path,
980 filter=exclude_VCS_and_extensions)
982 return path_targz_prod
def create_project_for_src_package(config, tmp_working_dir, with_vcs, with_ftp):
    '''Create a specific project for a source package.

    Builds the project directory tree under tmp_working_dir, writes the
    project.pyconf (from PROJECT_TEMPLATE), copies per-product pyconf and
    scripts into it, and writes the application pyconf.

    :param config Config: The global configuration.
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
                                source package.
    :param with_vcs boolean: True if the package is with vcs products (not
                             transformed into archive products)
    :param with_ftp boolean: True if the package use ftp servers to get archives
    :return: The dictionary
             {"project" : (produced project, project path in the archive)}
    :rtype: dict
    '''
    # Create in the working temporary directory the full project tree
    project_tmp_dir = os.path.join(tmp_working_dir, PROJECT_DIR)
    # sub-directories of the project tree (products, compilation scripts,
    # environment scripts, patches, application pyconf)
    products_pyconf_tmp_dir = os.path.join(project_tmp_dir,
    compil_scripts_tmp_dir = os.path.join(project_tmp_dir,
    env_scripts_tmp_dir = os.path.join(project_tmp_dir,
    patches_tmp_dir = os.path.join(project_tmp_dir,
    application_tmp_dir = os.path.join(project_tmp_dir,
    # make sure every directory of the skeleton exists on disk
    for directory in [project_tmp_dir,
                      compil_scripts_tmp_dir,
                      env_scripts_tmp_dir,
                      application_tmp_dir]:
        src.ensure_path_exists(directory)

    # Create the pyconf that contains the information of the project
    project_pyconf_name = "project.pyconf"
    project_pyconf_file = os.path.join(project_tmp_dir, project_pyconf_name)
    ff = open(project_pyconf_file, "w")
    ff.write(PROJECT_TEMPLATE)
    # optionally declare the ftp servers where prerequisite archives live
    if with_ftp and len(config.PATHS.ARCHIVEFTP) > 0:
        # build a ':' separated list of the configured ftp servers
        ftp_path='ARCHIVEFTP : "'+config.PATHS.ARCHIVEFTP[0]
        for ftpserver in config.PATHS.ARCHIVEFTP[1:]:
            ftp_path=ftp_path+":"+ftpserver
        ff.write("# ftp servers where to search for prerequisite archives\n")
    # add licence paths if any
    if len(config.PATHS.LICENCEPATH) > 0:
        # build a ':' separated list of the configured licence paths
        licence_path='LICENCEPATH : "'+config.PATHS.LICENCEPATH[0]
        for path in config.PATHS.LICENCEPATH[1:]:
            licence_path=licence_path+":"+path
        ff.write("\n# Where to search for licences\n")
        ff.write(licence_path)

    # Loop over the products to get there pyconf and all the scripts
    # (compilation, environment, patches)
    # and create the pyconf file to add to the project
    lproducts_name = config.APPLICATION.products.keys()
    l_products = src.product.get_products_infos(lproducts_name, config)
    for p_name, p_info in l_products:
        # skip product with property not_in_package set to yes
        if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
        # copy the product pyconf and its scripts into the project tree
        find_product_scripts_and_pyconf(p_name,
                                        compil_scripts_tmp_dir,
                                        env_scripts_tmp_dir,
                                        products_pyconf_tmp_dir)

    # for the application pyconf, we write directly the config
    # don't search for the original pyconf file
    # to avoid problems with overwrite sections and rm_products key
    write_application_pyconf(config, application_tmp_dir)

    # the produced project directory and its path inside the archive
    d_project = {"project" : (project_tmp_dir, PROJECT_DIR )}
def find_product_scripts_and_pyconf(p_name,
                                    compil_scripts_tmp_dir,
                                    env_scripts_tmp_dir,
                                    products_pyconf_tmp_dir):
    '''Create a specific pyconf file for a given product. Get its environment
    script, its compilation script and patches and put it in the temporary
    working directory. This method is used in the source package in order to
    construct the specific project.

    :param p_name str: The name of the product.
    :param p_info Config: The specific configuration corresponding to the
                          product.
    :param config Config: The global configuration.
    :param with_vcs boolean: True if the package is with vcs products (not
                             transformed into archive products)
    :param compil_scripts_tmp_dir str: The path to the temporary compilation
                                       scripts directory of the project.
    :param env_scripts_tmp_dir str: The path to the temporary environment script
                                    directory of the project.
    :param patches_tmp_dir str: The path to the temporary patch scripts
                                directory of the project.
    :param products_pyconf_tmp_dir str: The path to the temporary product
                                        scripts directory of the project.
    '''
    # read the pyconf of the product
    product_pyconf_cfg = src.pyconf.Config(p_info.from_file)

    # find the compilation script if any
    if src.product.product_has_script(p_info):
        compil_script_path = src.Path(p_info.compil_script)
        compil_script_path.copy(compil_scripts_tmp_dir)

    # find the environment script if any
    if src.product.product_has_env_script(p_info):
        env_script_path = src.Path(p_info.environ.env_script)
        env_script_path.copy(env_scripts_tmp_dir)

    # find the patches if any
    if src.product.product_has_patches(p_info):
        patches = src.pyconf.Sequence()
        for patch_path in p_info.patches:
            p_path = src.Path(patch_path)
            p_path.copy(patches_tmp_dir)
            # keep only the basename: patches land flat in patches_tmp_dir
            patches.append(os.path.basename(patch_path), "")

    if (not with_vcs) and src.product.product_is_vcs(p_info):
        # in non vcs mode, if the product is not archive, then make it become archive.

        # depending upon the incremental mode, select impacted sections
        if "properties" in p_info and "incremental" in p_info.properties and\
           p_info.properties.incremental == "yes":
            # incremental products also patch the default/default_win sections
            sections = ["default", "default_win", p_info.section, p_info.section+"_win"]
            sections = [p_info.section]
        for section in sections:
            # only rewrite sections that actually define a get_source mode
            if section in product_pyconf_cfg and "get_source" in product_pyconf_cfg[section]:
                DBG.write("sat package set archive mode to archive for product %s and section %s" %\
                product_pyconf_cfg[section].get_source = "archive"
                if not "archive_info" in product_pyconf_cfg[section]:
                    product_pyconf_cfg[section].addMapping("archive_info",
                                                           src.pyconf.Mapping(product_pyconf_cfg),
                # point the section at the archive produced by make_archive
                product_pyconf_cfg[section].archive_info.archive_name =\
                    p_info.name + ".tgz"

    # write the pyconf file to the temporary project location
    product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir,
    ff = open(product_tmp_pyconf_path, 'w')
    ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
    product_pyconf_cfg.__save__(ff, 1)
def write_application_pyconf(config, application_tmp_dir):
    '''Write the application pyconf file in the specific temporary
    directory containing the specific project of a source package.

    :param config Config: The global configuration.
    :param application_tmp_dir str: The path to the temporary application
                                    scripts directory of the project.
    '''
    application_name = config.VARS.application
    # write the pyconf file to the temporary application location
    application_tmp_pyconf_path = os.path.join(application_tmp_dir,
                                               application_name + ".pyconf")
    with open(application_tmp_pyconf_path, 'w') as f:
        f.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
        res = src.pyconf.Config()
        # deep-copy so the in-memory config is not mutated below
        app = src.pyconf.deepCopyMapping(config.APPLICATION)
        # no base in packages
        # Change the workdir
        # workdir is rewritten as a reference relative to the sat
        # installation, so the package is relocatable
        app.workdir = src.pyconf.Reference(
                                 'VARS.salometoolsway + $VARS.sep + ".."')
        res.addMapping("APPLICATION", app, "")
        # evaluated=False keeps the $-references symbolic in the output file
        res.__save__(f, evaluated=False)
def sat_package(config, tmp_working_dir, options, logger):
    '''Prepare a dictionary that stores all the needed directories and files to
    add in a salomeTool package.

    :param config Config: The global configuration.
    :param tmp_working_dir str: The temporary local working directory
    :param options OptResult: the options of the launched command
    :param logger Logger: the logger instance of the command.
    :return: the dictionary that stores all the needed directories and files to
             add in a salomeTool package.
             {label : (path_on_local_machine, path_in_archive)}
    :rtype: dict
    '''
    # we include sat himself
    d_project["all_sat"]=(config.VARS.salometoolsway, "")

    # and we overwrite local.pyconf with a clean version.
    local_pyconf_tmp_path = os.path.join(tmp_working_dir, "local.pyconf")
    local_file_path = os.path.join(config.VARS.datadir, "local.pyconf")
    local_cfg = src.pyconf.Config(local_file_path)
    local_cfg.PROJECTS.project_file_paths=src.pyconf.Sequence(local_cfg.PROJECTS)
    # reset machine-specific local settings to neutral defaults so the
    # packaged sat does not inherit this machine's paths
    local_cfg.LOCAL["base"] = "default"
    local_cfg.LOCAL["workdir"] = "default"
    local_cfg.LOCAL["log_dir"] = "default"
    local_cfg.LOCAL["archive_dir"] = "default"
    local_cfg.LOCAL["VCS"] = "None"
    local_cfg.LOCAL["tag"] = src.get_salometool_version(config)

    # if the archive contains a project, we write its relative path in local.pyconf
    project_arch_path = os.path.join("projects", options.project,
                                     os.path.basename(options.project_file_path))
    local_cfg.PROJECTS.project_file_paths.append(project_arch_path, "")

    ff = open(local_pyconf_tmp_path, 'w')
    local_cfg.__save__(ff, 1)

    # the cleaned local.pyconf replaces the one shipped with sat itself
    d_project["local.pyconf"]=(local_pyconf_tmp_path, "data/local.pyconf")
def project_package(config, name_project, project_file_path, ftp_mode, tmp_working_dir, embedded_in_sat, logger):
    '''Prepare a dictionary that stores all the needed directories and files to
    add in a project package.

    :param config Config: The global configuration.
    :param name_project str: The name of the project to package.
    :param project_file_path str: The path to the local project.
    :param ftp_mode boolean: Do not embed archives, the archive will rely on ftp mode to retrieve them.
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
                                project package.
    :param embedded_in_sat boolean : the project package is embedded in a sat package
    :param logger Logger: the logger instance of the command.
    :return: the dictionary that stores all the needed directories and files to
             add in a project package.
             {label : (path_on_local_machine, path_in_archive)}
    :rtype: dict
    '''
    # Read the project file and get the directories to add to the package
    # first try the already-loaded project entry of the global config
    project_pyconf_cfg = config.PROJECTS.projects.__getattr__(name_project)
WARNING: inexisting config.PROJECTS.projects.%s, try to read now from:\n%s\n""" % (name_project, project_file_path))
        # fall back on reading the project file directly
        project_pyconf_cfg = src.pyconf.Config(project_file_path)
        project_pyconf_cfg.PWD = os.path.dirname(project_file_path)

    # pyconf path keys -> sub-directory name used inside the archive
    paths = {"APPLICATIONPATH" : "applications",
             "PRODUCTPATH" : "products",
             "MACHINEPATH" : "machines"}
    paths["ARCHIVEPATH"] = "archives"

    # Loop over the project paths and add it
    project_file_name = os.path.basename(project_file_path)
    if path not in project_pyconf_cfg:
        # inside a sat package, project content goes under projects/<name>
        dest_path = os.path.join("projects", name_project, paths[path])
        project_file_dest = os.path.join("projects", name_project, project_file_name)
        dest_path = paths[path]
        project_file_dest = project_file_name

        # Add the directory to the files to add in the package
        d_project[path] = (project_pyconf_cfg[path], dest_path)

        # Modify the value of the path in the package
        # rewritten as a reference relative to project_path, so the
        # packaged project is relocatable
        project_pyconf_cfg[path] = src.pyconf.Reference(
                                    'project_path + "/' + paths[path] + '"')

    # Modify some values
    if "project_path" not in project_pyconf_cfg:
        project_pyconf_cfg.addMapping("project_path",
                                      src.pyconf.Mapping(project_pyconf_cfg),
    project_pyconf_cfg.project_path = src.pyconf.Reference(project_pyconf_cfg,

    # we don't want to export these two fields
    project_pyconf_cfg.__delitem__("file_path")
    project_pyconf_cfg.__delitem__("PWD")
    project_pyconf_cfg.__delitem__("ARCHIVEPATH")

    # Write the project pyconf file
    project_pyconf_tmp_path = os.path.join(tmp_working_dir, project_file_name)
    ff = open(project_pyconf_tmp_path, 'w')
    ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
    project_pyconf_cfg.__save__(ff, 1)

    # the rewritten project file ("hat" file) and its place in the archive
    d_project["Project hat file"] = (project_pyconf_tmp_path, project_file_dest)
def add_readme(config, options, where):
    '''Generate the README file of the package from the internal templates.

    :param config Config: The global configuration.
    :param options OptResult: the options of the launched command.
    :param where str: The directory where to create the README.
    '''
    readme_path = os.path.join(where, "README")
    with codecs.open(readme_path, "w", 'utf-8') as f:

        # templates for building the header
# This package was generated with sat $version
# Distribution : $dist
In the following, $$ROOT represents the directory where you have installed
SALOME (the directory where this file is located).
        # on Windows the root placeholder uses %ROOT% syntax
        if src.architecture.is_windows():
            readme_header = readme_header.replace('$$ROOT','%ROOT%')
        readme_compilation_with_binaries="""

compilation based on the binaries used as prerequisites
=======================================================

If you fail to compile the complete application (for example because
you are not root on your system and cannot install missing packages), you
may try a partial compilation based on the binaries.
For that it is necessary to copy the binaries from BINARIES to INSTALL,
and do some substitutions on cmake and .la files (replace the build directories

The procedure to do it is:
 1) Remove or rename INSTALL directory if it exists
 2) Execute the shell script install_bin.sh:
 3) Use SalomeTool (as explained in Sources section) and compile only the
    modules you need to (with -p option)
        readme_header_tpl=string.Template(readme_header)
        # one README_*.template per kind of package content
        readme_template_path_bin = os.path.join(config.VARS.internal_dir,
                                                "README_BIN.template")
        readme_template_path_bin_launcher = os.path.join(config.VARS.internal_dir,
                                                         "README_LAUNCHER.template")
        readme_template_path_bin_virtapp = os.path.join(config.VARS.internal_dir,
                                                        "README_BIN_VIRTUAL_APP.template")
        readme_template_path_src = os.path.join(config.VARS.internal_dir,
                                                "README_SRC.template")
        readme_template_path_pro = os.path.join(config.VARS.internal_dir,
                                                "README_PROJECT.template")
        readme_template_path_sat = os.path.join(config.VARS.internal_dir,
                                                "README_SAT.template")

        # prepare substitution dictionary
        d['user'] = config.VARS.user
        d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
        d['version'] = src.get_salometool_version(config)
        d['dist'] = config.VARS.dist
        f.write(readme_header_tpl.substitute(d)) # write the general header (common)

        if options.binaries or options.sources:
            d['application'] = config.VARS.application
            d['BINARIES'] = config.INTERNAL.config.binary_dir
            d['SEPARATOR'] = config.VARS.sep
            if src.architecture.is_windows():
                d['operatingSystem'] = 'Windows'
                d['PYTHON3'] = 'python3'
                d['ROOT'] = '%ROOT%'
                d['operatingSystem'] = 'Linux'
            f.write("# Application: " + d['application'] + "\n")
            if 'KERNEL' in config.APPLICATION.products:
                VersionSalome = src.get_salome_version(config)
                # Case where SALOME has the launcher that uses the SalomeContext API
                if VersionSalome >= 730:
                    d['launcher'] = config.APPLICATION.profile.launcher_name
                    d['virtual_app'] = 'runAppli' # this info is not used now)

        # write the specific sections
        if options.binaries:
            f.write(src.template.substitute(readme_template_path_bin, d))
            # launcher vs virtual application: pick the matching template
            if "virtual_app" in d:
                f.write(src.template.substitute(readme_template_path_bin_virtapp, d))
                f.write(src.template.substitute(readme_template_path_bin_launcher, d))
            f.write(src.template.substitute(readme_template_path_src, d))

        if options.binaries and options.sources and not src.architecture.is_windows():
            f.write(readme_compilation_with_binaries)
            f.write(src.template.substitute(readme_template_path_pro, d))
            f.write(src.template.substitute(readme_template_path_sat, d))
def update_config(config, logger, prop, value):
    '''Remove from config.APPLICATION.products the products that have the property given as input.

    :param config Config: The global config.
    :param logger Logger: The logger used to trace each removal.
    :param prop str: The property to filter
    :param value str: The value of the property to filter
    '''
    # Without an APPLICATION section (e.g. 'sat package -t') there is
    # nothing to filter: bail out early.
    if "APPLICATION" not in config:
        return

    # First collect the matching product names, then delete them: removing
    # entries while iterating the products mapping would be unsafe.
    names_to_drop = [
        name
        for name in config.APPLICATION.products.keys()
        if src.get_property_in_product_cfg(
               src.product.get_product_config(config, name), prop) == value
    ]
    for name in names_to_drop:
        config.APPLICATION.products.__delitem__(name)
        logger.write("Remove product %s with property %s\n" % (name, prop), 5)
1419 '''method that is called when salomeTools is called with --help option.
1421 :return: The text to display for the package command description.
1425 The package command creates a tar file archive of a product.
1426 There are four kinds of archive, which can be mixed:
1428 1 - The binary archive.
1429 It contains the product installation directories plus a launcher.
1430 2 - The sources archive.
1431 It contains the product archives, a project (the application plus salomeTools).
1432 3 - The project archive.
1433 It contains a project (give the project file path as argument).
1434 4 - The salomeTools archive.
1435 It contains code utility salomeTools.
1438 >> sat package SALOME-master --binaries --sources""")
1440 def run(args, runner, logger):
1441 '''method that is called when salomeTools is called with package parameter.
1445 (options, args) = parser.parse_args(args)
1447 # Check that a type of package is called, and only one
1448 all_option_types = (options.binaries,
1450 options.project not in ["", None],
1453 # Check if no option for package type
1454 if all_option_types.count(True) == 0:
1455 msg = _("Error: Precise a type for the package\nUse one of the "
1456 "following options: --binaries, --sources, --project or"
1458 logger.write(src.printcolors.printcError(msg), 1)
1459 logger.write("\n", 1)
1462 # The repository where to put the package if not Binary or Source
1463 package_default_path = runner.cfg.LOCAL.workdir
1465 # if the package contains binaries or sources:
1466 if options.binaries or options.sources:
1467 # Check that the command has been called with an application
1468 src.check_config_has_application(runner.cfg)
1470 # Display information
1471 logger.write(_("Packaging application %s\n") % src.printcolors.printcLabel(
1472 runner.cfg.VARS.application), 1)
1474 # Get the default directory where to put the packages
1475 package_default_path = os.path.join(runner.cfg.APPLICATION.workdir, "PACKAGE")
1476 src.ensure_path_exists(package_default_path)
1478 # if the package contains a project:
1480 # check that the project is visible by SAT
1481 projectNameFile = options.project + ".pyconf"
1483 for i in runner.cfg.PROJECTS.project_file_paths:
1484 baseName = os.path.basename(i)
1485 if baseName == projectNameFile:
1489 if foundProject is None:
1490 local_path = os.path.join(runner.cfg.VARS.salometoolsway, "data", "local.pyconf")
1491 msg = _("""ERROR: the project %(1)s is not visible by salomeTools.
1495 Please add it in file:
1497 {"1": options.project, "2": "\n ".join(runner.cfg.PROJECTS.project_file_paths), "3": local_path})
1498 logger.write(src.printcolors.printcError(msg), 1)
1499 logger.write("\n", 1)
1502 options.project_file_path = foundProject
1503 src.printcolors.print_value(logger, "Project path", options.project_file_path, 2)
1505 # Remove the products that are filtered by the --without_properties option
1506 if options.without_properties:
1507 prop, value = options.without_properties
1508 update_config(runner.cfg, logger, prop, value)
1510 # Remove from config the products that have the not_in_package property
1511 update_config(runner.cfg, logger, "not_in_package", "yes")
1513 # for binary packages without sources, remove compile time products
1514 if options.binaries and (not options.sources):
1515 update_config(runner.cfg, logger, "compile_time", "yes")
1517 # get the name of the archive or build it
1519 if os.path.basename(options.name) == options.name:
1520 # only a name (not a path)
1521 archive_name = options.name
1522 dir_name = package_default_path
1524 archive_name = os.path.basename(options.name)
1525 dir_name = os.path.dirname(options.name)
1527 # suppress extension
1528 if archive_name[-len(".tgz"):] == ".tgz":
1529 archive_name = archive_name[:-len(".tgz")]
1530 if archive_name[-len(".tar.gz"):] == ".tar.gz":
1531 archive_name = archive_name[:-len(".tar.gz")]
1535 dir_name = package_default_path
1536 if options.binaries or options.sources:
1537 archive_name = runner.cfg.APPLICATION.name
1539 if options.binaries:
1540 archive_name += "-"+runner.cfg.VARS.dist
1543 archive_name += "-SRC"
1544 if options.with_vcs:
1545 archive_name += "-VCS"
1548 archive_name += ("salomeTools_" + src.get_salometool_version(runner.cfg))
1553 archive_name += ("satproject_" + options.project)
1555 if len(archive_name)==0: # no option worked
1556 msg = _("Error: Cannot name the archive\n"
1557 " check if at least one of the following options was "
1558 "selected : --binaries, --sources, --project or"
1560 logger.write(src.printcolors.printcError(msg), 1)
1561 logger.write("\n", 1)
1564 path_targz = os.path.join(dir_name, archive_name + PACKAGE_EXT)
1566 src.printcolors.print_value(logger, "Package path", path_targz, 2)
1568 # Create a working directory for all files that are produced during the
1569 # package creation and that will be removed at the end of the command
1570 tmp_working_dir = os.path.join(runner.cfg.VARS.tmp_root, runner.cfg.VARS.datehour)
1571 src.ensure_path_exists(tmp_working_dir)
1572 logger.write("\n", 5)
1573 logger.write(_("The temporary working directory: %s\n" % tmp_working_dir),5)
1575 logger.write("\n", 3)
1577 msg = _("Preparation of files to add to the archive")
1578 logger.write(src.printcolors.printcLabel(msg), 2)
1579 logger.write("\n", 2)
1581 d_files_to_add={} # content of the archive
1583 # a dict to hold paths that will need to be substitute for users recompilations
1584 d_paths_to_substitute={}
1586 if options.binaries:
1587 d_bin_files_to_add = binary_package(runner.cfg,
1591 # for all binaries dir, store the substitution that will be required
1592 # for extra compilations
1593 for key in d_bin_files_to_add:
1594 if key.endswith("(bin)"):
1595 source_dir = d_bin_files_to_add[key][0]
1596 path_in_archive = d_bin_files_to_add[key][1].replace(
1597 runner.cfg.INTERNAL.config.binary_dir + runner.cfg.VARS.dist,
1598 runner.cfg.INTERNAL.config.install_dir)
1599 if os.path.basename(source_dir)==os.path.basename(path_in_archive):
1600 # if basename is the same we will just substitute the dirname
1601 d_paths_to_substitute[os.path.dirname(source_dir)]=\
1602 os.path.dirname(path_in_archive)
1604 d_paths_to_substitute[source_dir]=path_in_archive
1606 d_files_to_add.update(d_bin_files_to_add)
1608 d_files_to_add.update(source_package(runner,
1613 if options.binaries:
1614 # for archives with bin and sources we provide a shell script able to
1615 # install binaries for compilation
1616 file_install_bin=produce_install_bin_file(runner.cfg,logger,
1618 d_paths_to_substitute,
1620 d_files_to_add.update({"install_bin" : (file_install_bin, "install_bin.sh")})
1621 logger.write("substitutions that need to be done later : \n", 5)
1622 logger.write(str(d_paths_to_substitute), 5)
1623 logger.write("\n", 5)
1625 # --salomeTool option is not considered when --sources is selected, as this option
1626 # already brings salomeTool!
1628 d_files_to_add.update(sat_package(runner.cfg, tmp_working_dir,
1632 DBG.write("config for package %s" % options.project, runner.cfg)
1633 d_files_to_add.update(project_package(runner.cfg, options.project, options.project_file_path, options.ftp, tmp_working_dir, options.sat, logger))
1635 if not(d_files_to_add):
1636 msg = _("Error: Empty dictionnary to build the archive!\n")
1637 logger.write(src.printcolors.printcError(msg), 1)
1638 logger.write("\n", 1)
1641 # Add the README file in the package
1642 local_readme_tmp_path = add_readme(runner.cfg, options, tmp_working_dir)
1643 d_files_to_add["README"] = (local_readme_tmp_path, "README")
1645 # Add the additional files of option add_files
1646 if options.add_files:
1647 for file_path in options.add_files:
1648 if not os.path.exists(file_path):
1649 msg = _("WARNING: the file %s is not accessible.\n" % file_path)
1651 file_name = os.path.basename(file_path)
1652 d_files_to_add[file_name] = (file_path, file_name)
1654 logger.write("\n", 2)
1655 logger.write(src.printcolors.printcLabel(_("Actually do the package")), 2)
1656 logger.write("\n", 2)
1657 logger.write("\nfiles and directories to add:\n%s\n\n" % PP.pformat(d_files_to_add), 5)
1661 # Creating the object tarfile
1662 tar = tarfile.open(path_targz, mode='w:gz')
1664 # get the filtering function if needed
1666 filter_function = exclude_VCS_and_extensions_26
1668 filter_function = exclude_VCS_and_extensions
1670 # Add the files to the tarfile object
1671 res = add_files(tar, archive_name, d_files_to_add, logger, f_exclude=filter_function)
1673 except KeyboardInterrupt:
1674 logger.write(src.printcolors.printcError("\nERROR: forced interruption\n"), 1)
1675 logger.write(_("Removing the temporary working directory '%s'... ") % tmp_working_dir, 1)
1676 # remove the working directory
1677 shutil.rmtree(tmp_working_dir)
1678 logger.write(_("OK"), 1)
1679 logger.write(_("\n"), 1)
1682 # case if no application, only package sat as 'sat package -t'
1684 app = runner.cfg.APPLICATION
1688 # unconditionaly remove the tmp_local_working_dir
1690 tmp_local_working_dir = os.path.join(app.workdir, "tmp_package")
1691 if os.path.isdir(tmp_local_working_dir):
1692 shutil.rmtree(tmp_local_working_dir)
1694 # remove the tmp directory, unless user has registered as developer
1695 if os.path.isdir(tmp_working_dir) and (not DBG.isDeveloper()):
1696 shutil.rmtree(tmp_working_dir)
1698 # Print again the path of the package
1699 logger.write("\n", 2)
1700 src.printcolors.print_value(logger, "Package path", path_targz, 2)