3 # Copyright (C) 2010-2012 CEA/DEN
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 # Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
31 from application import get_SALOME_modules
32 import src.debug as DBG
# name, inside the produced package, of the directory holding product archives
ARCHIVE_DIR = "ARCHIVES"
# name, inside the produced package, of the directory holding the embedded project
PROJECT_DIR = "PROJECT"

# VCS administrative directories that are never embedded in a produced archive
IGNORED_DIRS = [".git", ".svn"]
# file extensions filtered out of produced archives (none by default)
IGNORED_EXTENSIONS = []

PACKAGE_EXT=".tar.gz" # the extension we use for the packages
# pyconf template of the project embedded in a source package; paths are
# expressed relative to $PWD so the unpacked archive stays relocatable
PROJECT_TEMPLATE = """#!/usr/bin/env python
# The path to the archive root directory
root_path : $PWD + "/../"
project_path : $PWD + "/"
# Where to search the archives of the products
ARCHIVEPATH : $root_path + "ARCHIVES"
# Where to search the pyconf of the applications
APPLICATIONPATH : $project_path + "applications/"
# Where to search the pyconf of the products
PRODUCTPATH : $project_path + "products/"
# Where to search the pyconf of the jobs of the project
JOBPATH : $project_path + "jobs/"
# Where to search the pyconf of the machines of the project
MACHINEPATH : $project_path + "machines/"
LOCAL_TEMPLATE = ("""#!/usr/bin/env python
  archive_dir : 'default'
  project_file_paths : [$VARS.salometoolsway + $VARS.sep + \"..\" + $VARS.sep"""
        """ + \"""" + PROJECT_DIR + """\" + $VARS.sep + "project.pyconf"]
# Define all possible options for the package command : sat package <options>
parser = src.options.Options()
parser.add_option('b', 'binaries', 'boolean', 'binaries',
    _('Optional: Produce a binary package.'), False)
parser.add_option('f', 'force_creation', 'boolean', 'force_creation',
    _('Optional: Only binary package: produce the archive even if '
      'there are some missing products.'), False)
parser.add_option('s', 'sources', 'boolean', 'sources',
    _('Optional: Produce a compilable archive of the sources of the '
      'application.'), False)
parser.add_option('', 'with_vcs', 'boolean', 'with_vcs',
    _('Optional: Do not make archive for products in VCS mode (git, cvs, svn). '
      'Sat prepare will use VCS mode instead to retrieve them'),
parser.add_option('', 'ftp', 'boolean', 'ftp',
    _('Optional: Do not embed archives for products in archive mode.'
      'Sat prepare will use ftp instead to retrieve them'),
parser.add_option('p', 'project', 'string', 'project',
    _('Optional: Produce an archive that contains a project.'), "")
parser.add_option('t', 'salometools', 'boolean', 'sat',
    _('Optional: Produce an archive that contains salomeTools.'), False)
parser.add_option('n', 'name', 'string', 'name',
    _('Optional: The name or full path of the archive.'), None)
parser.add_option('', 'add_files', 'list2', 'add_files',
    _('Optional: The list of additional files to add to the archive.'), [])
parser.add_option('', 'without_properties', 'properties', 'without_properties',
    _('Optional: Filter the products by their properties.\n\tSyntax: '
      '--without_properties <property>:<value>'))
def add_files(tar, name_archive, d_content, logger, f_exclude=None):
    '''Create an archive containing all directories and files that are given in
       the d_content argument.

    :param tar tarfile: The tarfile instance used to make the archive.
    :param name_archive str: The name of the archive to make (also used as the
                             root directory inside the archive).
    :param d_content dict: The dictionary that contains all directories and files
                           to add in the archive, as
                           (path_on_local_machine, path_in_archive)
    :param logger Logger: the logging instance
    :param f_exclude Function: the function that filters out unwanted entries
    :return: 0 if success, 1 if not.
    # get the max length of the messages in order to make the display
    max_len = len(max(d_content.keys(), key=len))
    # loop over each directory or file stored in the d_content dictionary
    names = sorted(d_content.keys())
    DBG.write("add tar names", names)
    # used to avoid duplications (for pip install in python, or single_install_dir cases)
        # display information
        len_points = max_len - len(name) + 3
        local_path, archive_path = d_content[name]
        in_archive = os.path.join(name_archive, archive_path)
        logger.write(name + " " + len_points * "." + " "+ in_archive + " ", 3)
        # Get the local path and the path in archive
        # of the directory or file to add
        # Add it in the archive
            key=local_path+"->"+in_archive
            if key not in already_added:
                tar.add(local_path, arcname=in_archive, exclude=f_exclude)
                already_added.add(key)
            logger.write(src.printcolors.printcSuccess(_("OK")), 3)
        except Exception as e:
            # a failure on one entry is reported but does not abort the others
            logger.write(src.printcolors.printcError(_("KO ")), 3)
            logger.write(str(e), 3)
        logger.write("\n", 3)
def exclude_VCS_and_extensions(filename, ignored_dirs=None, ignored_extensions=None):
    '''The function that is used to exclude from package the links to the
       VCS repositories (like .git) and unwanted file extensions.

    :param filename Str: The filename to test for exclusion.
    :param ignored_dirs List: directory names whose presence anywhere in the
                              path forces exclusion (defaults to the
                              module-level IGNORED_DIRS).
    :param ignored_extensions List: file extensions that force exclusion
                                    (defaults to the module-level
                                    IGNORED_EXTENSIONS).
    :return: True if the file has to be excluded, False otherwise.
    :rtype: Boolean
    '''
    # fall back on the module-level configuration when no explicit filter
    # lists are supplied (the tarfile 'exclude' callback passes only filename)
    dirs = IGNORED_DIRS if ignored_dirs is None else ignored_dirs
    extensions = IGNORED_EXTENSIONS if ignored_extensions is None else ignored_extensions
    for dir_name in dirs:
        if dir_name in filename:
            return True
    for extension in extensions:
        if filename.endswith(extension):
            return True
    return False
def produce_relative_launcher(config,
    '''Create a specific SALOME launcher for the binary package. This launcher
       only uses relative paths so the unpacked archive is relocatable.

    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param file_dir str: the directory where to put the launcher
    :param file_name str: The launcher name
    :param binaries_dir_name str: the name of the repository where the binaries
                                  are, in the archive.
    :return: the path of the produced launcher

    # get KERNEL installation path
    kernel_info = src.product.get_product_config(config, "KERNEL")
    kernel_base_name=os.path.basename(kernel_info.install_dir)
    if kernel_base_name.startswith("config"):
        # case of kernel installed in base. We remove "config-i"
        kernel_base_name=os.path.basename(os.path.dirname(kernel_info.install_dir))
    kernel_root_dir = os.path.join(binaries_dir_name, kernel_base_name)

    # set kernel bin dir (considering fhs property)
    kernel_cfg = src.product.get_product_config(config, "KERNEL")
    if src.get_property_in_product_cfg(kernel_cfg, "fhs"):
        bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin")
        bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin","salome")

    # check if the application contains an application module
    # check also if the application has a distene product,
    # in this case get its licence file name
    l_product_info = src.product.get_products_infos(config.APPLICATION.products.keys(), config)
    salome_application_name="Not defined"
    distene_licence_file_name=False
    for prod_name, prod_info in l_product_info:
        # look for a "salome application" and a distene product
        if src.get_property_in_product_cfg(prod_info, "is_distene") == "yes":
            distene_licence_file_name = src.product.product_has_licence(prod_info,
                                                     config.PATHS.LICENCEPATH)
        if src.get_property_in_product_cfg(prod_info, "is_salome_application") == "yes":
            salome_application_name=prod_info.name

    # if the application contains an application module, we set ABSOLUTE_APPLI_PATH to it
    # if not we set it to KERNEL_INSTALL_DIR, which is sufficient, except for salome test
    if salome_application_name == "Not defined":
        app_root_dir=kernel_root_dir
        app_root_dir=os.path.join(binaries_dir_name, salome_application_name)

    additional_env['sat_bin_kernel_install_dir'] = "out_dir_Path + " +\
                                                   config.VARS.sep + bin_kernel_install_dir
    if "python3" in config.APPLICATION and config.APPLICATION.python3 == "yes":
        additional_env['sat_python_version'] = 3
        additional_env['sat_python_version'] = 2

    additional_env['ABSOLUTE_APPLI_PATH'] = "out_dir_Path" + config.VARS.sep + app_root_dir

    # create an environment file writer
    writer = src.environment.FileEnvWriter(config,
    filepath = os.path.join(file_dir, file_name)
    writer.write_env_file(filepath,
                          additional_env=additional_env,
                          no_path_init="False",
                          for_package = binaries_dir_name)

    # Little hack to put out_dir_Path outside the strings
    src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' )
    src.replace_in_file(filepath, "r'out_dir_Path + ", "out_dir_Path + r'" )

    # A hack to put a call to a file for distene licence.
    # It does nothing to an application that has no distene product
    if distene_licence_file_name:
        logger.write("Application has a distene licence file! We use it in package launcher", 5)
        hack_for_distene_licence(filepath, distene_licence_file_name)

    # change the rights in order to make the file executable for everybody
def hack_for_distene_licence(filepath, licence_file):
    '''Replace the distene licence env variable by a call to a file.

    :param filepath Str: The path to the launcher to modify.
    # work on a copy: the original launcher is kept aside as "<name>_old"
    shutil.move(filepath, filepath + "_old")
    filein = filepath + "_old"
    fin = open(filein, "r")
    fout = open(fileout, "w")
    text = fin.readlines()
    # Find the Distene section
    for i,line in enumerate(text):
        if "# Set DISTENE License" in line:
        # No distene product, there is nothing to do
    del text[num_line +1]
    del text[num_line +1]
    # replacement code: load the licence file as a module and let it set the
    # variables in the launcher context (imp fallback for python < 3.5)
    text_to_insert =""" try:
      distene_licence_file=r"%s"
      if sys.version_info[0] >= 3 and sys.version_info[1] >= 5:
        import importlib.util
        spec_dist = importlib.util.spec_from_file_location("distene_licence", distene_licence_file)
        distene=importlib.util.module_from_spec(spec_dist)
        spec_dist.loader.exec_module(distene)
        distene = imp.load_source('distene_licence', distene_licence_file)
      distene.set_distene_variables(context)
      pass\n""" % licence_file
    text.insert(num_line + 1, text_to_insert)
def produce_relative_env_files(config,
    '''Create some specific environment files for the binary package. These
       files use relative paths.

    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param file_dir str: the directory where to put the files
    :param binaries_dir_name str: the name of the repository where the binaries
                                  are, in the archive.
    :return: the list of path of the produced environment files

    # create an environment file writer
    writer = src.environment.FileEnvWriter(config,
    if src.architecture.is_windows():
        filename  = "env_launch.bat"
        filename  = "env_launch.sh"

    filepath = writer.write_env_file(filename,
                                     for_package = binaries_dir_name)

    # Little hack to put out_dir_Path as environment variable
    if src.architecture.is_windows() :
        src.replace_in_file(filepath, '"out_dir_Path', '"%out_dir_Path%' )
        src.replace_in_file(filepath, '=out_dir_Path', '=%out_dir_Path%' )
        src.replace_in_file(filepath, ';out_dir_Path', ';%out_dir_Path%' )
        src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )
        src.replace_in_file(filepath, ':out_dir_Path', ':${out_dir_Path}' )

    # change the rights in order to make the file executable for everybody
def produce_install_bin_file(config,
    '''Create a bash shell script which does substitutions in the BINARIES dir
       in order to use it for extra compilations.

    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param file_dir str: the directory where to put the files
    :param d_sub, dict: the dictionnary that contains the substitutions to be done
    :param file_name str: the name of the install script file
    :return: the produced file

    filepath = os.path.join(file_dir, file_name)
    # open the file and write into it
    # use codec utf-8 as sat variables are in unicode
    with codecs.open(filepath, "w", 'utf-8') as installbin_file:
        installbin_template_path = os.path.join(config.VARS.internal_dir,
                                                "INSTALL_BIN.template")

        # build the name of the directory that will contain the binaries
        binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist
        # build the substitution loop (a shell for-loop doing sed-like edits)
        loop_cmd = "for f in $(grep -RIl"
            loop_cmd += " -e "+ key
        loop_cmd += ' ' + config.INTERNAL.config.install_dir +\
            loop_cmd += " s?" + key + "?$(pwd)/" + d_sub[key] + "?g\n"
        loop_cmd += ' " $f\ndone'

        d["BINARIES_DIR"] = binaries_dir_name
        d["SUBSTITUTION_LOOP"]=loop_cmd
        d["INSTALL_DIR"]=config.INTERNAL.config.install_dir

        # substitute the template and write it in file
        content=src.template.substitute(installbin_template_path, d)
        installbin_file.write(content)
    # change the rights in order to make the file executable for everybody
def product_appli_creation_script(config,
    '''Create a script that can produce an application (EDF style) in the binary
       package.

    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param file_dir str: the directory where to put the file
    :param binaries_dir_name str: the name of the repository where the binaries
                                  are, in the archive.
    :return: the path of the produced script file

    template_name = "create_appli.py.for_bin_packages.template"
    template_path = os.path.join(config.VARS.internal_dir, template_name)
    text_to_fill = open(template_path, "r").read()
    text_to_fill = text_to_fill.replace("TO BE FILLED 1",
                                        '"' + binaries_dir_name + '"')

    # build one <module .../> XML line per SALOME module of the application
    for product_name in get_SALOME_modules(config):
        product_info = src.product.get_product_config(config, product_name)

        if src.product.product_is_smesh_plugin(product_info):

        if 'install_dir' in product_info and bool(product_info.install_dir):
            if src.product.product_is_cpp(product_info):
                # cpp products expose several components, one module line each
                for cpp_name in src.product.get_product_components(product_info):
                    line_to_add = ("<module name=\"" +
                                   "\" gui=\"yes\" path=\"''' + "
                                   "os.path.join(dir_bin_name, \"" +
                                   cpp_name + "\") + '''\"/>")
                line_to_add = ("<module name=\"" +
                               "\" gui=\"yes\" path=\"''' + "
                               "os.path.join(dir_bin_name, \"" +
                               product_name + "\") + '''\"/>")
            text_to_add += line_to_add + "\n"

    filled_text = text_to_fill.replace("TO BE FILLED 2", text_to_add)

    tmp_file_path = os.path.join(file_dir, "create_appli.py")
    ff = open(tmp_file_path, "w")
    ff.write(filled_text)

    # change the rights in order to make the file executable for everybody
    os.chmod(tmp_file_path,
def binary_package(config, logger, options, tmp_working_dir):
    '''Prepare a dictionary that stores all the needed directories and files to
       add in a binary package.

    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param options OptResult: the options of the launched command
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
    :return: the dictionary that stores all the needed directories and files to
             add in a binary package.
             {label : (path_on_local_machine, path_in_archive)}
    # Get the list of product installation to add to the archive
    l_products_name = sorted(config.APPLICATION.products.keys())
    l_product_info = src.product.get_products_infos(l_products_name,
    l_sources_not_present = []
    generate_mesa_launcher = False  # a flag to know if we generate a mesa launcher
    if ("APPLICATION" in config and
        "properties" in config.APPLICATION and
        "mesa_launcher_in_package" in config.APPLICATION.properties and
        config.APPLICATION.properties.mesa_launcher_in_package == "yes") :
            generate_mesa_launcher=True
    for prod_name, prod_info in l_product_info:
        # skip product with property not_in_package set to yes
        if src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes":
        # Add the sources of the products that have the property
        # sources_in_package : "yes"
        if src.get_property_in_product_cfg(prod_info,
                                           "sources_in_package") == "yes":
            if os.path.exists(prod_info.source_dir):
                l_source_dir.append((prod_name, prod_info.source_dir))
                l_sources_not_present.append(prod_name)

        # ignore the native and fixed products for install directories
        if (src.product.product_is_native(prod_info)
                or src.product.product_is_fixed(prod_info)
                or not src.product.product_compiles(prod_info)):
        if src.product.check_installation(config, prod_info):
            l_install_dir.append((prod_name, prod_info.install_dir))
            l_not_installed.append(prod_name)

        # Add also the cpp generated modules (if any)
        if src.product.product_is_cpp(prod_info):
            for name_cpp in src.product.get_product_components(prod_info):
                install_dir = os.path.join(config.APPLICATION.workdir,
                                           config.INTERNAL.config.install_dir,
                if os.path.exists(install_dir):
                    l_install_dir.append((name_cpp, install_dir))
                    l_not_installed.append(name_cpp)

    # check the name of the directory that (could) contains the binaries
    # from previous detar
    binaries_from_detar = os.path.join(
        config.APPLICATION.workdir,
        config.INTERNAL.config.binary_dir + config.VARS.dist)
    if os.path.exists(binaries_from_detar):
WARNING: existing binaries directory from previous detar installation:
To make new package from this, you have to:
1) install binaries in INSTALL directory with the script "install_bin.sh"
   see README file for more details
2) or recompile everything in INSTALL with "sat compile" command
   this step is long, and requires some linux packages to be installed
""" % binaries_from_detar)

    # Print warning or error if there are some missing products
    if len(l_not_installed) > 0:
        text_missing_prods = ""
        for p_name in l_not_installed:
            text_missing_prods += " - " + p_name + "\n"
        if not options.force_creation:
            msg = _("ERROR: there are missing product installations:")
            logger.write("%s\n%s" % (src.printcolors.printcError(msg),
            raise src.SatException(msg)
            msg = _("WARNING: there are missing products installations:")
            logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),

    # Do the same for sources
    if len(l_sources_not_present) > 0:
        text_missing_prods = ""
        for p_name in l_sources_not_present:
            text_missing_prods += "-" + p_name + "\n"
        if not options.force_creation:
            msg = _("ERROR: there are missing product sources:")
            logger.write("%s\n%s" % (src.printcolors.printcError(msg),
            raise src.SatException(msg)
            msg = _("WARNING: there are missing products sources:")
            logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),

    # construct the name of the directory that will contain the binaries
    if src.architecture.is_windows():
        binaries_dir_name = config.INTERNAL.config.binary_dir
        binaries_dir_name = config.INTERNAL.config.binary_dir + config.VARS.dist

    # construct the correlation table between the product names, there
    # actual install directories and there install directory in archive
    for prod_name, install_dir in l_install_dir:
        prod_base_name=os.path.basename(install_dir)
        if prod_base_name.startswith("config"):
            # case of a products installed in base. We remove "config-i"
            prod_base_name=os.path.basename(os.path.dirname(install_dir))
        path_in_archive = os.path.join(binaries_dir_name, prod_base_name)
        d_products[prod_name + " (bin)"] = (install_dir, path_in_archive)

    for prod_name, source_dir in l_source_dir:
        path_in_archive = os.path.join("SOURCES", prod_name)
        d_products[prod_name + " (sources)"] = (source_dir, path_in_archive)

    # for packages of SALOME applications including KERNEL,
    # we produce a salome launcher or a virtual application (depending on salome version)
    if 'KERNEL' in config.APPLICATION.products:
        VersionSalome = src.get_salome_version(config)
        # Case where SALOME has the launcher that uses the SalomeContext API
        if VersionSalome >= 730:
            # create the relative launcher and add it to the files to add
            launcher_name = src.get_launcher_name(config)
            launcher_package = produce_relative_launcher(config,
            d_products["launcher"] = (launcher_package, launcher_name)

            # if the application contains mesa products, we generate in addition to the
            # classical salome launcher a launcher using mesa and called mesa_salome
            # (the mesa launcher will be used for remote usage through ssh).
            if generate_mesa_launcher:
                #if there is one : store the use_mesa property
                restore_use_mesa_option=None
                if ('properties' in config.APPLICATION and
                        'use_mesa' in config.APPLICATION.properties):
                    restore_use_mesa_option = config.APPLICATION.properties.use_mesa

                # activate mesa property, and generate a mesa launcher
                src.activate_mesa_property(config)  #activate use_mesa property
                launcher_mesa_name="mesa_"+launcher_name
                launcher_package_mesa = produce_relative_launcher(config,
                d_products["launcher (mesa)"] = (launcher_package_mesa, launcher_mesa_name)

                # if there was a use_mesa value, we restore it
                # else we set it to the default value "no"
                if restore_use_mesa_option != None:
                    config.APPLICATION.properties.use_mesa=restore_use_mesa_option
                    config.APPLICATION.properties.use_mesa="no"

                # if we mix binaries and sources, we add a copy of the launcher,
                # prefixed with "bin",in order to avoid clashes
                d_products["launcher (copy)"] = (launcher_package, "bin"+launcher_name)

            # Provide a script for the creation of an application EDF style
            appli_script = product_appli_creation_script(config,
            d_products["appli script"] = (appli_script, "create_appli.py")

    # Put also the environment file
    env_file = produce_relative_env_files(config,
    if src.architecture.is_windows():
        filename = "env_launch.bat"
        filename = "env_launch.sh"
    d_products["environment file"] = (env_file, filename)
def source_package(sat, config, logger, options, tmp_working_dir):
    '''Prepare a dictionary that stores all the needed directories and files to
       add in a source package.

    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param options OptResult: the options of the launched command
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
    :return: the dictionary that stores all the needed directories and files to
             add in a source package.
             {label : (path_on_local_machine, path_in_archive)}
    # Get all the products that are prepared using an archive
    # unless ftp mode is specified (in this case the user of the
    # archive will get the sources through the ftp mode of sat prepare
    logger.write("Find archive products ... ")
    d_archives, l_pinfo_vcs = get_archives(config, logger)
    logger.write("Done\n")

    if not options.with_vcs and len(l_pinfo_vcs) > 0:
        # Make archives with the products that are not prepared using an archive
        # (git, cvs, svn, etc)
        logger.write("Construct archives for vcs products ... ")
        d_archives_vcs = get_archives_vcs(l_pinfo_vcs,
        logger.write("Done\n")

    logger.write("Create the project ... ")
    d_project = create_project_for_src_package(config,
    logger.write("Done\n")

    tmp_sat = add_salomeTools(config, tmp_working_dir)
    d_sat = {"salomeTools" : (tmp_sat, "salomeTools")}

    # Add a sat symbolic link if not win
    if not src.architecture.is_windows():
        tmp_satlink_path = os.path.join(tmp_working_dir, 'sat')
            # In the jobs, os.getcwd() can fail
            t = config.LOCAL.workdir
        os.chdir(tmp_working_dir)
        if os.path.lexists(tmp_satlink_path):
            os.remove(tmp_satlink_path)
        os.symlink(os.path.join('salomeTools', 'sat'), 'sat')

        d_sat["sat link"] = (tmp_satlink_path, "sat")

    d_source = src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
def get_archives(config, logger):
    '''Find all the products that are get using an archive and all the products
       that are get using a vcs (git, cvs, svn) repository.

    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :return: the dictionary {name_product :
             (local path of its archive, path in the package of its archive )}
             and the list of specific configuration corresponding to the vcs
    # Get the list of product informations
    l_products_name = config.APPLICATION.products.keys()
    l_product_info = src.product.get_products_infos(l_products_name,
    for p_name, p_info in l_product_info:
        # skip product with property not_in_package set to yes
        if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
        # ignore the native and fixed products
        if (src.product.product_is_native(p_info)
                or src.product.product_is_fixed(p_info)):
        if p_info.get_source == "archive":
            archive_path = p_info.archive_info.archive_name
            archive_name = os.path.basename(archive_path)
            d_archives[p_name] = (archive_path,
                                  os.path.join(ARCHIVE_DIR, archive_name))
            if (src.appli_test_property(config,"pip", "yes") and
                src.product.product_test_property(p_info,"pip", "yes")):
                # if pip mode is activated, and product is managed by pip
                pip_wheels_dir=os.path.join(config.LOCAL.archive_dir,"wheels")
                pip_wheel_pattern=os.path.join(pip_wheels_dir,
                    "%s-%s*" % (p_info.name, p_info.version))
                pip_wheel_path=glob.glob(pip_wheel_pattern)
                msg_pip_not_found="Error in get_archive, pip wheel for "\
                                  "product %s-%s was not found in %s directory"
                msg_pip_two_or_more="Error in get_archive, several pip wheels for "\
                                    "product %s-%s were found in %s directory"
                # exactly one wheel must match <name>-<version>*
                if len(pip_wheel_path)==0:
                    raise src.SatException(msg_pip_not_found %\
                        (p_info.name, p_info.version, pip_wheels_dir))
                if len(pip_wheel_path)>1:
                    raise src.SatException(msg_pip_two_or_more %\
                        (p_info.name, p_info.version, pip_wheels_dir))
                pip_wheel_name=os.path.basename(pip_wheel_path[0])
                d_archives[p_name+" (pip wheel)"]=(pip_wheel_path[0],
                    os.path.join(ARCHIVE_DIR, "wheels", pip_wheel_name))
            # this product is not managed by archive,
            # an archive of the vcs directory will be created by get_archive_vcs
            l_pinfo_vcs.append((p_name, p_info))

    return d_archives, l_pinfo_vcs
def add_salomeTools(config, tmp_working_dir):
    '''Prepare a version of salomeTools that has a specific local.pyconf file
       configured for a source package.

    :param config Config: The global configuration.
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
    :return: The path to the local salomeTools directory to add in the package
    # Copy sat in the temporary working directory
    sat_tmp_path = src.Path(os.path.join(tmp_working_dir, "salomeTools"))
    sat_running_path = src.Path(config.VARS.salometoolsway)
    sat_running_path.copy(sat_tmp_path)

    # Update the local.pyconf file that contains the path to the project
    local_pyconf_name = "local.pyconf"
    local_pyconf_dir = os.path.join(tmp_working_dir, "salomeTools", "data")
    local_pyconf_file = os.path.join(local_pyconf_dir, local_pyconf_name)
    # Remove the .pyconf file in the root directory of salomeTools if there is
    # any. (For example when launching jobs, a pyconf file describing the jobs
    # can be here and is not useful)
    files_or_dir_SAT = os.listdir(os.path.join(tmp_working_dir, "salomeTools"))
    for file_or_dir in files_or_dir_SAT:
        if file_or_dir.endswith(".pyconf") or file_or_dir.endswith(".txt"):
            file_path = os.path.join(tmp_working_dir,
    ff = open(local_pyconf_file, "w")
    ff.write(LOCAL_TEMPLATE)

    return sat_tmp_path.path
def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir):
    '''For sources package that require that all products are get using an
       archive, one has to create some archive for the vcs products.
       So this method calls the clean and source command of sat and then create

    :param l_pinfo_vcs List: The list of specific configuration corresponding to
    :param sat Sat: The Sat instance that can be called to clean and source the
    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
    :return: the dictionary that stores all the archives to add in the source
             package. {label : (path_on_local_machine, path_in_archive)}
    # clean the source directory of all the vcs products, then use the source
    # command and thus construct an archive that will not contain the patches
    l_prod_names = [pn for pn, __ in l_pinfo_vcs]
    if False: # clean is dangerous in user/SOURCES, fixed in tmp_local_working_dir
        logger.write(_("\nclean sources\n"))
        args_clean = config.VARS.application
        args_clean += " --sources --products "
        args_clean += ",".join(l_prod_names)
        logger.write("WARNING: get_archives_vcs clean\n '%s'\n" % args_clean, 1)
        sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)

        logger.write(_("get sources\n"))
        args_source = config.VARS.application
        args_source += " --products "
        args_source += ",".join(l_prod_names)
        svgDir = sat.cfg.APPLICATION.workdir
        tmp_local_working_dir = os.path.join(sat.cfg.APPLICATION.workdir, "tmp_package") # to avoid too much big files in /tmp
        sat.cfg.APPLICATION.workdir = tmp_local_working_dir
        # DBG.write("SSS sat config.APPLICATION.workdir", sat.cfg.APPLICATION, True)
        # DBG.write("sat config id", id(sat.cfg), True)
        # NOTE: the config is not the same id() as the one seen by sat.source()
        # sat.source(args_source, batch=True, verbose=5, logger_add_link = logger)
        source.run(args_source, sat, logger) #use this mode as runner.cfg reference

    # make the new archives
    for pn, pinfo in l_pinfo_vcs:
        path_archive = make_archive(pn, pinfo, tmp_local_working_dir)
        logger.write("make archive vcs '%s'\n" % path_archive)
        d_archives_vcs[pn] = (path_archive,
                              os.path.join(ARCHIVE_DIR, pn + ".tgz"))
    sat.cfg.APPLICATION.workdir = svgDir
    # DBG.write("END sat config", sat.cfg.APPLICATION, True)
    return d_archives_vcs
def make_archive(prod_name, prod_info, where):
    '''Create an archive of a product by searching its source directory.

    :param prod_name str: The name of the product.
    :param prod_info Config: The specific configuration corresponding to the
                             product (its source_dir attribute is archived).
    :param where str: The path of the repository where to put the resulting
                      archive.
    :return: The path of the resulting archive.
    :rtype: str
    '''
    path_targz_prod = os.path.join(where, prod_name + PACKAGE_EXT)
    local_path = prod_info.source_dir
    # use a context manager so the gzip stream is flushed and the archive is
    # closed even if the add fails (otherwise the .tar.gz is left truncated)
    with tarfile.open(path_targz_prod, mode='w:gz') as tar_prod:
        # arcname stores entries under the product name instead of embedding
        # the absolute local path in the archive
        try:
            tar_prod.add(local_path,
                         arcname=prod_name,
                         exclude=exclude_VCS_and_extensions)
        except TypeError:
            # Python >= 3.12 removed the 'exclude' parameter of TarFile.add;
            # emulate it with the 'filter' callback (returning None skips entry)
            tar_prod.add(local_path,
                         arcname=prod_name,
                         filter=lambda ti: (None
                                            if exclude_VCS_and_extensions(ti.name)
                                            else ti))
    return path_targz_prod
def create_project_for_src_package(config, tmp_working_dir, with_vcs, with_ftp):
    '''Create a specific project for a source package.

    :param config Config: The global configuration.
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
                                source package
    :param with_vcs boolean: True if the package is with vcs products (not
                             transformed into archive products)
    :param with_ftp boolean: True if the package use ftp servers to get archives
    :return: The dictionary
             {"project" : (produced project, project path in the archive)}
    :rtype: dict
    '''
    # Create in the working temporary directory the full project tree
    project_tmp_dir = os.path.join(tmp_working_dir, PROJECT_DIR)
    products_pyconf_tmp_dir = os.path.join(project_tmp_dir,
    compil_scripts_tmp_dir = os.path.join(project_tmp_dir,
    env_scripts_tmp_dir = os.path.join(project_tmp_dir,
    patches_tmp_dir = os.path.join(project_tmp_dir,
    application_tmp_dir = os.path.join(project_tmp_dir,
    for directory in [project_tmp_dir,
                      compil_scripts_tmp_dir,
                      application_tmp_dir]:
        src.ensure_path_exists(directory)

    # Create the pyconf that contains the information of the project
    project_pyconf_name = "project.pyconf"
    project_pyconf_file = os.path.join(project_tmp_dir, project_pyconf_name)
    ff = open(project_pyconf_file, "w")
    ff.write(PROJECT_TEMPLATE)
    # append the ftp servers as a colon-separated list, when requested
    if with_ftp and len(config.PATHS.ARCHIVEFTP) > 0:
        ftp_path='ARCHIVEFTP : "'+config.PATHS.ARCHIVEFTP[0]
        for ftpserver in config.PATHS.ARCHIVEFTP[1:]:
            ftp_path=ftp_path+":"+ftpserver
        ff.write("# ftp servers where to search for prerequisite archives\n")
    # add licence paths if any (colon-separated list)
    if len(config.PATHS.LICENCEPATH) > 0:
        licence_path='LICENCEPATH : "'+config.PATHS.LICENCEPATH[0]
        for path in config.PATHS.LICENCEPATH[1:]:
            licence_path=licence_path+":"+path
        ff.write("\n# Where to search for licences\n")
        ff.write(licence_path)

    # Loop over the products to get their pyconf and all the scripts
    # (compilation, environment, patches)
    # and create the pyconf file to add to the project
    lproducts_name = config.APPLICATION.products.keys()
    l_products = src.product.get_products_infos(lproducts_name, config)
    for p_name, p_info in l_products:
        # skip product with property not_in_package set to yes
        if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
        find_product_scripts_and_pyconf(p_name,
                                        compil_scripts_tmp_dir,
                                        env_scripts_tmp_dir,
                                        products_pyconf_tmp_dir)

    # for the application pyconf, we write directly the config
    # don't search for the original pyconf file
    # to avoid problems with overwrite sections and rm_products key
    write_application_pyconf(config, application_tmp_dir)

    d_project = {"project" : (project_tmp_dir, PROJECT_DIR )}
def find_product_scripts_and_pyconf(p_name,
                                    compil_scripts_tmp_dir,
                                    env_scripts_tmp_dir,
                                    products_pyconf_tmp_dir):
    '''Create a specific pyconf file for a given product. Get its environment
    script, its compilation script and patches and put it in the temporary
    working directory. This method is used in the source package in order to
    construct the specific project.

    :param p_name str: The name of the product.
    :param p_info Config: The specific configuration corresponding to the
                          product.
    :param config Config: The global configuration.
    :param with_vcs boolean: True if the package is with vcs products (not
                             transformed into archive products)
    :param compil_scripts_tmp_dir str: The path to the temporary compilation
                                       scripts directory of the project.
    :param env_scripts_tmp_dir str: The path to the temporary environment script
                                    directory of the project.
    :param patches_tmp_dir str: The path to the temporary patch scripts
                                directory of the project.
    :param products_pyconf_tmp_dir str: The path to the temporary product
                                        scripts directory of the project.
    '''
    # read the pyconf of the product
    product_pyconf_cfg = src.pyconf.Config(p_info.from_file)

    # find the compilation script if any
    if src.product.product_has_script(p_info):
        compil_script_path = src.Path(p_info.compil_script)
        compil_script_path.copy(compil_scripts_tmp_dir)

    # find the environment script if any
    if src.product.product_has_env_script(p_info):
        env_script_path = src.Path(p_info.environ.env_script)
        env_script_path.copy(env_scripts_tmp_dir)

    # find the patches if any
    if src.product.product_has_patches(p_info):
        patches = src.pyconf.Sequence()
        for patch_path in p_info.patches:
            p_path = src.Path(patch_path)
            p_path.copy(patches_tmp_dir)
            patches.append(os.path.basename(patch_path), "")

    if (not with_vcs) and src.product.product_is_vcs(p_info):
        # in non vcs mode, if the product is not archive, then make it become archive.

        # depending upon the incremental mode, select impacted sections
        if "properties" in p_info and "incremental" in p_info.properties and\
           p_info.properties.incremental == "yes":
            sections = ["default", "default_win", p_info.section, p_info.section+"_win"]
            sections = [p_info.section]
        for section in sections:
            # only touch sections that actually define how sources are fetched
            if section in product_pyconf_cfg and "get_source" in product_pyconf_cfg[section]:
                DBG.write("sat package set archive mode to archive for product %s and section %s" %\
                product_pyconf_cfg[section].get_source = "archive"
                if not "archive_info" in product_pyconf_cfg[section]:
                    product_pyconf_cfg[section].addMapping("archive_info",
                                                           src.pyconf.Mapping(product_pyconf_cfg),
                product_pyconf_cfg[section].archive_info.archive_name =\
                    p_info.name + ".tgz"

    # write the pyconf file to the temporary project location
    product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir,
    ff = open(product_tmp_pyconf_path, 'w')
    ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
    product_pyconf_cfg.__save__(ff, 1)
def write_application_pyconf(config, application_tmp_dir):
    '''Write the application pyconf file in the specific temporary
    directory containing the specific project of a source package.

    :param config Config: The global configuration.
    :param application_tmp_dir str: The path to the temporary application
                                    scripts directory of the project.
    '''
    application_name = config.VARS.application
    # write the pyconf file to the temporary application location
    application_tmp_pyconf_path = os.path.join(application_tmp_dir,
                                               application_name + ".pyconf")
    with open(application_tmp_pyconf_path, 'w') as f:
        f.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
        res = src.pyconf.Config()
        # work on a deep copy so the running configuration is not modified
        app = src.pyconf.deepCopyMapping(config.APPLICATION)

        # no base in packages

        # Change the workdir so the unpacked application resolves its paths
        # relatively to the sat installation on the target machine
        app.workdir = src.pyconf.Reference(
                                           'VARS.salometoolsway + $VARS.sep + ".."')
        res.addMapping("APPLICATION", app, "")
        # dump without evaluating the references
        res.__save__(f, evaluated=False)
def sat_package(config, tmp_working_dir, options, logger):
    '''Prepare a dictionary that stores all the needed directories and files to
    add in a salomeTool package.

    :param config Config: The global configuration.
    :param tmp_working_dir str: The temporary local working directory
    :param options OptResult: the options of the launched command
    :param logger Logger: the logging instance used to report progress
    :return: the dictionary that stores all the needed directories and files to
             add in a salomeTool package.
             {label : (path_on_local_machine, path_in_archive)}
    :rtype: dict
    '''
    # we include sat itself
    d_project["all_sat"]=(config.VARS.salometoolsway, "")

    # and we overwrite local.pyconf with a clean version.
    local_pyconf_tmp_path = os.path.join(tmp_working_dir, "local.pyconf")
    local_file_path = os.path.join(config.VARS.datadir, "local.pyconf")
    local_cfg = src.pyconf.Config(local_file_path)
    # reset the project list of the packaged sat
    local_cfg.PROJECTS.project_file_paths=src.pyconf.Sequence(local_cfg.PROJECTS)
    # neutralize machine-specific local settings so the packaged sat starts
    # from defaults on the target machine
    local_cfg.LOCAL["base"] = "default"
    local_cfg.LOCAL["workdir"] = "default"
    local_cfg.LOCAL["log_dir"] = "default"
    local_cfg.LOCAL["archive_dir"] = "default"
    local_cfg.LOCAL["VCS"] = "None"
    local_cfg.LOCAL["tag"] = src.get_salometool_version(config)

    # if the archive contains a project, we write its relative path in local.pyconf
    project_arch_path = os.path.join("projects", options.project,
                                     os.path.basename(options.project_file_path))
    local_cfg.PROJECTS.project_file_paths.append(project_arch_path, "")

    ff = open(local_pyconf_tmp_path, 'w')
    local_cfg.__save__(ff, 1)

    d_project["local.pyconf"]=(local_pyconf_tmp_path, "data/local.pyconf")
def project_package(config, name_project, project_file_path, ftp_mode, tmp_working_dir, embedded_in_sat, logger):
    '''Prepare a dictionary that stores all the needed directories and files to
    add in a project package.

    :param config Config: The global configuration.
    :param name_project str: The name of the project to package.
    :param project_file_path str: The path to the local project.
    :param ftp_mode boolean: Do not embed archives, the archive will rely on ftp mode to retrieve them.
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
                                project package
    :param embedded_in_sat boolean : the project package is embedded in a sat package
    :param logger Logger: the logging instance used to report progress
    :return: the dictionary that stores all the needed directories and files to
             add in a project package.
             {label : (path_on_local_machine, path_in_archive)}
    :rtype: dict
    '''
    # Read the project file and get the directories to add to the package
    project_pyconf_cfg = config.PROJECTS.projects.__getattr__(name_project)
    WARNING: inexisting config.PROJECTS.projects.%s, try to read now from:\n%s\n""" % (name_project, project_file_path))
    project_pyconf_cfg = src.pyconf.Config(project_file_path)
    project_pyconf_cfg.PWD = os.path.dirname(project_file_path)

    # archive-relative directory layout of the packaged project
    paths = {"APPLICATIONPATH" : "applications",
             "PRODUCTPATH" : "products",
             "MACHINEPATH" : "machines"}
    paths["ARCHIVEPATH"] = "archives"

    # Loop over the project paths and add it
    project_file_name = os.path.basename(project_file_path)
        if path not in project_pyconf_cfg:
            dest_path = os.path.join("projects", name_project, paths[path])
            project_file_dest = os.path.join("projects", name_project, project_file_name)
            dest_path = paths[path]
            project_file_dest = project_file_name

        # Add the directory to the files to add in the package
        d_project[path] = (project_pyconf_cfg[path], dest_path)

        # Modify the value of the path in the package
        project_pyconf_cfg[path] = src.pyconf.Reference(
                                           'project_path + "/' + paths[path] + '"')

    # Modify some values
    if "project_path" not in project_pyconf_cfg:
        project_pyconf_cfg.addMapping("project_path",
                                      src.pyconf.Mapping(project_pyconf_cfg),
    project_pyconf_cfg.project_path = src.pyconf.Reference(project_pyconf_cfg,

    # we don't want to export these two fields
    project_pyconf_cfg.__delitem__("file_path")
    project_pyconf_cfg.__delitem__("PWD")
    project_pyconf_cfg.__delitem__("ARCHIVEPATH")

    # Write the project pyconf file
    project_pyconf_tmp_path = os.path.join(tmp_working_dir, project_file_name)
    ff = open(project_pyconf_tmp_path, 'w')
    ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
    project_pyconf_cfg.__save__(ff, 1)

    d_project["Project hat file"] = (project_pyconf_tmp_path, project_file_dest)
def add_readme(config, options, where):
    '''Create the README file of the package and return its path.

    :param config Config: The global configuration.
    :param options OptResult: the options of the launched command
    :param where str: The path of the directory where to create the README
    :return: The path of the produced README file
    :rtype: str
    '''
    readme_path = os.path.join(where, "README")
    with codecs.open(readme_path, "w", 'utf-8') as f:

        # templates for building the header
# This package was generated with sat $version
# Distribution : $dist

In the following, $$ROOT represents the directory where you have installed
SALOME (the directory where this file is located).

        # on Windows, %ROOT% replaces the unix-style $$ROOT placeholder
        if src.architecture.is_windows():
            readme_header = readme_header.replace('$$ROOT','%ROOT%')
        readme_compilation_with_binaries="""

compilation based on the binaries used as prerequisites
=======================================================

If you fail to compile the complete application (for example because
you are not root on your system and cannot install missing packages), you
may try a partial compilation based on the binaries.
For that it is necessary to copy the binaries from BINARIES to INSTALL,
and do some substitutions on cmake and .la files (replace the build directories

The procedure to do it is:
 1) Remove or rename INSTALL directory if it exists
 2) Execute the shell script install_bin.sh:
 3) Use SalomeTool (as explained in Sources section) and compile only the
    modules you need to (with -p option)

        readme_header_tpl=string.Template(readme_header)
        # per-section README templates shipped with sat
        readme_template_path_bin = os.path.join(config.VARS.internal_dir,
                                                "README_BIN.template")
        readme_template_path_bin_launcher = os.path.join(config.VARS.internal_dir,
                                                         "README_LAUNCHER.template")
        readme_template_path_bin_virtapp = os.path.join(config.VARS.internal_dir,
                                                        "README_BIN_VIRTUAL_APP.template")
        readme_template_path_src = os.path.join(config.VARS.internal_dir,
                                                "README_SRC.template")
        readme_template_path_pro = os.path.join(config.VARS.internal_dir,
                                                "README_PROJECT.template")
        readme_template_path_sat = os.path.join(config.VARS.internal_dir,
                                                "README_SAT.template")

        # prepare substitution dictionary
        d['user'] = config.VARS.user
        d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
        d['version'] = src.get_salometool_version(config)
        d['dist'] = config.VARS.dist
        f.write(readme_header_tpl.substitute(d)) # write the general header (common)

        if options.binaries or options.sources:
            d['application'] = config.VARS.application
            d['BINARIES'] = config.INTERNAL.config.binary_dir
            d['SEPARATOR'] = config.VARS.sep
            if src.architecture.is_windows():
                d['operatingSystem'] = 'Windows'
                d['PYTHON3'] = 'python3'
                d['ROOT'] = '%ROOT%'
                d['operatingSystem'] = 'Linux'

            f.write("# Application: " + d['application'] + "\n")
            if 'KERNEL' in config.APPLICATION.products:
                VersionSalome = src.get_salome_version(config)
                # Case where SALOME has the launcher that uses the SalomeContext API
                if VersionSalome >= 730:
                    d['launcher'] = config.APPLICATION.profile.launcher_name
                    d['virtual_app'] = 'runAppli' # this info is not used now)

        # write the specific sections
        if options.binaries:
            f.write(src.template.substitute(readme_template_path_bin, d))
            if "virtual_app" in d:
                f.write(src.template.substitute(readme_template_path_bin_virtapp, d))
                f.write(src.template.substitute(readme_template_path_bin_launcher, d))

            f.write(src.template.substitute(readme_template_path_src, d))

        if options.binaries and options.sources and not src.architecture.is_windows():
            f.write(readme_compilation_with_binaries)

            f.write(src.template.substitute(readme_template_path_pro, d))

            f.write(src.template.substitute(readme_template_path_sat, d))
def update_config(config, logger, prop, value):
    '''Remove from config.APPLICATION.products the products that have the property given as input.

    :param config Config: The global config.
    :param logger Logger: The logger instance used to trace the removals.
    :param prop str: The property to filter
    :param value str: The value of the property to filter
    '''
    # without an APPLICATION section (e.g. 'sat package -t') there is
    # nothing to filter
    if "APPLICATION" not in config:
        return
    # collect the matching product names first, then delete them, so the
    # products mapping is never modified while it is being iterated
    doomed = []
    for name in config.APPLICATION.products.keys():
        cfg_prod = src.product.get_product_config(config, name)
        if src.get_property_in_product_cfg(cfg_prod, prop) == value:
            doomed.append(name)
    for name in doomed:
        config.APPLICATION.products.__delitem__(name)
        logger.write("Remove product %s with property %s\n" % (name, prop), 5)
1387 '''method that is called when salomeTools is called with --help option.
1389 :return: The text to display for the package command description.
1393 The package command creates a tar file archive of a product.
1394 There are four kinds of archive, which can be mixed:
1396 1 - The binary archive.
1397 It contains the product installation directories plus a launcher.
1398 2 - The sources archive.
1399 It contains the product archives, a project (the application plus salomeTools).
1400 3 - The project archive.
1401 It contains a project (give the project file path as argument).
1402 4 - The salomeTools archive.
1403 It contains code utility salomeTools.
1406 >> sat package SALOME-master --binaries --sources""")
def run(args, runner, logger):
    '''method that is called when salomeTools is called with package parameter.

    :param args: The arguments given to the 'sat package' command line.
    :param runner Sat: The sat instance, giving access to the configuration.
    :param logger Logger: The logging instance used to report progress.
    '''
    (options, args) = parser.parse_args(args)

    # Check that a type of package is called, and only one
    all_option_types = (options.binaries,
                        options.project not in ["", None],

    # Check if no option for package type
    if all_option_types.count(True) == 0:
        msg = _("Error: Precise a type for the package\nUse one of the "
                "following options: --binaries, --sources, --project or"
        logger.write(src.printcolors.printcError(msg), 1)
        logger.write("\n", 1)

    # The repository where to put the package if not Binary or Source
    package_default_path = runner.cfg.LOCAL.workdir

    # if the package contains binaries or sources:
    if options.binaries or options.sources:
        # Check that the command has been called with an application
        src.check_config_has_application(runner.cfg)

        # Display information
        logger.write(_("Packaging application %s\n") % src.printcolors.printcLabel(
                                                    runner.cfg.VARS.application), 1)

        # Get the default directory where to put the packages
        package_default_path = os.path.join(runner.cfg.APPLICATION.workdir, "PACKAGE")
        src.ensure_path_exists(package_default_path)

    # if the package contains a project:
        # check that the project is visible by SAT
        projectNameFile = options.project + ".pyconf"
        for i in runner.cfg.PROJECTS.project_file_paths:
            baseName = os.path.basename(i)
            if baseName == projectNameFile:
        if foundProject is None:
            local_path = os.path.join(runner.cfg.VARS.salometoolsway, "data", "local.pyconf")
            msg = _("""ERROR: the project %(1)s is not visible by salomeTools.
Please add it in file:
{"1": options.project, "2": "\n ".join(runner.cfg.PROJECTS.project_file_paths), "3": local_path})
            logger.write(src.printcolors.printcError(msg), 1)
            logger.write("\n", 1)
        options.project_file_path = foundProject
        src.printcolors.print_value(logger, "Project path", options.project_file_path, 2)

    # Remove the products that are filtered by the --without_properties option
    if options.without_properties:
        prop, value = options.without_properties
        update_config(runner.cfg, logger, prop, value)

    # Remove from config the products that have the not_in_package property
    update_config(runner.cfg, logger, "not_in_package", "yes")

    # for binary packages without sources, remove compile time products
    if options.binaries and (not options.sources):
        update_config(runner.cfg, logger, "compile_time", "yes")

    # get the name of the archive or build it
        if os.path.basename(options.name) == options.name:
            # only a name (not a path)
            archive_name = options.name
            dir_name = package_default_path
            archive_name = os.path.basename(options.name)
            dir_name = os.path.dirname(options.name)

        # suppress extension
        if archive_name[-len(".tgz"):] == ".tgz":
            archive_name = archive_name[:-len(".tgz")]
        if archive_name[-len(".tar.gz"):] == ".tar.gz":
            archive_name = archive_name[:-len(".tar.gz")]

        dir_name = package_default_path
        if options.binaries or options.sources:
            archive_name = runner.cfg.APPLICATION.name

        if options.binaries:
            archive_name += "-"+runner.cfg.VARS.dist

            archive_name += "-SRC"
            if options.with_vcs:
                archive_name += "-VCS"

            archive_name += ("salomeTools_" + src.get_salometool_version(runner.cfg))

            archive_name += ("satproject_" + options.project)

    if len(archive_name)==0: # no option worked
        msg = _("Error: Cannot name the archive\n"
                " check if at least one of the following options was "
                "selected : --binaries, --sources, --project or"
        logger.write(src.printcolors.printcError(msg), 1)
        logger.write("\n", 1)

    path_targz = os.path.join(dir_name, archive_name + PACKAGE_EXT)

    src.printcolors.print_value(logger, "Package path", path_targz, 2)

    # Create a working directory for all files that are produced during the
    # package creation and that will be removed at the end of the command
    tmp_working_dir = os.path.join(runner.cfg.VARS.tmp_root, runner.cfg.VARS.datehour)
    src.ensure_path_exists(tmp_working_dir)
    logger.write("\n", 5)
    logger.write(_("The temporary working directory: %s\n" % tmp_working_dir),5)

    logger.write("\n", 3)

    msg = _("Preparation of files to add to the archive")
    logger.write(src.printcolors.printcLabel(msg), 2)
    logger.write("\n", 2)

    d_files_to_add={} # content of the archive

    # a dict to hold paths that will need to be substituted for users recompilations
    d_paths_to_substitute={}

    if options.binaries:
        d_bin_files_to_add = binary_package(runner.cfg,
        # for all binaries dir, store the substitution that will be required
        # for extra compilations
        for key in d_bin_files_to_add:
            if key.endswith("(bin)"):
                source_dir = d_bin_files_to_add[key][0]
                path_in_archive = d_bin_files_to_add[key][1].replace(
                    runner.cfg.INTERNAL.config.binary_dir + runner.cfg.VARS.dist,
                    runner.cfg.INTERNAL.config.install_dir)
                if os.path.basename(source_dir)==os.path.basename(path_in_archive):
                    # if basename is the same we will just substitute the dirname
                    d_paths_to_substitute[os.path.dirname(source_dir)]=\
                        os.path.dirname(path_in_archive)
                    d_paths_to_substitute[source_dir]=path_in_archive

        d_files_to_add.update(d_bin_files_to_add)

        d_files_to_add.update(source_package(runner,
        if options.binaries:
            # for archives with bin and sources we provide a shell script able to
            # install binaries for compilation
            file_install_bin=produce_install_bin_file(runner.cfg,logger,
                                                      d_paths_to_substitute,
            d_files_to_add.update({"install_bin" : (file_install_bin, "install_bin.sh")})
            logger.write("substitutions that need to be done later : \n", 5)
            logger.write(str(d_paths_to_substitute), 5)
            logger.write("\n", 5)

    # --salomeTool option is not considered when --sources is selected, as this option
    # already brings salomeTool!
        d_files_to_add.update(sat_package(runner.cfg, tmp_working_dir,

        DBG.write("config for package %s" % options.project, runner.cfg)
        d_files_to_add.update(project_package(runner.cfg, options.project, options.project_file_path, options.ftp, tmp_working_dir, options.sat, logger))

    if not(d_files_to_add):
        msg = _("Error: Empty dictionnary to build the archive!\n")
        logger.write(src.printcolors.printcError(msg), 1)
        logger.write("\n", 1)

    # Add the README file in the package
    local_readme_tmp_path = add_readme(runner.cfg, options, tmp_working_dir)
    d_files_to_add["README"] = (local_readme_tmp_path, "README")

    # Add the additional files of option add_files
    if options.add_files:
        for file_path in options.add_files:
            if not os.path.exists(file_path):
                msg = _("WARNING: the file %s is not accessible.\n" % file_path)
            file_name = os.path.basename(file_path)
            d_files_to_add[file_name] = (file_path, file_name)

    logger.write("\n", 2)
    logger.write(src.printcolors.printcLabel(_("Actually do the package")), 2)
    logger.write("\n", 2)
    logger.write("\nfiles and directories to add:\n%s\n\n" % PP.pformat(d_files_to_add), 5)

        # Creating the object tarfile
        tar = tarfile.open(path_targz, mode='w:gz')

        # get the filtering function if needed
        filter_function = exclude_VCS_and_extensions

        # Add the files to the tarfile object
        res = add_files(tar, archive_name, d_files_to_add, logger, f_exclude=filter_function)
    except KeyboardInterrupt:
        logger.write(src.printcolors.printcError("\nERROR: forced interruption\n"), 1)
        logger.write(_("Removing the temporary working directory '%s'... ") % tmp_working_dir, 1)
        # remove the working directory
        shutil.rmtree(tmp_working_dir)
        logger.write(_("OK"), 1)
        logger.write(_("\n"), 1)

    # case if no application, only package sat as 'sat package -t'
        app = runner.cfg.APPLICATION

    # unconditionally remove the tmp_local_working_dir
        tmp_local_working_dir = os.path.join(app.workdir, "tmp_package")
        if os.path.isdir(tmp_local_working_dir):
            shutil.rmtree(tmp_local_working_dir)

    # remove the tmp directory, unless user has registered as developer
    if os.path.isdir(tmp_working_dir) and (not DBG.isDeveloper()):
        shutil.rmtree(tmp_working_dir)

    # Print again the path of the package
    logger.write("\n", 2)
    src.printcolors.print_value(logger, "Package path", path_targz, 2)