3 # Copyright (C) 2010-2012 CEA/DEN
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 # Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
30 from application import get_SALOME_modules
31 import src.debug as DBG
# Names of the sub-directories used inside a source package, and the VCS
# metadata that must never be shipped in an archive.
38 ARCHIVE_DIR = "ARCHIVES"
39 PROJECT_DIR = "PROJECT"
41 IGNORED_DIRS = [".git", ".svn"]
42 IGNORED_EXTENSIONS = []
# pyconf template written as <PROJECT_DIR>/project.pyconf inside a source
# package (see create_project_for_src_package below).
44 PROJECT_TEMPLATE = """#!/usr/bin/env python
47 # The path to the archive root directory
48 root_path : $PWD + "/../"
50 project_path : $PWD + "/"
52 # Where to search the archives of the products
53 ARCHIVEPATH : $root_path + "ARCHIVES"
54 # Where to search the pyconf of the applications
55 APPLICATIONPATH : $project_path + "applications/"
56 # Where to search the pyconf of the products
57 PRODUCTPATH : $project_path + "products/"
58 # Where to search the pyconf of the jobs of the project
59 JOBPATH : $project_path + "jobs/"
60 # Where to search the pyconf of the machines of the project
61 MACHINEPATH : $project_path + "machines/"
# local.pyconf template embedded into the packaged salomeTools so that the
# unpacked sat finds the PROJECT directory shipped next to it
# (written by add_salomeTools below).
64 LOCAL_TEMPLATE = ("""#!/usr/bin/env python
72 archive_dir : 'default'
79 project_file_paths : [$VARS.salometoolsway + $VARS.sep + \"..\" + $VARS.sep"""
80 """ + \"""" + PROJECT_DIR + """\" + $VARS.sep + "project.pyconf"]
84 # Define all possible option for the package command : sat package <options>
# NOTE: the parser is built at module import time; each add_option call is
# (short flag, long flag, value type, attribute name, help text, default).
85 parser = src.options.Options()
# -b / -s select the kind of package (binary vs compilable sources).
86 parser.add_option('b', 'binaries', 'boolean', 'binaries',
87     _('Optional: Produce a binary package.'), False)
88 parser.add_option('f', 'force_creation', 'boolean', 'force_creation',
89     _('Optional: Only binary package: produce the archive even if '
90       'there are some missing products.'), False)
91 parser.add_option('s', 'sources', 'boolean', 'sources',
92     _('Optional: Produce a compilable archive of the sources of the '
93       'application.'), False)
94 parser.add_option('', 'with_vcs', 'boolean', 'with_vcs',
95     _('Optional: Do not make archive for products in VCS mode (git, cvs, svn). '
96       'Sat prepare will use VCS mode instead to retrieve them'),
98 parser.add_option('', 'ftp', 'boolean', 'ftp',
99     _('Optional: Do not embed archives for products in archive mode.'
100       'Sat prepare will use ftp instead to retrieve them'),
102 parser.add_option('p', 'project', 'string', 'project',
103     _('Optional: Produce an archive that contains a project.'), "")
104 parser.add_option('t', 'salometools', 'boolean', 'sat',
105     _('Optional: Produce an archive that contains salomeTools.'), False)
106 parser.add_option('n', 'name', 'string', 'name',
107     _('Optional: The name or full path of the archive.'), None)
108 parser.add_option('', 'add_files', 'list2', 'add_files',
109     _('Optional: The list of additional files to add to the archive.'), [])
110 parser.add_option('', 'without_commercial', 'boolean', 'without_commercial',
111     _('Optional: do not add commercial licence.'), False)
112 parser.add_option('', 'without_properties', 'properties', 'without_properties',
113     _('Optional: Filter the products by their properties.\n\tSyntax: '
114       '--without_properties <property>:<value>'))
117 def add_files(tar, name_archive, d_content, logger, f_exclude=None):
118     '''Create an archive containing all directories and files that are given in
119        the d_content argument.
121     :param tar tarfile: The tarfile instance used to make the archive.
122     :param name_archive str: The name of the archive to make.
123     :param d_content dict: The dictionary that contain all directories and files
124                            to add in the archive.
126                     (path_on_local_machine, path_in_archive)
127     :param logger Logger: the logging instance
128     :param f_exclude Function: the function that filters
129     :return: 0 if success, 1 if not.
132     # get the max length of the messages in order to make the display
133     max_len = len(max(d_content.keys(), key=len))
        # NOTE(review): max() raises ValueError if d_content is empty — callers
        # presumably always provide at least one entry; confirm.
136     # loop over each directory or file stored in the d_content dictionary
137     names = sorted(d_content.keys())
138     DBG.write("add tar names", names)
141         # display information
        # pad with dots so that all "name .... path" lines align (max_len-based)
142         len_points = max_len - len(name) + 3
143         local_path, archive_path = d_content[name]
144         in_archive = os.path.join(name_archive, archive_path)
145         logger.write(name + " " + len_points * "." + " "+ in_archive + " ", 3)
146         # Get the local path and the path in archive
147         # of the directory or file to add
148         # Add it in the archive
            # NOTE(review): TarFile.add's "exclude" keyword was deprecated and
            # removed in Python 3.7; recent interpreters need the "filter"
            # callback instead — confirm the targeted Python version.
150             tar.add(local_path, arcname=in_archive, exclude=f_exclude)
151             logger.write(src.printcolors.printcSuccess(_("OK")), 3)
152         except Exception as e:
            # a failed entry is reported (KO + reason) but does not abort the loop
153             logger.write(src.printcolors.printcError(_("KO ")), 3)
154             logger.write(str(e), 3)
156         logger.write("\n", 3)
159 def exclude_VCS_and_extensions(filename):
160     ''' The function that is used to exclude from package the link to the
161         VCS repositories (like .git)
163     :param filename Str: The filename to exclude (or not).
164     :return: True if the file has to be excluded
167     for dir_name in IGNORED_DIRS:
        # substring match: any path containing ".git"/".svn" anywhere is excluded
168         if dir_name in filename:
170     for extension in IGNORED_EXTENSIONS:
171         if filename.endswith(extension):
175 def produce_relative_launcher(config,
180                               with_commercial=True):
181     '''Create a specific SALOME launcher for the binary package. This launcher
184     :param config Config: The global configuration.
185     :param logger Logger: the logging instance
186     :param file_dir str: the directory where to put the launcher
187     :param file_name str: The launcher name
188     :param binaries_dir_name str: the name of the repository where the binaries
190     :return: the path of the produced launcher
194     # get KERNEL installation path
195     kernel_root_dir = os.path.join(binaries_dir_name, "KERNEL")
197     # set kernel bin dir (considering fhs property)
198     kernel_cfg = src.product.get_product_config(config, "KERNEL")
199     if src.get_property_in_product_cfg(kernel_cfg, "fhs"):
200         bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin")
202         bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin","salome")
204     # check if the application contains an application module
205     # check also if the application has a distene product,
206     # in this case get its licence file name
207     l_product_info = src.product.get_products_infos(config.APPLICATION.products.keys(), config)
208     salome_application_name="Not defined"
209     distene_licence_file_name=False
210     for prod_name, prod_info in l_product_info:
211         # look for a "salome application" and a distene product
212         if src.get_property_in_product_cfg(prod_info, "is_distene") == "yes":
213             distene_licence_file_name = src.product.product_has_licence(prod_info,
214                                                             config.PATHS.LICENCEPATH)
215         if src.get_property_in_product_cfg(prod_info, "is_salome_application") == "yes":
216             salome_application_name=prod_info.name
218     # if the application contains an application module, we set ABSOLUTE_APPLI_PATH to it
219     # if not we set it to KERNEL_INSTALL_DIR, which is sufficient, except for salome test
220     if salome_application_name == "Not defined":
221         app_root_dir=kernel_root_dir
223         app_root_dir=os.path.join(binaries_dir_name, salome_application_name)
225     # Get the launcher template and do substitutions
    # python3 applications use a dedicated profile template
226     if "python3" in config.APPLICATION and config.APPLICATION.python3 == "yes":
227         withProfile = src.fileEnviron.withProfile3
229         withProfile = src.fileEnviron.withProfile
    # rewrite the template's hard-coded placeholders into paths relative to
    # out_dir_Path (the unpacking directory of the package)
231     withProfile = withProfile.replace(
232         "ABSOLUTE_APPLI_PATH'] = 'KERNEL_INSTALL_DIR'",
233         "ABSOLUTE_APPLI_PATH'] = out_dir_Path + '" + config.VARS.sep + app_root_dir + "'")
234     withProfile = withProfile.replace(
235         " 'BIN_KERNEL_INSTALL_DIR'",
236         " out_dir_Path + '" + config.VARS.sep + bin_kernel_install_dir + "'")
    # split the template at the marker line; the generated environment is
    # written in between by write_cfgForPy_file below
238     before, after = withProfile.split("# here your local standalone environment\n")
240     # create an environment file writer
241     writer = src.environment.FileEnvWriter(config,
246     filepath = os.path.join(file_dir, file_name)
247     # open the file and write into it
    # NOTE(review): launch_file is not closed in the visible code — confirm a
    # close() exists on an elided line or consider a with-statement.
248     launch_file = open(filepath, "w")
249     launch_file.write(before)
251     writer.write_cfgForPy_file(launch_file,
252                                for_package = binaries_dir_name,
253                                with_commercial=with_commercial)
254     launch_file.write(after)
257     # Little hack to put out_dir_Path outside the strings
258     src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' )
260     # A hack to put a call to a file for distene licence.
261     # It does nothing to an application that has no distene product
262     if distene_licence_file_name:
263         logger.write("Application has a distene licence file! We use it in package launcher", 5)
264         hack_for_distene_licence(filepath, distene_licence_file_name)
266     # change the rights in order to make the file executable for everybody
278 def hack_for_distene_licence(filepath, licence_file):
279     '''Replace the distene licence env variable by a call to a file.
281     :param filepath Str: The path to the launcher to modify.
    # keep the original launcher next to the rewritten one
283     shutil.move(filepath, filepath + "_old")
285     filein = filepath + "_old"
    # NOTE(review): fileout is assigned on an elided line (presumably back to
    # filepath); fin/fout are not closed in the visible code — confirm.
286     fin = open(filein, "r")
287     fout = open(fileout, "w")
288     text = fin.readlines()
289     # Find the Distene section
291     for i,line in enumerate(text):
292         if "# Set DISTENE License" in line:
296     # No distene product, there is nothing to do
    # drop the two original "Set DISTENE License" assignment lines
302     del text[num_line +1]
303     del text[num_line +1]
    # replacement snippet: load the licence file as a module and let it set
    # the distene variables; importlib on >= 3.5, imp.load_source otherwise
304     text_to_insert =""" try:
305       distene_licence_file="%s"
306       if sys.version_info[0] >= 3 and sys.version_info[1] >= 5:
307         import importlib.util
308         spec_dist = importlib.util.spec_from_file_location("distene_licence", distene_licence_file)
309         distene=importlib.util.module_from_spec(spec_dist)
310         spec_dist.loader.exec_module(distene)
313         distene = imp.load_source('distene_licence', distene_licence_file)
314       distene.set_distene_variables(context)
316       pass\n""" % licence_file
317     text.insert(num_line + 1, text_to_insert)
324 def produce_relative_env_files(config,
328     '''Create some specific environment files for the binary package. These
329        files use relative paths.
331     :param config Config: The global configuration.
332     :param logger Logger: the logging instance
333     :param file_dir str: the directory where to put the files
334     :param binaries_dir_name str: the name of the repository where the binaries
336     :return: the list of path of the produced environment files
339     # create an environment file writer
340     writer = src.environment.FileEnvWriter(config,
    # platform-dependent script name: batch file on Windows, shell elsewhere
345     if src.architecture.is_windows():
347         filename  = "env_launch.bat"
350         filename  = "env_launch.sh"
353     filepath = writer.write_env_file(filename,
356                           for_package = binaries_dir_name)
358     # Little hack to put out_dir_Path as environment variable
359     src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )
361     # change the rights in order to make the file executable for everybody
373 def produce_install_bin_file(config,
378     '''Create a bash shell script which do substitutions in BIRARIES dir
379        in order to use it for extra compilations.
381     :param config Config: The global configuration.
382     :param logger Logger: the logging instance
383     :param file_dir str: the directory where to put the files
384     :param d_sub, dict: the dictionnary that contains the substitutions to be done
385     :param file_name str: the name of the install script file
386     :return: the produced file
390     filepath = os.path.join(file_dir, file_name)
391     # open the file and write into it
392     # use codec utf-8 as sat variables are in unicode
393     with codecs.open(filepath, "w", 'utf-8') as installbin_file:
394         installbin_template_path = os.path.join(config.VARS.internal_dir,
395                                         "INSTALL_BIN.template")
397         # build the name of the directory that will contain the binaries
398         binaries_dir_name = "BINARIES-" + config.VARS.dist
399         # build the substitution loop
        # shell fragment: grep for each key, then sed-replace key -> $(pwd)/value
        # in every matching file of the INSTALL tree
400         loop_cmd = "for f in $(grep -RIl"
402             loop_cmd += " -e "+ key
403         loop_cmd += ' INSTALL); do\n     sed -i "\n'
405             loop_cmd += "        s?" + key + "?$(pwd)/" + d_sub[key] + "?g\n"
406         loop_cmd += '            " $f\ndone'
        # values injected into the INSTALL_BIN.template placeholders
409         d["BINARIES_DIR"] = binaries_dir_name
410         d["SUBSTITUTION_LOOP"]=loop_cmd
412         # substitute the template and write it in file
413         content=src.template.substitute(installbin_template_path, d)
414         installbin_file.write(content)
415         # change the rights in order to make the file executable for everybody
427 def product_appli_creation_script(config,
431     '''Create a script that can produce an application (EDF style) in the binary
434     :param config Config: The global configuration.
435     :param logger Logger: the logging instance
436     :param file_dir str: the directory where to put the file
437     :param binaries_dir_name str: the name of the repository where the binaries
439     :return: the path of the produced script file
442     template_name = "create_appli.py.for_bin_packages.template"
443     template_path = os.path.join(config.VARS.internal_dir, template_name)
444     text_to_fill = open(template_path, "r").read()
445     text_to_fill = text_to_fill.replace("TO BE FILLED 1",
446                                         '"' + binaries_dir_name + '"')
    # build one <module .../> XML line per SALOME module of the application
449     for product_name in get_SALOME_modules(config):
450         product_info = src.product.get_product_config(config, product_name)
452         if src.product.product_is_smesh_plugin(product_info):
455         if 'install_dir' in product_info and bool(product_info.install_dir):
456             if src.product.product_is_cpp(product_info):
                # cpp products contribute one module line per generated component
458                 for cpp_name in src.product.get_product_components(product_info):
459                     line_to_add = ("<module name=\"" +
461                        "\" gui=\"yes\" path=\"''' + "
462                        "os.path.join(dir_bin_name, \"" +
463                        cpp_name + "\") + '''\"/>")
466                 line_to_add = ("<module name=\"" +
468                    "\" gui=\"yes\" path=\"''' + "
469                    "os.path.join(dir_bin_name, \"" +
470                    product_name + "\") + '''\"/>")
471             text_to_add += line_to_add + "\n"
473     filled_text = text_to_fill.replace("TO BE FILLED 2", text_to_add)
475     tmp_file_path = os.path.join(file_dir, "create_appli.py")
476     ff = open(tmp_file_path, "w")
477     ff.write(filled_text)
480     # change the rights in order to make the file executable for everybody
481     os.chmod(tmp_file_path,
492 def binary_package(config, logger, options, tmp_working_dir):
493     '''Prepare a dictionary that stores all the needed directories and files to
494        add in a binary package.
496     :param config Config: The global configuration.
497     :param logger Logger: the logging instance
498     :param options OptResult: the options of the launched command
499     :param tmp_working_dir str: The temporary local directory containing some
500                                 specific directories or files needed in the
502     :return: the dictionary that stores all the needed directories and files to
503              add in a binary package.
504              {label : (path_on_local_machine, path_in_archive)}
508     # Get the list of product installation to add to the archive
509     l_products_name = sorted(config.APPLICATION.products.keys())
510     l_product_info = src.product.get_products_infos(l_products_name,
515     l_sources_not_present = []
516     generate_mesa_launcher = False  # a flag to know if we generate a mesa launcher
517     if ("APPLICATION" in config and
518         "properties" in config.APPLICATION and
519         "mesa_launcher_in_package" in config.APPLICATION.properties and
520         config.APPLICATION.properties.mesa_launcher_in_package == "yes") :
521             generate_mesa_launcher=True
    # classify each product: sources to ship, install dirs to ship, missing ones
523     for prod_name, prod_info in l_product_info:
524         # skip product with property not_in_package set to yes
525         if src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes":
528         # Add the sources of the products that have the property
529         # sources_in_package : "yes"
530         if src.get_property_in_product_cfg(prod_info,
531                                            "sources_in_package") == "yes":
532             if os.path.exists(prod_info.source_dir):
533                 l_source_dir.append((prod_name, prod_info.source_dir))
535                 l_sources_not_present.append(prod_name)
537         # ignore the native and fixed products for install directories
538         if (src.product.product_is_native(prod_info)
539                 or src.product.product_is_fixed(prod_info)
540                 or not src.product.product_compiles(prod_info)):
542         if src.product.check_installation(prod_info):
543             l_install_dir.append((prod_name, prod_info.install_dir))
545             l_not_installed.append(prod_name)
547         # Add also the cpp generated modules (if any)
548         if src.product.product_is_cpp(prod_info):
550             for name_cpp in src.product.get_product_components(prod_info):
551                 install_dir = os.path.join(config.APPLICATION.workdir,
553                 if os.path.exists(install_dir):
554                     l_install_dir.append((name_cpp, install_dir))
556                     l_not_installed.append(name_cpp)
558     # check the name of the directory that (could) contains the binaries
559     # from previous detar
560     binaries_from_detar = os.path.join(config.APPLICATION.workdir, "BINARIES-" + config.VARS.dist)
561     if os.path.exists(binaries_from_detar):
563 WARNING: existing binaries directory from previous detar installation:
565 To make new package from this, you have to:
566 1) install binaries in INSTALL directory with the script "install_bin.sh"
567    see README file for more details
568 2) or recompile everything in INSTALL with "sat compile" command
569    this step is long, and requires some linux packages to be installed
571 """ % binaries_from_detar)
573     # Print warning or error if there are some missing products
574     if len(l_not_installed) > 0:
575         text_missing_prods = ""
576         for p_name in l_not_installed:
577             text_missing_prods += "-" + p_name + "\n"
        # without --force_creation a missing install is fatal; with it, only a warning
578         if not options.force_creation:
579             msg = _("ERROR: there are missing products installations:")
580             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
585             msg = _("WARNING: there are missing products installations:")
586             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
590     # Do the same for sources
591     if len(l_sources_not_present) > 0:
592         text_missing_prods = ""
593         for p_name in l_sources_not_present:
594             text_missing_prods += "-" + p_name + "\n"
595         if not options.force_creation:
596             msg = _("ERROR: there are missing products sources:")
597             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
602             msg = _("WARNING: there are missing products sources:")
603             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
607     # construct the name of the directory that will contain the binaries
608     binaries_dir_name = "BINARIES-" + config.VARS.dist
610     # construct the correlation table between the product names, there
611     # actual install directories and there install directory in archive
613     for prod_name, install_dir in l_install_dir:
614         path_in_archive = os.path.join(binaries_dir_name, prod_name)
615         d_products[prod_name + " (bin)"] = (install_dir, path_in_archive)
617     for prod_name, source_dir in l_source_dir:
618         path_in_archive = os.path.join("SOURCES", prod_name)
619         d_products[prod_name + " (sources)"] = (source_dir, path_in_archive)
621     # for packages of SALOME applications including KERNEL,
622     # we produce a salome launcher or a virtual application (depending on salome version)
623     if 'KERNEL' in config.APPLICATION.products:
624         VersionSalome = src.get_salome_version(config)
625         # Case where SALOME has the launcher that uses the SalomeContext API
626         if VersionSalome >= 730:
627             # create the relative launcher and add it to the files to add
628             launcher_name = src.get_launcher_name(config)
629             launcher_package = produce_relative_launcher(config,
634                                                  not(options.without_commercial))
635             d_products["launcher"] = (launcher_package, launcher_name)
637             # if the application contains mesa products, we generate in addition to the
638             # classical salome launcher a launcher using mesa and called mesa_salome
639             # (the mesa launcher will be used for remote usage through ssh).
640             if generate_mesa_launcher:
641                 #if there is one : store the use_mesa property
642                 restore_use_mesa_option=None
643                 if ('properties' in config.APPLICATION and
644                     'use_mesa' in config.APPLICATION.properties):
645                     restore_use_mesa_option = config.APPLICATION.properties.use_mesa
647                 # activate mesa property, and generate a mesa launcher
648                 src.activate_mesa_property(config)  #activate use_mesa property
649                 launcher_mesa_name="mesa_"+launcher_name
650                 launcher_package_mesa = produce_relative_launcher(config,
655                                                      not(options.without_commercial))
656                 d_products["launcher (mesa)"] = (launcher_package_mesa, launcher_mesa_name)
658                 # if there was a use_mesa value, we restore it
659                 # else we set it to the default value "no"
660                 if restore_use_mesa_option != None:
661                     config.APPLICATION.properties.use_mesa=restore_use_mesa_option
663                     config.APPLICATION.properties.use_mesa="no"
666                 # if we mix binaries and sources, we add a copy of the launcher,
667                 # prefixed with "bin",in order to avoid clashes
668                 d_products["launcher (copy)"] = (launcher_package, "bin"+launcher_name)
670         # Provide a script for the creation of an application EDF style
671         appli_script = product_appli_creation_script(config,
676         d_products["appli script"] = (appli_script, "create_appli.py")
678     # Put also the environment file
679     env_file = produce_relative_env_files(config,
684     if src.architecture.is_windows():
685         filename = "env_launch.bat"
687         filename = "env_launch.sh"
688     d_products["environment file"] = (env_file, filename)
692 def source_package(sat, config, logger, options, tmp_working_dir):
693     '''Prepare a dictionary that stores all the needed directories and files to
694        add in a source package.
696     :param config Config: The global configuration.
697     :param logger Logger: the logging instance
698     :param options OptResult: the options of the launched command
699     :param tmp_working_dir str: The temporary local directory containing some
700                                 specific directories or files needed in the
702     :return: the dictionary that stores all the needed directories and files to
703              add in a source package.
704              {label : (path_on_local_machine, path_in_archive)}
709     # Get all the products that are prepared using an archive
710     # unless ftp mode is specified (in this case the user of the
711     # archive will get the sources through the ftp mode of sat prepare
713         logger.write("Find archive products ... ")
714         d_archives, l_pinfo_vcs = get_archives(config, logger)
715         logger.write("Done\n")
718     if not options.with_vcs and len(l_pinfo_vcs) > 0:
719         # Make archives with the products that are not prepared using an archive
720         # (git, cvs, svn, etc)
721         logger.write("Construct archives for vcs products ... ")
722         d_archives_vcs = get_archives_vcs(l_pinfo_vcs,
727         logger.write("Done\n")
730     logger.write("Create the project ... ")
731     d_project = create_project_for_src_package(config,
735     logger.write("Done\n")
    # embed a configured copy of salomeTools itself in the package
738     tmp_sat = add_salomeTools(config, tmp_working_dir)
739     d_sat = {"salomeTools" : (tmp_sat, "salomeTools")}
741     # Add a sat symbolic link if not win
742     if not src.architecture.is_windows():
743         tmp_satlink_path = os.path.join(tmp_working_dir, 'sat')
747             # In the jobs, os.getcwd() can fail
748             t = config.LOCAL.workdir
        # create the relative symlink from inside tmp_working_dir
        # NOTE(review): visible code does not chdir back afterwards — confirm an
        # os.chdir restore exists on an elided line.
749         os.chdir(tmp_working_dir)
750         if os.path.lexists(tmp_satlink_path):
751             os.remove(tmp_satlink_path)
752         os.symlink(os.path.join('salomeTools', 'sat'), 'sat')
755         d_sat["sat link"] = (tmp_satlink_path, "sat")
    # merge everything into a single {label: (local_path, path_in_archive)} dict
757     d_source = src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
760 def get_archives(config, logger):
761     '''Find all the products that are get using an archive and all the products
762        that are get using a vcs (git, cvs, svn) repository.
764     :param config Config: The global configuration.
765     :param logger Logger: the logging instance
766     :return: the dictionary {name_product :
767              (local path of its archive, path in the package of its archive )}
768              and the list of specific configuration corresponding to the vcs
772     # Get the list of product informations
773     l_products_name = config.APPLICATION.products.keys()
774     l_product_info = src.product.get_products_infos(l_products_name,
778     for p_name, p_info in l_product_info:
779         # skip product with property not_in_package set to yes
780         if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
782         # ignore the native and fixed products
783         if (src.product.product_is_native(p_info)
784                 or src.product.product_is_fixed(p_info)):
        # archive products go into d_archives; anything else is vcs-managed
786         if p_info.get_source == "archive":
787             archive_path = p_info.archive_info.archive_name
788             archive_name = os.path.basename(archive_path)
790             l_pinfo_vcs.append((p_name, p_info))
792         d_archives[p_name] = (archive_path,
793                               os.path.join(ARCHIVE_DIR, archive_name))
794     return d_archives, l_pinfo_vcs
796 def add_salomeTools(config, tmp_working_dir):
797     '''Prepare a version of salomeTools that has a specific local.pyconf file
798        configured for a source package.
800     :param config Config: The global configuration.
801     :param tmp_working_dir str: The temporary local directory containing some
802                                 specific directories or files needed in the
804     :return: The path to the local salomeTools directory to add in the package
807     # Copy sat in the temporary working directory
808     sat_tmp_path = src.Path(os.path.join(tmp_working_dir, "salomeTools"))
809     sat_running_path = src.Path(config.VARS.salometoolsway)
810     sat_running_path.copy(sat_tmp_path)
812     # Update the local.pyconf file that contains the path to the project
813     local_pyconf_name = "local.pyconf"
814     local_pyconf_dir = os.path.join(tmp_working_dir, "salomeTools", "data")
815     local_pyconf_file = os.path.join(local_pyconf_dir, local_pyconf_name)
816     # Remove the .pyconf file in the root directory of salomeTools if there is
817     # any. (For example when launching jobs, a pyconf file describing the jobs
818     # can be here and is not useful)
819     files_or_dir_SAT = os.listdir(os.path.join(tmp_working_dir, "salomeTools"))
820     for file_or_dir in files_or_dir_SAT:
821         if file_or_dir.endswith(".pyconf") or file_or_dir.endswith(".txt"):
822             file_path = os.path.join(tmp_working_dir,
    # overwrite the copied local.pyconf with the package-specific template
    # (points at the PROJECT directory shipped next to salomeTools)
827     ff = open(local_pyconf_file, "w")
828     ff.write(LOCAL_TEMPLATE)
831     return sat_tmp_path.path
833 def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir):
834     '''For sources package that require that all products are get using an
835        archive, one has to create some archive for the vcs products.
836        So this method calls the clean and source command of sat and then create
839     :param l_pinfo_vcs List: The list of specific configuration corresponding to
841     :param sat Sat: The Sat instance that can be called to clean and source the
843     :param config Config: The global configuration.
844     :param logger Logger: the logging instance
845     :param tmp_working_dir str: The temporary local directory containing some
846                                 specific directories or files needed in the
848     :return: the dictionary that stores all the archives to add in the source
849              package. {label : (path_on_local_machine, path_in_archive)}
852     # clean the source directory of all the vcs products, then use the source
853     # command and thus construct an archive that will not contain the patches
854     l_prod_names = [pn for pn, __ in l_pinfo_vcs]
    # dead branch kept on purpose: cleaning user SOURCES was judged dangerous,
    # sources are now fetched into a dedicated tmp_package directory instead
855     if False:  # clean is dangerous in user/SOURCES, fixed in tmp_local_working_dir
856         logger.write(_("\nclean sources\n"))
857         args_clean = config.VARS.application
858         args_clean += " --sources --products "
859         args_clean += ",".join(l_prod_names)
860         logger.write("WARNING: get_archives_vcs clean\n         '%s'\n" % args_clean, 1)
861         sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)
864         logger.write(_("get sources\n"))
865         args_source = config.VARS.application
866         args_source += " --products "
867         args_source += ",".join(l_prod_names)
        # temporarily redirect the application workdir so "source" fetches into
        # tmp_package rather than the user's real workdir; restored below
868         svgDir = sat.cfg.APPLICATION.workdir
869         tmp_local_working_dir = os.path.join(sat.cfg.APPLICATION.workdir, "tmp_package")  # to avoid too much big files in /tmp
870         sat.cfg.APPLICATION.workdir = tmp_local_working_dir
871         # DBG.write("SSS sat config.APPLICATION.workdir", sat.cfg.APPLICATION, True)
872         # DBG.write("sat config id", id(sat.cfg), True)
873         # shit as config is not same id() as for sat.source()
874         # sat.source(args_source, batch=True, verbose=5, logger_add_link = logger)
876         source.run(args_source, sat, logger)  #use this mode as runner.cfg reference
878         # make the new archives
880         for pn, pinfo in l_pinfo_vcs:
881             path_archive = make_archive(pn, pinfo, tmp_local_working_dir)
882             logger.write("make archive vcs '%s'\n" % path_archive)
883             d_archives_vcs[pn] = (path_archive,
884                                   os.path.join(ARCHIVE_DIR, pn + ".tgz"))
885         sat.cfg.APPLICATION.workdir = svgDir
886         # DBG.write("END sat config", sat.cfg.APPLICATION, True)
887     return d_archives_vcs
889 def make_archive(prod_name, prod_info, where):
890     '''Create an archive of a product by searching its source directory.
892     :param prod_name str: The name of the product.
893     :param prod_info Config: The specific configuration corresponding to the
895     :param where str: The path of the repository where to put the resulting
897     :return: The path of the resulting archive
900     path_targz_prod = os.path.join(where, prod_name + ".tgz")
901     tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
902     local_path = prod_info.source_dir
    # NOTE(review): TarFile.add's "exclude" keyword was removed in Python 3.7;
    # newer interpreters require the "filter" callback — confirm target Python.
903     tar_prod.add(local_path,
905                  exclude=exclude_VCS_and_extensions)
907     return path_targz_prod
909 def create_project_for_src_package(config, tmp_working_dir, with_vcs, with_ftp):
910     '''Create a specific project for a source package.
912     :param config Config: The global configuration.
913     :param tmp_working_dir str: The temporary local directory containing some
914                                 specific directories or files needed in the
916     :param with_vcs boolean: True if the package is with vcs products (not
917                              transformed into archive products)
918     :param with_ftp boolean: True if the package use ftp servers to get archives
919     :return: The dictionary
920              {"project" : (produced project, project path in the archive)}
924     # Create in the working temporary directory the full project tree
925     project_tmp_dir = os.path.join(tmp_working_dir, PROJECT_DIR)
926     products_pyconf_tmp_dir = os.path.join(project_tmp_dir,
928     compil_scripts_tmp_dir = os.path.join(project_tmp_dir,
931     env_scripts_tmp_dir = os.path.join(project_tmp_dir,
934     patches_tmp_dir = os.path.join(project_tmp_dir,
937     application_tmp_dir = os.path.join(project_tmp_dir,
939     for directory in [project_tmp_dir,
940                       compil_scripts_tmp_dir,
943                       application_tmp_dir]:
944         src.ensure_path_exists(directory)
946     # Create the pyconf that contains the information of the project
947     project_pyconf_name = "project.pyconf"
948     project_pyconf_file = os.path.join(project_tmp_dir, project_pyconf_name)
949     ff = open(project_pyconf_file, "w")
950     ff.write(PROJECT_TEMPLATE)
    # optionally append the ftp servers (colon-separated list) to the project
951     if with_ftp and len(config.PATHS.ARCHIVEFTP) > 0:
952         ftp_path='ARCHIVEFTP : "'+config.PATHS.ARCHIVEFTP[0]
953         for ftpserver in config.PATHS.ARCHIVEFTP[1:]:
954             ftp_path=ftp_path+":"+ftpserver
956         ff.write("# ftp servers where to search for prerequisite archives\n")
958     # add licence paths if any
959     if len(config.PATHS.LICENCEPATH) > 0:
960         licence_path='LICENCEPATH : "'+config.PATHS.LICENCEPATH[0]
961         for path in config.PATHS.LICENCEPATH[1:]:
962             licence_path=licence_path+":"+path
964         ff.write("\n# Where to search for licences\n")
965         ff.write(licence_path)
970     # Loop over the products to get there pyconf and all the scripts
971     # (compilation, environment, patches)
972     # and create the pyconf file to add to the project
973     lproducts_name = config.APPLICATION.products.keys()
974     l_products = src.product.get_products_infos(lproducts_name, config)
975     for p_name, p_info in l_products:
976         # skip product with property not_in_package set to yes
977         if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
979         find_product_scripts_and_pyconf(p_name,
983                                         compil_scripts_tmp_dir,
986                                         products_pyconf_tmp_dir)
    # copy the application pyconf into the project's applications directory
988     find_application_pyconf(config, application_tmp_dir)
990     d_project = {"project" : (project_tmp_dir, PROJECT_DIR )}
993 def find_product_scripts_and_pyconf(p_name,
997 compil_scripts_tmp_dir,
1000 products_pyconf_tmp_dir):
1001 '''Create a specific pyconf file for a given product. Get its environment
1002 script, its compilation script and patches and put it in the temporary
1003 working directory. This method is used in the source package in order to
1004 construct the specific project.
1006 :param p_name str: The name of the product.
1007 :param p_info Config: The specific configuration corresponding to the
1009 :param config Config: The global configuration.
1010 :param with_vcs boolean: True if the package is with vcs products (not
1011 transformed into archive products)
1012 :param compil_scripts_tmp_dir str: The path to the temporary compilation
1013 scripts directory of the project.
1014 :param env_scripts_tmp_dir str: The path to the temporary environment script
1015 directory of the project.
1016 :param patches_tmp_dir str: The path to the temporary patch scripts
1017 directory of the project.
1018 :param products_pyconf_tmp_dir str: The path to the temporary product
1019 scripts directory of the project.
1022 # read the pyconf of the product
1023 product_pyconf_path = src.find_file_in_lpath(p_name + ".pyconf",
1024 config.PATHS.PRODUCTPATH)
# NOTE(review): product_pyconf_cfg is an in-memory copy that is mutated
# below and then written into the temporary project; the original pyconf
# file on disk is left untouched.
1025 product_pyconf_cfg = src.pyconf.Config(product_pyconf_path)
1027 # find the compilation script if any
# Copy the script next to the packaged project and rewrite the pyconf
# entry to the bare file name, so the packaged pyconf refers to the copy.
1028 if src.product.product_has_script(p_info):
1029 compil_script_path = src.Path(p_info.compil_script)
1030 compil_script_path.copy(compil_scripts_tmp_dir)
1031 product_pyconf_cfg[p_info.section].compil_script = os.path.basename(
1032 p_info.compil_script)
1033 # find the environment script if any
1034 if src.product.product_has_env_script(p_info):
1035 env_script_path = src.Path(p_info.environ.env_script)
1036 env_script_path.copy(env_scripts_tmp_dir)
1037 product_pyconf_cfg[p_info.section].environ.env_script = os.path.basename(
1038 p_info.environ.env_script)
1039 # find the patches if any
1040 if src.product.product_has_patches(p_info):
1041 patches = src.pyconf.Sequence()
1042 for patch_path in p_info.patches:
1043 p_path = src.Path(patch_path)
1044 p_path.copy(patches_tmp_dir)
1045 patches.append(os.path.basename(patch_path), "")
1047 product_pyconf_cfg[p_info.section].patches = patches
1050 # put in the pyconf file the resolved values
# NOTE(review): presumably guarded by an elided check that p_info actually
# carries the given vcs info section — confirm against the full file.
1051 for info in ["git_info", "cvs_info", "svn_info"]:
1053 for key in p_info[info]:
1054 product_pyconf_cfg[p_info.section][info][key] = p_info[
1057 # if the product is not archive, then make it become archive.
1058 if src.product.product_is_vcs(p_info):
1059 product_pyconf_cfg[p_info.section].get_source = "archive"
1060 if not "archive_info" in product_pyconf_cfg[p_info.section]:
1061 product_pyconf_cfg[p_info.section].addMapping("archive_info",
1062 src.pyconf.Mapping(product_pyconf_cfg),
1064 product_pyconf_cfg[p_info.section
1065 ].archive_info.archive_name = p_info.name + ".tgz"
1067 # write the pyconf file to the temporary project location
1068 product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir,
1070 ff = open(product_tmp_pyconf_path, 'w')
1071 ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1072 product_pyconf_cfg.__save__(ff, 1)
# NOTE(review): no ff.close() is visible in these lines — confirm the
# full file closes the handle after __save__.
1075 def find_application_pyconf(config, application_tmp_dir):
1076 '''Find the application pyconf file and put it in the specific temporary
1077 directory containing the specific project of a source package.
1079 :param config Config: The global configuration.
1080 :param application_tmp_dir str: The path to the temporary application
1081 scripts directory of the project.
1083 # read the pyconf of the application
1084 application_name = config.VARS.application
1085 application_pyconf_path = src.find_file_in_lpath(
1086 application_name + ".pyconf",
1087 config.PATHS.APPLICATIONPATH)
1088 application_pyconf_cfg = src.pyconf.Config(application_pyconf_path)
1090 # Change the workdir
# Rewrite workdir as a pyconf Reference that resolves to the directory
# above salomeTools in the unpacked archive.
1091 application_pyconf_cfg.APPLICATION.workdir = src.pyconf.Reference(
1092 application_pyconf_cfg,
1094 'VARS.salometoolsway + $VARS.sep + ".."')
1096 # Prevent from compilation in base
1097 application_pyconf_cfg.APPLICATION.no_base = "yes"
1099 #remove products that are not in config (which were filtered by --without_properties)
# NOTE(review): entries are deleted while iterating .keys() — safe only if
# keys() returns a list (Python 2 / pyconf Mapping); confirm for Python 3.
1100 for product_name in application_pyconf_cfg.APPLICATION.products.keys():
1101 if product_name not in config.APPLICATION.products.keys():
1102 application_pyconf_cfg.APPLICATION.products.__delitem__(product_name)
1104 # write the pyconf file to the temporary application location
1105 application_tmp_pyconf_path = os.path.join(application_tmp_dir,
1106 application_name + ".pyconf")
1108 ff = open(application_tmp_pyconf_path, 'w')
1109 ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1110 application_pyconf_cfg.__save__(ff, 1)
# NOTE(review): no ff.close() is visible in these lines — confirm the
# full file closes the handle after __save__.
1113 def sat_package(config, tmp_working_dir, options, logger):
1114 '''Prepare a dictionary that stores all the needed directories and files to
1115 add in a salomeTool package.
1117 :param tmp_working_dir str: The temporary local working directory
1118 :param options OptResult: the options of the launched command
1119 :return: the dictionary that stores all the needed directories and files to
1120 add in a salomeTool package.
1121 {label : (path_on_local_machine, path_in_archive)}
# NOTE(review): the initialization of d_project and the final return are
# on elided lines — the dict is filled below and returned to the caller.
1126 # we include sat himself
1127 d_project["all_sat"]=(config.VARS.salometoolsway, "")
1129 # and we overwrite local.pyconf with a clean version.
1130 local_pyconf_tmp_path = os.path.join(tmp_working_dir, "local.pyconf")
1131 local_file_path = os.path.join(config.VARS.datadir, "local.pyconf")
1132 local_cfg = src.pyconf.Config(local_file_path)
# Reset machine-specific settings to neutral defaults so the packaged
# local.pyconf does not leak this machine's configuration.
1133 local_cfg.PROJECTS.project_file_paths=src.pyconf.Sequence(local_cfg.PROJECTS)
1134 local_cfg.LOCAL["base"] = "default"
1135 local_cfg.LOCAL["workdir"] = "default"
1136 local_cfg.LOCAL["log_dir"] = "default"
1137 local_cfg.LOCAL["archive_dir"] = "default"
1138 local_cfg.LOCAL["VCS"] = "None"
1139 local_cfg.LOCAL["tag"] = src.get_salometool_version(config)
1141 # if the archive contains a project, we write its relative path in local.pyconf
# NOTE(review): presumably guarded by an elided test that options.project
# is set — confirm against the full file.
1143 project_arch_path = os.path.join("projects", options.project,
1144 os.path.basename(options.project_file_path))
1145 local_cfg.PROJECTS.project_file_paths.append(project_arch_path, "")
1147 ff = open(local_pyconf_tmp_path, 'w')
1148 local_cfg.__save__(ff, 1)
# NOTE(review): no ff.close() is visible in these lines — confirm the
# full file closes the handle after __save__.
1150 d_project["local.pyconf"]=(local_pyconf_tmp_path, "data/local.pyconf")
1154 def project_package(config, name_project, project_file_path, ftp_mode, tmp_working_dir, embedded_in_sat, logger):
1155 '''Prepare a dictionary that stores all the needed directories and files to
1156 add in a project package.
1158 :param project_file_path str: The path to the local project.
1159 :param ftp_mode boolean: Do not embed archives, the archive will rely on ftp mode to retrieve them.
1160 :param tmp_working_dir str: The temporary local directory containing some
1161 specific directories or files needed in the
1163 :param embedded_in_sat boolean : the project package is embedded in a sat package
1164 :return: the dictionary that stores all the needed directories and files to
1165 add in a project package.
1166 {label : (path_on_local_machine, path_in_archive)}
1170 # Read the project file and get the directories to add to the package
# NOTE(review): the surrounding try/except (elided lines) falls back to
# reading the project file directly when the project is not registered
# under config.PROJECTS.projects.
1173 project_pyconf_cfg = config.PROJECTS.projects.__getattr__(name_project)
1176 WARNING: inexisting config.PROJECTS.projects.%s, try to read now from:\n%s\n""" % (name_project, project_file_path))
1177 project_pyconf_cfg = src.pyconf.Config(project_file_path)
1178 project_pyconf_cfg.PWD = os.path.dirname(project_file_path)
# Map each project path key to the sub-directory it gets in the archive.
1180 paths = {"APPLICATIONPATH" : "applications",
1181 "PRODUCTPATH" : "products",
1183 "MACHINEPATH" : "machines"}
# NOTE(review): presumably guarded by an elided "if not ftp_mode:" —
# archives are only embedded when ftp mode does not retrieve them.
1185 paths["ARCHIVEPATH"] = "archives"
1187 # Loop over the project paths and add it
1188 project_file_name = os.path.basename(project_file_path)
# NOTE(review): the loop header over paths is on an elided line.
1190 if path not in project_pyconf_cfg:
# NOTE(review): the embedded_in_sat branching is elided — when embedded,
# destinations are placed under projects/<name_project>/.
1193 dest_path = os.path.join("projects", name_project, paths[path])
1194 project_file_dest = os.path.join("projects", name_project, project_file_name)
1196 dest_path = paths[path]
1197 project_file_dest = project_file_name
1199 # Add the directory to the files to add in the package
1200 d_project[path] = (project_pyconf_cfg[path], dest_path)
1202 # Modify the value of the path in the package
1203 project_pyconf_cfg[path] = src.pyconf.Reference(
1206 'project_path + "/' + paths[path] + '"')
1208 # Modify some values
1209 if "project_path" not in project_pyconf_cfg:
1210 project_pyconf_cfg.addMapping("project_path",
1211 src.pyconf.Mapping(project_pyconf_cfg),
1213 project_pyconf_cfg.project_path = src.pyconf.Reference(project_pyconf_cfg,
1216 # we don't want to export these two fields
1217 project_pyconf_cfg.__delitem__("file_path")
1218 project_pyconf_cfg.__delitem__("PWD")
# NOTE(review): presumably under an elided "if ftp_mode:" guard — the
# archive path is dropped when archives are not embedded.
1220 project_pyconf_cfg.__delitem__("ARCHIVEPATH")
1222 # Write the project pyconf file
1223 project_pyconf_tmp_path = os.path.join(tmp_working_dir, project_file_name)
1224 ff = open(project_pyconf_tmp_path, 'w')
1225 ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1226 project_pyconf_cfg.__save__(ff, 1)
1228 d_project["Project hat file"] = (project_pyconf_tmp_path, project_file_dest)
# NOTE(review): the return of d_project is on an elided line; no
# ff.close() is visible either — confirm against the full file.
1232 def add_readme(config, options, where):
# Write the package README into <where> and return its path (the caller
# in run() uses the returned path — the return statement itself is on an
# elided line; confirm against the full file).
1233 readme_path = os.path.join(where, "README")
1234 with codecs.open(readme_path, "w", 'utf-8') as f:
1236 # templates for building the header
1238 # This package was generated with sat $version
1241 # Distribution : $dist
1243 In the following, $$ROOT represents the directory where you have installed
1244 SALOME (the directory where this file is located).
1247 readme_compilation_with_binaries="""
1249 compilation based on the binaries used as prerequisites
1250 =======================================================
1252 If you fail to compile the complete application (for example because
1253 you are not root on your system and cannot install missing packages), you
1254 may try a partial compilation based on the binaries.
1255 For that it is necessary to copy the binaries from BINARIES to INSTALL,
1256 and do some substitutions on cmake and .la files (replace the build directories
1258 The procedure to do it is:
1259 1) Remove or rename INSTALL directory if it exists
1260 2) Execute the shell script install_bin.sh:
1263 3) Use SalomeTool (as explained in Sources section) and compile only the
1264 modules you need to (with -p option)
# Build the header template and resolve the per-section template files
# shipped with salomeTools.
1267 readme_header_tpl=string.Template(readme_header)
1268 readme_template_path_bin = os.path.join(config.VARS.internal_dir,
1269 "README_BIN.template")
1270 readme_template_path_bin_launcher = os.path.join(config.VARS.internal_dir,
1271 "README_LAUNCHER.template")
1272 readme_template_path_bin_virtapp = os.path.join(config.VARS.internal_dir,
1273 "README_BIN_VIRTUAL_APP.template")
1274 readme_template_path_src = os.path.join(config.VARS.internal_dir,
1275 "README_SRC.template")
1276 readme_template_path_pro = os.path.join(config.VARS.internal_dir,
1277 "README_PROJECT.template")
1278 readme_template_path_sat = os.path.join(config.VARS.internal_dir,
1279 "README_SAT.template")
1281 # prepare substitution dictionary
# NOTE(review): the initialization of d is on an elided line.
1283 d['user'] = config.VARS.user
1284 d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
1285 d['version'] = src.get_salometool_version(config)
1286 d['dist'] = config.VARS.dist
1287 f.write(readme_header_tpl.substitute(d)) # write the general header (common)
1289 if options.binaries or options.sources:
1290 d['application'] = config.VARS.application
1291 f.write("# Application: " + d['application'] + "\n")
1292 if 'KERNEL' in config.APPLICATION.products:
1293 VersionSalome = src.get_salome_version(config)
1294 # Case where SALOME has the launcher that uses the SalomeContext API
1295 if VersionSalome >= 730:
1296 d['launcher'] = config.APPLICATION.profile.launcher_name
# NOTE(review): the else branch header (older SALOME) is elided.
1298 d['virtual_app'] = 'runAppli' # this info is not used now
1300 # write the specific sections
1301 if options.binaries:
1302 f.write(src.template.substitute(readme_template_path_bin, d))
1303 if "virtual_app" in d:
1304 f.write(src.template.substitute(readme_template_path_bin_virtapp, d))
# NOTE(review): the else branch header is elided here.
1306 f.write(src.template.substitute(readme_template_path_bin_launcher, d))
# NOTE(review): presumably guarded by an elided "if options.sources:".
1309 f.write(src.template.substitute(readme_template_path_src, d))
1311 if options.binaries and options.sources:
1312 f.write(readme_compilation_with_binaries)
# NOTE(review): presumably guarded by elided "if options.project:" and
# "if options.sat:" respectively — confirm against the full file.
1315 f.write(src.template.substitute(readme_template_path_pro, d))
1318 f.write(src.template.substitute(readme_template_path_sat, d))
def update_config(config, prop, value):
    '''Remove from config.APPLICATION.products every product whose
    configuration carries the given property with the given value.

    :param config Config: The global config.
    :param prop str: The property to filter
    :param value str: The value of the property to filter
    '''
    # Without an APPLICATION section (e.g. "sat package -t") there is
    # nothing to filter.
    if "APPLICATION" not in config:
        return
    # Collect the matching names first, then delete, so the products
    # mapping is never modified while being iterated.
    to_remove = [name
                 for name in config.APPLICATION.products.keys()
                 if src.get_property_in_product_cfg(
                        src.product.get_product_config(config, name),
                        prop) == value]
    for name in to_remove:
        config.APPLICATION.products.__delitem__(name)
1340 '''method that is called when salomeTools is called with --help option.
1342 :return: The text to display for the package command description.
1346 The package command creates a tar file archive of a product.
1347 There are four kinds of archive, which can be mixed:
1349 1 - The binary archive.
1350 It contains the product installation directories plus a launcher.
1351 2 - The sources archive.
1352 It contains the product archives, a project (the application plus salomeTools).
1353 3 - The project archive.
1354 It contains a project (give the project file path as argument).
1355 4 - The salomeTools archive.
1356 It contains code utility salomeTools.
1359 >> sat package SALOME-master --binaries --sources""")
1361 def run(args, runner, logger):
1362 '''method that is called when salomeTools is called with package parameter.
1366 (options, args) = parser.parse_args(args)
1368 # Check that a type of package is called, and only one
# NOTE(review): the tuple below is partially on elided lines (sources/sat
# entries presumably) — confirm against the full file.
1369 all_option_types = (options.binaries,
1371 options.project not in ["", None],
1374 # Check if no option for package type
1375 if all_option_types.count(True) == 0:
1376 msg = _("Error: Precise a type for the package\nUse one of the "
1377 "following options: --binaries, --sources, --project or"
1379 logger.write(src.printcolors.printcError(msg), 1)
1380 logger.write("\n", 1)
# NOTE(review): an error return presumably follows here (elided line).
1383 # The repository where to put the package if not Binary or Source
1384 package_default_path = runner.cfg.LOCAL.workdir
1386 # if the package contains binaries or sources:
1387 if options.binaries or options.sources:
1388 # Check that the command has been called with an application
1389 src.check_config_has_application(runner.cfg)
1391 # Display information
1392 logger.write(_("Packaging application %s\n") % src.printcolors.printcLabel(
1393 runner.cfg.VARS.application), 1)
1395 # Get the default directory where to put the packages
1396 package_default_path = os.path.join(runner.cfg.APPLICATION.workdir, "PACKAGE")
1397 src.ensure_path_exists(package_default_path)
1399 # if the package contains a project:
# NOTE(review): the "if options.project:" guard and the initialization of
# foundProject are on elided lines.
1401 # check that the project is visible by SAT
1402 projectNameFile = options.project + ".pyconf"
1404 for i in runner.cfg.PROJECTS.project_file_paths:
1405 baseName = os.path.basename(i)
1406 if baseName == projectNameFile:
# NOTE(review): the assignment of foundProject (and break) is elided.
1410 if foundProject is None:
1411 local_path = os.path.join(runner.cfg.VARS.salometoolsway, "data", "local.pyconf")
1412 msg = _("""ERROR: the project %(1)s is not visible by salomeTools.
1416 Please add it in file:
1418 {"1": options.project, "2": "\n ".join(runner.cfg.PROJECTS.project_file_paths), "3": local_path})
1419 logger.write(src.printcolors.printcError(msg), 1)
1420 logger.write("\n", 1)
# NOTE(review): an error return presumably follows here (elided line).
1423 options.project_file_path = foundProject
1424 src.printcolors.print_value(logger, "Project path", options.project_file_path, 2)
1426 # Remove the products that are filtered by the --without_properties option
1427 if options.without_properties:
1428 app = runner.cfg.APPLICATION
1429 logger.trace("without_properties all products:\n %s\n" % PP.pformat(sorted(app.products.keys())))
1430 prop, value = options.without_properties
1431 update_config(runner.cfg, prop, value)
1432 logger.warning("without_properties selected products:\n %s\n" % PP.pformat(sorted(app.products.keys())))
1434 # Remove from config the products that have the not_in_package property
1435 update_config(runner.cfg, "not_in_package", "yes")
1437 # get the name of the archive or build it
# NOTE(review): the "if options.name:" guard and the matching else branch
# headers below are on elided lines.
1439 if os.path.basename(options.name) == options.name:
1440 # only a name (not a path)
1441 archive_name = options.name
1442 dir_name = package_default_path
1444 archive_name = os.path.basename(options.name)
1445 dir_name = os.path.dirname(options.name)
1447 # suppress extension
1448 if archive_name[-len(".tgz"):] == ".tgz":
1449 archive_name = archive_name[:-len(".tgz")]
1450 if archive_name[-len(".tar.gz"):] == ".tar.gz":
1451 archive_name = archive_name[:-len(".tar.gz")]
# NOTE(review): the else branch (no --name given: build the archive name
# from the selected options) starts on an elided line.
1455 dir_name = package_default_path
1456 if options.binaries or options.sources:
1457 archive_name = runner.cfg.APPLICATION.name
1459 if options.binaries:
1460 archive_name += "-"+runner.cfg.VARS.dist
# NOTE(review): an elided "if options.sources:" guard presumably
# precedes the -SRC suffix.
1463 archive_name += "-SRC"
1464 if options.with_vcs:
1465 archive_name += "-VCS"
# NOTE(review): elided "if options.sat:" / "if options.project:" guards
# presumably precede the next two naming branches.
1468 archive_name += ("salomeTools_" + src.get_salometool_version(runner.cfg))
1473 project_name = options.project
1474 archive_name += ("satproject_" + project_name)
1476 if len(archive_name)==0: # no option worked
1477 msg = _("Error: Cannot name the archive\n"
1478 " check if at least one of the following options was "
1479 "selected : --binaries, --sources, --project or"
1481 logger.write(src.printcolors.printcError(msg), 1)
1482 logger.write("\n", 1)
1485 path_targz = os.path.join(dir_name, archive_name + ".tgz")
1487 src.printcolors.print_value(logger, "Package path", path_targz, 2)
1489 # Create a working directory for all files that are produced during the
1490 # package creation and that will be removed at the end of the command
1491 tmp_working_dir = os.path.join(runner.cfg.VARS.tmp_root, runner.cfg.VARS.datehour)
1492 src.ensure_path_exists(tmp_working_dir)
1493 logger.write("\n", 5)
1494 logger.write(_("The temporary working directory: %s\n" % tmp_working_dir),5)
1496 logger.write("\n", 3)
1498 msg = _("Preparation of files to add to the archive")
1499 logger.write(src.printcolors.printcLabel(msg), 2)
1500 logger.write("\n", 2)
1502 d_files_to_add={} # content of the archive
1504 # a dict to hold paths that will need to be substitute for users recompilations
1505 d_paths_to_substitute={}
1507 if options.binaries:
1508 d_bin_files_to_add = binary_package(runner.cfg,
1512 # for all binaries dir, store the substitution that will be required
1513 # for extra compilations
1514 for key in d_bin_files_to_add:
1515 if key.endswith("(bin)"):
1516 source_dir = d_bin_files_to_add[key][0]
1517 path_in_archive = d_bin_files_to_add[key][1].replace("BINARIES-" + runner.cfg.VARS.dist,"INSTALL")
1518 if os.path.basename(source_dir)==os.path.basename(path_in_archive):
1519 # if basename is the same we will just substitute the dirname
1520 d_paths_to_substitute[os.path.dirname(source_dir)]=\
1521 os.path.dirname(path_in_archive)
# NOTE(review): the else branch header for the full-path substitution is
# on an elided line.
1523 d_paths_to_substitute[source_dir]=path_in_archive
1525 d_files_to_add.update(d_bin_files_to_add)
# NOTE(review): an elided "if options.sources:" guard presumably precedes
# the source_package call.
1528 d_files_to_add.update(source_package(runner,
1533 if options.binaries:
1534 # for archives with bin and sources we provide a shell script able to
1535 # install binaries for compilation
1536 file_install_bin=produce_install_bin_file(runner.cfg,logger,
1538 d_paths_to_substitute,
1540 d_files_to_add.update({"install_bin" : (file_install_bin, "install_bin.sh")})
1541 logger.write("substitutions that need to be done later : \n", 5)
1542 logger.write(str(d_paths_to_substitute), 5)
1543 logger.write("\n", 5)
1545 # --salomeTool option is not considered when --sources is selected, as this option
1546 # already brings salomeTool!
1548 d_files_to_add.update(sat_package(runner.cfg, tmp_working_dir,
# NOTE(review): an elided "if options.project:" guard presumably precedes
# the project packaging below.
1552 DBG.write("config for package %s" % project_name, runner.cfg)
1553 d_files_to_add.update(project_package(runner.cfg, project_name, options.project_file_path, options.ftp, tmp_working_dir, options.sat, logger))
1555 if not(d_files_to_add):
# NOTE(review): "dictionnary" typo in the user-facing message below —
# cannot be fixed here without changing runtime output.
1556 msg = _("Error: Empty dictionnary to build the archive!\n")
1557 logger.write(src.printcolors.printcError(msg), 1)
1558 logger.write("\n", 1)
1561 # Add the README file in the package
1562 local_readme_tmp_path = add_readme(runner.cfg, options, tmp_working_dir)
1563 d_files_to_add["README"] = (local_readme_tmp_path, "README")
1565 # Add the additional files of option add_files
1566 if options.add_files:
1567 for file_path in options.add_files:
1568 if not os.path.exists(file_path):
1569 msg = _("WARNING: the file %s is not accessible.\n" % file_path)
# NOTE(review): the warning message built above is not written in the
# visible lines (a logger call / continue is presumably elided).
1571 file_name = os.path.basename(file_path)
1572 d_files_to_add[file_name] = (file_path, file_name)
1574 logger.write("\n", 2)
1575 logger.write(src.printcolors.printcLabel(_("Actually do the package")), 2)
1576 logger.write("\n", 2)
1577 logger.write("\nfiles and directories to add:\n%s\n\n" % PP.pformat(d_files_to_add), 5)
# NOTE(review): a "try:" presumably opens here (elided) — matched by the
# "except KeyboardInterrupt:" below.
1581 # Creating the object tarfile
1582 tar = tarfile.open(path_targz, mode='w:gz')
1584 # get the filtering function if needed
1585 filter_function = exclude_VCS_and_extensions
1587 # Add the files to the tarfile object
1588 res = add_files(tar, archive_name, d_files_to_add, logger, f_exclude=filter_function)
# NOTE(review): tar.close() is not visible in these lines — confirm the
# archive is closed in the full file.
1590 except KeyboardInterrupt:
1591 logger.write(src.printcolors.printcError("\nERROR: forced interruption\n"), 1)
1592 logger.write(_("Removing the temporary working directory '%s'... ") % tmp_working_dir, 1)
1593 # remove the working directory
1594 shutil.rmtree(tmp_working_dir)
1595 logger.write(_("OK"), 1)
1596 logger.write(_("\n"), 1)
# NOTE(review): an error return presumably follows here (elided line).
1599 # case if no application, only package sat as 'sat package -t'
# NOTE(review): presumably guarded by an elided check that APPLICATION
# exists in runner.cfg — confirm against the full file.
1601 app = runner.cfg.APPLICATION
1605 # unconditionaly remove the tmp_local_working_dir
1607 tmp_local_working_dir = os.path.join(app.workdir, "tmp_package")
1608 if os.path.isdir(tmp_local_working_dir):
1609 shutil.rmtree(tmp_local_working_dir)
1611 # remove the tmp directory, unless user has registered as developer
1612 if os.path.isdir(tmp_working_dir) and (not DBG.isDeveloper()):
1613 shutil.rmtree(tmp_working_dir)
1615 # Print again the path of the package
1616 logger.write("\n", 2)
1617 src.printcolors.print_value(logger, "Package path", path_targz, 2)