3 # Copyright (C) 2010-2012 CEA/DEN
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 # Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
30 from application import get_SALOME_modules
31 import src.debug as DBG
# Name of the package sub-directory that holds the product source archives.
ARCHIVE_DIR = "ARCHIVES"
# Name of the package sub-directory that holds the generated sat project.
PROJECT_DIR = "PROJECT"

# VCS bookkeeping directories filtered out of every produced archive.
IGNORED_DIRS = [".git", ".svn"]
# File extensions filtered out of every produced archive (none by default).
IGNORED_EXTENSIONS = []
44 PROJECT_TEMPLATE = """#!/usr/bin/env python
47 # The path to the archive root directory
48 root_path : $PWD + "/../"
50 project_path : $PWD + "/"
52 # Where to search the archives of the products
53 ARCHIVEPATH : $root_path + "ARCHIVES"
54 # Where to search the pyconf of the applications
55 APPLICATIONPATH : $project_path + "applications/"
56 # Where to search the pyconf of the products
57 PRODUCTPATH : $project_path + "products/"
58 # Where to search the pyconf of the jobs of the project
59 JOBPATH : $project_path + "jobs/"
60 # Where to search the pyconf of the machines of the project
61 MACHINEPATH : $project_path + "machines/"
64 LOCAL_TEMPLATE = ("""#!/usr/bin/env python
72 archive_dir : 'default'
79 project_file_paths : [$VARS.salometoolsway + $VARS.sep + \"..\" + $VARS.sep"""
80 """ + \"""" + PROJECT_DIR + """\" + $VARS.sep + "project.pyconf"]
# Define all possible option for the package command : sat package <options>
# NOTE(review): the original line numbering of this excerpt is non-contiguous;
# the trailing default-value argument of some add_option calls below is not
# visible here.
parser = src.options.Options()
parser.add_option('b', 'binaries', 'boolean', 'binaries',
    _('Optional: Produce a binary package.'), False)
parser.add_option('f', 'force_creation', 'boolean', 'force_creation',
    _('Optional: Only binary package: produce the archive even if '
      'there are some missing products.'), False)
parser.add_option('s', 'sources', 'boolean', 'sources',
    _('Optional: Produce a compilable archive of the sources of the '
      'application.'), False)
parser.add_option('', 'with_vcs', 'boolean', 'with_vcs',
    _('Optional: Do not make archive for products in VCS mode (git, cvs, svn). '
      'Sat prepare will use VCS mode instead to retrieve them'),
parser.add_option('', 'ftp', 'boolean', 'ftp',
    _('Optional: Do not embed archives for products in archive mode.'
      'Sat prepare will use ftp instead to retrieve them'),
parser.add_option('p', 'project', 'string', 'project',
    _('Optional: Produce an archive that contains a project.'), "")
parser.add_option('t', 'salometools', 'boolean', 'sat',
    _('Optional: Produce an archive that contains salomeTools.'), False)
parser.add_option('n', 'name', 'string', 'name',
    _('Optional: The name or full path of the archive.'), None)
parser.add_option('', 'add_files', 'list2', 'add_files',
    _('Optional: The list of additional files to add to the archive.'), [])
parser.add_option('', 'without_commercial', 'boolean', 'without_commercial',
    _('Optional: do not add commercial licence.'), False)
parser.add_option('', 'without_properties', 'properties', 'without_properties',
    _('Optional: Filter the products by their properties.\n\tSyntax: '
      '--without_properties <property>:<value>'))
def add_files(tar, name_archive, d_content, logger, f_exclude=None):
    '''Create an archive containing all directories and files that are given in
    the d_content argument.

    NOTE(review): this excerpt is missing lines (non-contiguous original
    numbering): the "for name in names:" header and the "try:" opening the
    per-entry block are not visible here.

    :param tar tarfile: The tarfile instance used to make the archive.
    :param name_archive str: The name of the archive to make.
    :param d_content dict: The dictionary that contain all directories and files
                           to add in the archive.
                           {label : (path_on_local_machine, path_in_archive)}
    :param logger Logger: the logging instance
    :param f_exclude Function: the function that filters entries out of the
                               archive (passed to tarfile's exclude mechanism)
    :return: 0 if success, 1 if not.
    :rtype: int
    '''
    # get the max length of the messages in order to make the display
    # of the dotted progress lines align
    max_len = len(max(d_content.keys(), key=len))

    # loop over each directory or file stored in the d_content dictionary
    names = sorted(d_content.keys())
    DBG.write("add tar names", names)

        # display information
        len_points = max_len - len(name) + 3
        local_path, archive_path = d_content[name]
        in_archive = os.path.join(name_archive, archive_path)
        logger.write(name + " " + len_points * "." + " "+ in_archive + " ", 3)
        # Get the local path and the path in archive
        # of the directory or file to add
        # Add it in the archive
        tar.add(local_path, arcname=in_archive, exclude=f_exclude)
        logger.write(src.printcolors.printcSuccess(_("OK")), 3)
    except Exception as e:
        # best-effort: report the failing entry as KO and keep archiving
        logger.write(src.printcolors.printcError(_("KO ")), 3)
        logger.write(str(e), 3)

    logger.write("\n", 3)
def exclude_VCS_and_extensions(filename, ignored_dirs=None, ignored_extensions=None):
    ''' The function that is used to exclude from package the link to the
    VCS repositories (like .git) and files with unwanted extensions.

    Fix: as excerpted, the function body contained no return statements, so it
    always returned None (falsy) and excluded nothing; the standard
    True/False exclusion logic is restored here. The filter lists are also
    exposed as optional parameters (backward compatible: calling with the
    filename alone keeps the previous module-level behaviour).

    :param filename Str: The filename to exclude (or not).
    :param ignored_dirs List: directory names to filter on; defaults to the
                              module-level IGNORED_DIRS.
    :param ignored_extensions List: file extensions to filter on; defaults to
                                    the module-level IGNORED_EXTENSIONS.
    :return: True if the file has to be excluded
    :rtype: Boolean
    '''
    # fall back to the module-wide filters so the function stays usable
    # directly as the tarfile "exclude" callback (single-argument call)
    if ignored_dirs is None:
        ignored_dirs = IGNORED_DIRS
    if ignored_extensions is None:
        ignored_extensions = IGNORED_EXTENSIONS
    # exclude anything whose path mentions a VCS bookkeeping directory
    for dir_name in ignored_dirs:
        if dir_name in filename:
            return True
    # exclude files carrying a blacklisted extension
    for extension in ignored_extensions:
        if filename.endswith(extension):
            return True
    return False
def produce_relative_launcher(config,
                              with_commercial=True):
    '''Create a specific SALOME launcher for the binary package. This launcher
    uses relative paths.

    NOTE(review): this excerpt is missing lines (non-contiguous original
    numbering): several signature parameters (logger, file_dir, file_name,
    binaries_dir_name), some "else:" headers, the FileEnvWriter argument list
    and the chmod/return tail are not visible here.

    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param file_dir str: the directory where to put the launcher
    :param file_name str: The launcher name
    :param binaries_dir_name str: the name of the repository where the binaries
                                  are, in the archive
    :param with_commercial boolean: presumably whether commercial licence
                                    products are included -- TODO confirm
    :return: the path of the produced launcher
    :rtype: str
    '''
    # get KERNEL installation path
    kernel_root_dir = os.path.join(binaries_dir_name, "KERNEL")

    # set kernel bin dir (considering fhs property)
    kernel_cfg = src.product.get_product_config(config, "KERNEL")
    if src.get_property_in_product_cfg(kernel_cfg, "fhs"):
        bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin")
        bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin","salome")

    # check if the application contains an application module
    # check also if the application has a distene product,
    # in this case get its licence file name
    l_product_info = src.product.get_products_infos(config.APPLICATION.products.keys(), config)
    salome_application_name="Not defined"
    distene_licence_file_name=False
    for prod_name, prod_info in l_product_info:
        # look for a "salome application" and a distene product
        if src.get_property_in_product_cfg(prod_info, "is_distene") == "yes":
            distene_licence_file_name = src.product.product_has_licence(prod_info,
                                            config.PATHS.LICENCEPATH)
        if src.get_property_in_product_cfg(prod_info, "is_salome_application") == "yes":
            salome_application_name=prod_info.name

    # if the application contains an application module, we set ABSOLUTE_APPLI_PATH to it
    # if not we set it to KERNEL_INSTALL_DIR, which is sufficient, except for salome test
    if salome_application_name == "Not defined":
        app_root_dir=kernel_root_dir
        app_root_dir=os.path.join(binaries_dir_name, salome_application_name)

    # Get the launcher template and do substitutions
    # python3 applications use the dedicated python3 profile template
    if "python3" in config.APPLICATION and config.APPLICATION.python3 == "yes":
        withProfile = src.fileEnviron.withProfile3
        withProfile = src.fileEnviron.withProfile

    # rewrite the template's hard-coded paths so they become relative to
    # out_dir_Path (the root of the unpacked archive)
    withProfile = withProfile.replace(
        "ABSOLUTE_APPLI_PATH'] = 'KERNEL_INSTALL_DIR'",
        "ABSOLUTE_APPLI_PATH'] = out_dir_Path + '" + config.VARS.sep + app_root_dir + "'")
    withProfile = withProfile.replace(
        " 'BIN_KERNEL_INSTALL_DIR'",
        " out_dir_Path + '" + config.VARS.sep + bin_kernel_install_dir + "'")

    # split the template at the marker where the environment is injected
    before, after = withProfile.split("# here your local standalone environment\n")

    # create an environment file writer
    writer = src.environment.FileEnvWriter(config,

    filepath = os.path.join(file_dir, file_name)
    # open the file and write into it
    launch_file = open(filepath, "w")
    launch_file.write(before)
    # write the SALOME environment (as python statements) between the two
    # halves of the profile template
    writer.write_cfgForPy_file(launch_file,
                               for_package = binaries_dir_name,
                               with_commercial=with_commercial)
    launch_file.write(after)

    # Little hack to put out_dir_Path outside the strings
    src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' )

    # A hack to put a call to a file for distene licence.
    # It does nothing to an application that has no distene product
    if distene_licence_file_name:
        logger.write("Application has a distene licence file! We use it in package launcher", 5)
        hack_for_distene_licence(filepath, distene_licence_file_name)

    # change the rights in order to make the file executable for everybody
def hack_for_distene_licence(filepath, licence_file):
    '''Replace the distene licence env variable by a call to a file.

    NOTE(review): this excerpt is missing lines (non-contiguous original
    numbering): the definition of "fileout", the "found"/"num_line"
    bookkeeping around the search loop, and the rewrite/close tail are not
    visible here.

    :param filepath Str: The path to the launcher to modify.
    :param licence_file Str: presumably the path to the distene licence
                             python file -- TODO confirm against caller
    '''
    # rewrite the launcher through a renamed copy of the original
    shutil.move(filepath, filepath + "_old")
    filein = filepath + "_old"
    fin = open(filein, "r")
    fout = open(fileout, "w")
    text = fin.readlines()
    # Find the Distene section
    for i,line in enumerate(text):
        if "# Set DISTENE License" in line:
    # No distene product, there is nothing to do
    # drop the two lines that exported the licence variables directly
    del text[num_line +1]
    del text[num_line +1]
    # python snippet inserted in the launcher: load the licence file as a
    # module (importlib on python >= 3.5, imp otherwise) and let it set the
    # distene variables in the launcher context
    text_to_insert =""" try:
    distene_licence_file="%s"
    if sys.version_info[0] >= 3 and sys.version_info[1] >= 5:
        import importlib.util
        spec_dist = importlib.util.spec_from_file_location("distene_licence", distene_licence_file)
        distene=importlib.util.module_from_spec(spec_dist)
        spec_dist.loader.exec_module(distene)
    distene = imp.load_source('distene_licence', distene_licence_file)
    distene.set_distene_variables(context)
    pass\n""" % licence_file
    text.insert(num_line + 1, text_to_insert)
def produce_relative_env_files(config,
    '''Create some specific environment files for the binary package. These
    files use relative paths.

    NOTE(review): this excerpt is missing lines (non-contiguous original
    numbering): the remaining signature parameters (logger, file_dir,
    binaries_dir_name), the FileEnvWriter argument list and the chmod/return
    tail are not visible here.

    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param file_dir str: the directory where to put the files
    :param binaries_dir_name str: the name of the repository where the binaries
                                  are, in the archive
    :return: the list of path of the produced environment files
    :rtype: List
    '''
    # create an environment file writer
    writer = src.environment.FileEnvWriter(config,

    # bash environment file whose paths are anchored on out_dir_Path
    filepath = writer.write_env_file("env_launch.sh",
                                     for_package = binaries_dir_name)

    # Little hack to put out_dir_Path as environment variable
    src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )

    # change the rights in order to make the file executable for everybody
def produce_install_bin_file(config,
    '''Create a bash shell script which do substitutions in BINARIES dir
    in order to use it for extra compilations.

    NOTE(review): this excerpt is missing lines (non-contiguous original
    numbering): the remaining signature parameters (logger, file_dir, d_sub,
    file_name), the "for key in d_sub" loop headers, the initialisation of
    "d" and the chmod/return tail are not visible here.

    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param file_dir str: the directory where to put the files
    :param d_sub, dict: the dictionnary that contains the substitutions to be done
    :param file_name str: the name of the install script file
    :return: the produced file
    :rtype: str
    '''
    filepath = os.path.join(file_dir, file_name)
    # open the file and write into it
    # use codec utf-8 as sat variables are in unicode
    with codecs.open(filepath, "w", 'utf-8') as installbin_file:
        installbin_template_path = os.path.join(config.VARS.internal_dir,
                                                "INSTALL_BIN.template")

        # build the name of the directory that will contain the binaries
        binaries_dir_name = "BINARIES-" + config.VARS.dist
        # build the substitution loop: a grep/sed shell loop that rewrites
        # every key of d_sub into its value inside the INSTALL tree
        loop_cmd = "for f in $(grep -RIl"
            loop_cmd += " -e "+ key
        loop_cmd += ' INSTALL); do\n sed -i "\n'
            loop_cmd += " s?" + key + "?$(pwd)/" + d_sub[key] + "?g\n"
        loop_cmd += ' " $f\ndone'

        d["BINARIES_DIR"] = binaries_dir_name
        d["SUBSTITUTION_LOOP"]=loop_cmd

        # substitute the template and write it in file
        content=src.template.substitute(installbin_template_path, d)
        installbin_file.write(content)
    # change the rights in order to make the file executable for everybody
def product_appli_creation_script(config,
    '''Create a script that can produce an application (EDF style) in the binary
    package.

    NOTE(review): this excerpt is missing lines (non-contiguous original
    numbering): the remaining signature parameters (logger, file_dir,
    binaries_dir_name), the "continue"/"else" branch lines, the
    initialisation of text_to_add and the chmod arguments/return tail are not
    visible here.

    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param file_dir str: the directory where to put the file
    :param binaries_dir_name str: the name of the repository where the binaries
                                  are, in the archive
    :return: the path of the produced script file
    :rtype: Str
    '''
    template_name = "create_appli.py.for_bin_packages.template"
    template_path = os.path.join(config.VARS.internal_dir, template_name)
    text_to_fill = open(template_path, "r").read()
    # the template carries two placeholders: the binaries directory name and
    # the list of <module .../> lines built below
    text_to_fill = text_to_fill.replace("TO BE FILLED 1",
                                        '"' + binaries_dir_name + '"')

    for product_name in get_SALOME_modules(config):
        product_info = src.product.get_product_config(config, product_name)

        if src.product.product_is_smesh_plugin(product_info):

        if 'install_dir' in product_info and bool(product_info.install_dir):
            if src.product.product_is_cpp(product_info):
                for cpp_name in src.product.get_product_components(product_info):
                    line_to_add = ("<module name=\"" +
                                   "\" gui=\"yes\" path=\"''' + "
                                   "os.path.join(dir_bin_name, \"" +
                                   cpp_name + "\") + '''\"/>")
                line_to_add = ("<module name=\"" +
                               "\" gui=\"yes\" path=\"''' + "
                               "os.path.join(dir_bin_name, \"" +
                               product_name + "\") + '''\"/>")
        text_to_add += line_to_add + "\n"

    filled_text = text_to_fill.replace("TO BE FILLED 2", text_to_add)

    # write the filled template next to the other package files
    tmp_file_path = os.path.join(file_dir, "create_appli.py")
    ff = open(tmp_file_path, "w")
    ff.write(filled_text)

    # change the rights in order to make the file executable for everybody
    os.chmod(tmp_file_path,
def binary_package(config, logger, options, tmp_working_dir):
    '''Prepare a dictionary that stores all the needed directories and files to
    add in a binary package.

    NOTE(review): this excerpt is missing lines (non-contiguous original
    numbering): list initialisations, several "continue"/"else" branch lines,
    some call argument lists and the final return are not visible here.

    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param options OptResult: the options of the launched command
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
                                binary package
    :return: the dictionary that stores all the needed directories and files to
             add in a binary package.
             {label : (path_on_local_machine, path_in_archive)}
    :rtype: dict
    '''
    # Get the list of product installation to add to the archive
    l_products_name = sorted(config.APPLICATION.products.keys())
    l_product_info = src.product.get_products_infos(l_products_name,
    l_sources_not_present = []
    generate_mesa_launcher = False  # a flag to know if we generate a mesa launcher
    if ("APPLICATION" in config and
        "properties" in config.APPLICATION and
        "mesa_launcher_in_package" in config.APPLICATION.properties and
        config.APPLICATION.properties.mesa_launcher_in_package == "yes") :
            generate_mesa_launcher=True

    for prod_name, prod_info in l_product_info:
        # skip product with property not_in_package set to yes
        if src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes":

        # Add the sources of the products that have the property
        # sources_in_package : "yes"
        if src.get_property_in_product_cfg(prod_info,
                                           "sources_in_package") == "yes":
            if os.path.exists(prod_info.source_dir):
                l_source_dir.append((prod_name, prod_info.source_dir))
                l_sources_not_present.append(prod_name)

        # ignore the native and fixed products for install directories
        if (src.product.product_is_native(prod_info)
                or src.product.product_is_fixed(prod_info)
                or not src.product.product_compiles(prod_info)):
        if src.product.check_installation(prod_info):
            l_install_dir.append((prod_name, prod_info.install_dir))
            l_not_installed.append(prod_name)

        # Add also the cpp generated modules (if any)
        if src.product.product_is_cpp(prod_info):
            for name_cpp in src.product.get_product_components(prod_info):
                install_dir = os.path.join(config.APPLICATION.workdir,
                if os.path.exists(install_dir):
                    l_install_dir.append((name_cpp, install_dir))
                    l_not_installed.append(name_cpp)

    # check the name of the directory that (could) contains the binaries
    # from previous detar
    binaries_from_detar = os.path.join(config.APPLICATION.workdir, "BINARIES-" + config.VARS.dist)
    if os.path.exists(binaries_from_detar):
WARNING: existing binaries directory from previous detar installation:
To make new package from this, you have to:
1) install binaries in INSTALL directory with the script "install_bin.sh"
   see README file for more details
2) or recompile everything in INSTALL with "sat compile" command
   this step is long, and requires some linux packages to be installed
""" % binaries_from_detar)

    # Print warning or error if there are some missing products
    if len(l_not_installed) > 0:
        text_missing_prods = ""
        for p_name in l_not_installed:
            text_missing_prods += "-" + p_name + "\n"
        if not options.force_creation:
            msg = _("ERROR: there are missing products installations:")
            logger.write("%s\n%s" % (src.printcolors.printcError(msg),
            msg = _("WARNING: there are missing products installations:")
            logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),

    # Do the same for sources
    if len(l_sources_not_present) > 0:
        text_missing_prods = ""
        for p_name in l_sources_not_present:
            text_missing_prods += "-" + p_name + "\n"
        if not options.force_creation:
            msg = _("ERROR: there are missing products sources:")
            logger.write("%s\n%s" % (src.printcolors.printcError(msg),
            msg = _("WARNING: there are missing products sources:")
            logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),

    # construct the name of the directory that will contain the binaries
    binaries_dir_name = "BINARIES-" + config.VARS.dist

    # construct the correlation table between the product names, there
    # actual install directories and there install directory in archive
    for prod_name, install_dir in l_install_dir:
        path_in_archive = os.path.join(binaries_dir_name, prod_name)
        d_products[prod_name + " (bin)"] = (install_dir, path_in_archive)

    for prod_name, source_dir in l_source_dir:
        path_in_archive = os.path.join("SOURCES", prod_name)
        d_products[prod_name + " (sources)"] = (source_dir, path_in_archive)

    # for packages of SALOME applications including KERNEL,
    # we produce a salome launcher or a virtual application (depending on salome version)
    if 'KERNEL' in config.APPLICATION.products:
        VersionSalome = src.get_salome_version(config)
        # Case where SALOME has the launcher that uses the SalomeContext API
        if VersionSalome >= 730:
            # create the relative launcher and add it to the files to add
            launcher_name = src.get_launcher_name(config)
            launcher_package = produce_relative_launcher(config,
                                                 not(options.without_commercial))
            d_products["launcher"] = (launcher_package, launcher_name)

            # if the application contains mesa products, we generate in addition to the
            # classical salome launcher a launcher using mesa and called mesa_salome
            # (the mesa launcher will be used for remote usage through ssh).
            if generate_mesa_launcher:
                #if there is one : store the use_mesa property
                restore_use_mesa_option=None
                if ('properties' in config.APPLICATION and
                    'use_mesa' in config.APPLICATION.properties):
                    restore_use_mesa_option = config.APPLICATION.properties.use_mesa

                # activate mesa property, and generate a mesa launcher
                src.activate_mesa_property(config)  #activate use_mesa property
                launcher_mesa_name="mesa_"+launcher_name
                launcher_package_mesa = produce_relative_launcher(config,
                                                 not(options.without_commercial))
                d_products["launcher (mesa)"] = (launcher_package_mesa, launcher_mesa_name)

                # if there was a use_mesa value, we restore it
                # else we set it to the default value "no"
                if restore_use_mesa_option != None:
                    config.APPLICATION.properties.use_mesa=restore_use_mesa_option
                    config.APPLICATION.properties.use_mesa="no"

            # if we mix binaries and sources, we add a copy of the launcher,
            # prefixed with "bin",in order to avoid clashes
            d_products["launcher (copy)"] = (launcher_package, "bin"+launcher_name)

        # Provide a script for the creation of an application EDF style
        appli_script = product_appli_creation_script(config,
        d_products["appli script"] = (appli_script, "create_appli.py")

    # Put also the environment file
    env_file = produce_relative_env_files(config,
    d_products["environment file"] = (env_file, "env_launch.sh")
def source_package(sat, config, logger, options, tmp_working_dir):
    '''Prepare a dictionary that stores all the needed directories and files to
    add in a source package.

    NOTE(review): this excerpt is missing lines (non-contiguous original
    numbering): some call argument lists, the chdir try/finally bookkeeping
    and the final return are not visible here.

    :param sat Sat: The Sat instance, used to drive sub-commands.
    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param options OptResult: the options of the launched command
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
                                source package
    :return: the dictionary that stores all the needed directories and files to
             add in a source package.
             {label : (path_on_local_machine, path_in_archive)}
    :rtype: dict
    '''
    # Get all the products that are prepared using an archive
    # unless ftp mode is specified (in this case the user of the
    # archive will get the sources through the ftp mode of sat prepare
    logger.write("Find archive products ... ")
    d_archives, l_pinfo_vcs = get_archives(config, logger)
    logger.write("Done\n")

    if not options.with_vcs and len(l_pinfo_vcs) > 0:
        # Make archives with the products that are not prepared using an archive
        # (git, cvs, svn, etc)
        logger.write("Construct archives for vcs products ... ")
        d_archives_vcs = get_archives_vcs(l_pinfo_vcs,
        logger.write("Done\n")

    # create the sat project that drives builds from inside the package
    logger.write("Create the project ... ")
    d_project = create_project_for_src_package(config,
    logger.write("Done\n")

    # embed a copy of salomeTools itself
    tmp_sat = add_salomeTools(config, tmp_working_dir)
    d_sat = {"salomeTools" : (tmp_sat, "salomeTools")}

    # Add a sat symbolic link if not win
    if not src.architecture.is_windows():
        tmp_satlink_path = os.path.join(tmp_working_dir, 'sat')
        # In the jobs, os.getcwd() can fail
        t = config.LOCAL.workdir
        os.chdir(tmp_working_dir)
        if os.path.lexists(tmp_satlink_path):
            os.remove(tmp_satlink_path)
        os.symlink(os.path.join('salomeTools', 'sat'), 'sat')

        d_sat["sat link"] = (tmp_satlink_path, "sat")

    d_source = src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
def get_archives(config, logger):
    '''Find all the products that are get using an archive and all the products
    that are get using a vcs (git, cvs, svn) repository.

    NOTE(review): this excerpt is missing lines (non-contiguous original
    numbering): the dict/list initialisations, the "continue" bodies of the
    skip tests and the "else:" around the vcs case are not visible here.

    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :return: the dictionary {name_product :
             (local path of its archive, path in the package of its archive )}
             and the list of specific configuration corresponding to the vcs
             products
    :rtype: (Dict, List)
    '''
    # Get the list of product informations
    l_products_name = config.APPLICATION.products.keys()
    l_product_info = src.product.get_products_infos(l_products_name,
    for p_name, p_info in l_product_info:
        # skip product with property not_in_package set to yes
        if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":

        # ignore the native and fixed products
        if (src.product.product_is_native(p_info)
                or src.product.product_is_fixed(p_info)):
        if p_info.get_source == "archive":
            archive_path = p_info.archive_info.archive_name
            archive_name = os.path.basename(archive_path)
            l_pinfo_vcs.append((p_name, p_info))

        # archive products are copied under the ARCHIVES sub-directory
        d_archives[p_name] = (archive_path,
                              os.path.join(ARCHIVE_DIR, archive_name))
    return d_archives, l_pinfo_vcs
def add_salomeTools(config, tmp_working_dir):
    '''Prepare a version of salomeTools that has a specific local.pyconf file
    configured for a source package.

    NOTE(review): this excerpt is missing lines (non-contiguous original
    numbering): the tail of the cleanup loop (file removal) and the ff.close()
    are not visible here.

    :param config Config: The global configuration.
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
                                source package
    :return: The path to the local salomeTools directory to add in the package
    :rtype: str
    '''
    # Copy sat in the temporary working directory
    sat_tmp_path = src.Path(os.path.join(tmp_working_dir, "salomeTools"))
    sat_running_path = src.Path(config.VARS.salometoolsway)
    sat_running_path.copy(sat_tmp_path)

    # Update the local.pyconf file that contains the path to the project
    local_pyconf_name = "local.pyconf"
    local_pyconf_dir = os.path.join(tmp_working_dir, "salomeTools", "data")
    local_pyconf_file = os.path.join(local_pyconf_dir, local_pyconf_name)
    # Remove the .pyconf file in the root directory of salomeTools if there is
    # any. (For example when launching jobs, a pyconf file describing the jobs
    # can be here and is not useful)
    files_or_dir_SAT = os.listdir(os.path.join(tmp_working_dir, "salomeTools"))
    for file_or_dir in files_or_dir_SAT:
        if file_or_dir.endswith(".pyconf") or file_or_dir.endswith(".txt"):
            file_path = os.path.join(tmp_working_dir,

    # overwrite the packaged local.pyconf with the package-specific template
    ff = open(local_pyconf_file, "w")
    ff.write(LOCAL_TEMPLATE)

    return sat_tmp_path.path
def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir):
    '''For sources package that require that all products are get using an
    archive, one has to create some archive for the vcs products.
    So this method calls the clean and source command of sat and then create
    the archives.

    NOTE(review): this excerpt is missing lines (non-contiguous original
    numbering): the "source" import/lookup and the d_archives_vcs
    initialisation are not visible here.

    :param l_pinfo_vcs List: The list of specific configuration corresponding to
                             the vcs products
    :param sat Sat: The Sat instance that can be called to clean and source the
                    products
    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
                                source package
    :return: the dictionary that stores all the archives to add in the source
             package. {label : (path_on_local_machine, path_in_archive)}
    :rtype: dict
    '''
    # clean the source directory of all the vcs products, then use the source
    # command and thus construct an archive that will not contain the patches
    l_prod_names = [pn for pn, __ in l_pinfo_vcs]
    if False:  # clean is dangerous in user/SOURCES, fixed in tmp_local_working_dir
        logger.write(_("\nclean sources\n"))
        args_clean = config.VARS.application
        args_clean += " --sources --products "
        args_clean += ",".join(l_prod_names)
        logger.write("WARNING: get_archives_vcs clean\n         '%s'\n" % args_clean, 1)
        sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)

    logger.write(_("get sources\n"))
    args_source = config.VARS.application
    args_source += " --products "
    args_source += ",".join(l_prod_names)
    # redirect the sat working directory to a local tmp_package directory
    # while sourcing, and restore it afterwards
    svgDir = sat.cfg.APPLICATION.workdir
    tmp_local_working_dir = os.path.join(sat.cfg.APPLICATION.workdir, "tmp_package")  # to avoid too much big files in /tmp
    sat.cfg.APPLICATION.workdir = tmp_local_working_dir
    # DBG.write("SSS sat config.APPLICATION.workdir", sat.cfg.APPLICATION, True)
    # DBG.write("sat config id", id(sat.cfg), True)
    # NOTE: config is not same id() as for sat.source(), so the source command
    # is run through the module-level entry point instead of the Sat method
    # sat.source(args_source, batch=True, verbose=5, logger_add_link = logger)
    source.run(args_source, sat, logger)  #use this mode as runner.cfg reference

    # make the new archives
    for pn, pinfo in l_pinfo_vcs:
        path_archive = make_archive(pn, pinfo, tmp_local_working_dir)
        logger.write("make archive vcs '%s'\n" % path_archive)
        d_archives_vcs[pn] = (path_archive,
                              os.path.join(ARCHIVE_DIR, pn + ".tgz"))
    sat.cfg.APPLICATION.workdir = svgDir
    # DBG.write("END sat config", sat.cfg.APPLICATION, True)
    return d_archives_vcs
def make_archive(prod_name, prod_info, where):
    '''Create an archive of a product by searching its source directory.

    NOTE(review): this excerpt is missing lines (non-contiguous original
    numbering): the "arcname" argument of tar_prod.add and the tarfile close
    are not visible here.

    :param prod_name str: The name of the product.
    :param prod_info Config: The specific configuration corresponding to the
                             product
    :param where str: The path of the repository where to put the resulting
                      archive
    :return: The path of the resulting archive
    :rtype: str
    '''
    path_targz_prod = os.path.join(where, prod_name + ".tgz")
    # gzip-compressed tar of the product source directory, with VCS
    # bookkeeping files filtered out
    tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
    local_path = prod_info.source_dir
    tar_prod.add(local_path,
                 exclude=exclude_VCS_and_extensions)
    return path_targz_prod
def create_project_for_src_package(config, tmp_working_dir, with_vcs, with_ftp):
    '''Create a specific project for a source package.

    NOTE(review): this excerpt is missing lines (non-contiguous original
    numbering): several os.path.join argument lists, "continue" bodies, some
    ff.write calls and the final return are not visible here.

    :param config Config: The global configuration.
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
                                source package
    :param with_vcs boolean: True if the package is with vcs products (not
                             transformed into archive products)
    :param with_ftp boolean: True if the package use ftp servers to get archives
    :return: The dictionary
             {"project" : (produced project, project path in the archive)}
    :rtype: Dict
    '''
    # Create in the working temporary directory the full project tree
    project_tmp_dir = os.path.join(tmp_working_dir, PROJECT_DIR)
    products_pyconf_tmp_dir = os.path.join(project_tmp_dir,
    compil_scripts_tmp_dir = os.path.join(project_tmp_dir,
    env_scripts_tmp_dir = os.path.join(project_tmp_dir,
    patches_tmp_dir = os.path.join(project_tmp_dir,
    application_tmp_dir = os.path.join(project_tmp_dir,
    for directory in [project_tmp_dir,
                      compil_scripts_tmp_dir,
                      application_tmp_dir]:
        src.ensure_path_exists(directory)

    # Create the pyconf that contains the information of the project
    project_pyconf_name = "project.pyconf"
    project_pyconf_file = os.path.join(project_tmp_dir, project_pyconf_name)
    ff = open(project_pyconf_file, "w")
    ff.write(PROJECT_TEMPLATE)
    # declare the ftp servers that host the prerequisite archives, if any
    if with_ftp and len(config.PATHS.ARCHIVEFTP) > 0:
        ftp_path='ARCHIVEFTP : "'+config.PATHS.ARCHIVEFTP[0]
        for ftpserver in config.PATHS.ARCHIVEFTP[1:]:
            ftp_path=ftp_path+":"+ftpserver
        ff.write("# ftp servers where to search for prerequisite archives\n")
    # add licence paths if any
    if len(config.PATHS.LICENCEPATH) > 0:
        licence_path='LICENCEPATH : "'+config.PATHS.LICENCEPATH[0]
        for path in config.PATHS.LICENCEPATH[1:]:
            licence_path=licence_path+":"+path
        ff.write("\n# Where to search for licences\n")
        ff.write(licence_path)

    # Loop over the products to get there pyconf and all the scripts
    # (compilation, environment, patches)
    # and create the pyconf file to add to the project
    lproducts_name = config.APPLICATION.products.keys()
    l_products = src.product.get_products_infos(lproducts_name, config)
    for p_name, p_info in l_products:
        # skip product with property not_in_package set to yes
        if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
        find_product_scripts_and_pyconf(p_name,
                                        compil_scripts_tmp_dir,
                                        products_pyconf_tmp_dir)

    # copy/adapt the application pyconf into the project
    find_application_pyconf(config, application_tmp_dir)

    d_project = {"project" : (project_tmp_dir, PROJECT_DIR )}
982 def find_product_scripts_and_pyconf(p_name,
# NOTE(review): this listing is a partial extraction -- the leading integers
# are original-file line numbers fused into the text, indentation is lost,
# and several original lines are elided (gaps in the fused numbering).
# Originals 983-985 and 987-988 are missing here; they presumably declare
# the p_info, config, with_vcs, env_scripts_tmp_dir and patches_tmp_dir
# parameters documented in the docstring below -- confirm against the full file.
986 compil_scripts_tmp_dir,
989 products_pyconf_tmp_dir):
990 '''Create a specific pyconf file for a given product. Get its environment
991 script, its compilation script and patches and put it in the temporary
992 working directory. This method is used in the source package in order to
993 construct the specific project.
995 :param p_name str: The name of the product.
996 :param p_info Config: The specific configuration corresponding to the
998 :param config Config: The global configuration.
999 :param with_vcs boolean: True if the package is with vcs products (not
1000 transformed into archive products)
1001 :param compil_scripts_tmp_dir str: The path to the temporary compilation
1002 scripts directory of the project.
1003 :param env_scripts_tmp_dir str: The path to the temporary environment script
1004 directory of the project.
1005 :param patches_tmp_dir str: The path to the temporary patch scripts
1006 directory of the project.
1007 :param products_pyconf_tmp_dir str: The path to the temporary product
1008 scripts directory of the project.
# NOTE(review): the docstring's closing quotes (original ~1009) are elided
# from this listing.
1011 # read the pyconf of the product
1012 product_pyconf_path = src.find_file_in_lpath(p_name + ".pyconf",
1013 config.PATHS.PRODUCTPATH)
1014 product_pyconf_cfg = src.pyconf.Config(product_pyconf_path)
1016 # find the compilation script if any
1017 if src.product.product_has_script(p_info):
1018 compil_script_path = src.Path(p_info.compil_script)
1019 compil_script_path.copy(compil_scripts_tmp_dir)
1020 product_pyconf_cfg[p_info.section].compil_script = os.path.basename(
1021 p_info.compil_script)
1022 # find the environment script if any
1023 if src.product.product_has_env_script(p_info):
1024 env_script_path = src.Path(p_info.environ.env_script)
1025 env_script_path.copy(env_scripts_tmp_dir)
1026 product_pyconf_cfg[p_info.section].environ.env_script = os.path.basename(
1027 p_info.environ.env_script)
1028 # find the patches if any
1029 if src.product.product_has_patches(p_info):
1030 patches = src.pyconf.Sequence()
1031 for patch_path in p_info.patches:
1032 p_path = src.Path(patch_path)
1033 p_path.copy(patches_tmp_dir)
1034 patches.append(os.path.basename(patch_path), "")
1036 product_pyconf_cfg[p_info.section].patches = patches
# NOTE(review): originals 1037-1038 elided; presumably an "if with_vcs:"
# guard for the resolved-values block below -- confirm.
1039 # put in the pyconf file the resolved values
1040 for info in ["git_info", "cvs_info", "svn_info"]:
# NOTE(review): original 1041 elided; presumably "if info in p_info:".
1042 for key in p_info[info]:
1043 product_pyconf_cfg[p_info.section][info][key] = p_info[
# NOTE(review): originals 1044-1045 elided (the "info][key]" continuation of
# the subscript expression above).
1046 # if the product is not archive, then make it become archive.
1047 if src.product.product_is_vcs(p_info):
1048 product_pyconf_cfg[p_info.section].get_source = "archive"
1049 if not "archive_info" in product_pyconf_cfg[p_info.section]:
1050 product_pyconf_cfg[p_info.section].addMapping("archive_info",
1051 src.pyconf.Mapping(product_pyconf_cfg),
# NOTE(review): original 1052 elided; presumably the trailing '"")' argument
# of addMapping, matching the pattern used elsewhere in this file.
1053 product_pyconf_cfg[p_info.section
1054 ].archive_info.archive_name = p_info.name + ".tgz"
1056 # write the pyconf file to the temporary project location
1057 product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir,
# NOTE(review): original 1058 elided; presumably 'p_name + ".pyconf")'.
1059 ff = open(product_tmp_pyconf_path, 'w')
1060 ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1061 product_pyconf_cfg.__save__(ff, 1)
# NOTE(review): originals 1062-1063 elided; presumably "ff.close()".
1064 def find_application_pyconf(config, application_tmp_dir):
1065 '''Find the application pyconf file and put it in the specific temporary
1066 directory containing the specific project of a source package.
1068 :param config Config: The global configuration.
1069 :param application_tmp_dir str: The path to the temporary application
1070 scripts directory of the project.
# NOTE(review): the docstring's closing quotes (original ~1071) are elided
# from this listing, as are a few other original lines (gaps in the fused
# numbering) -- see the notes below.
1072 # read the pyconf of the application
1073 application_name = config.VARS.application
1074 application_pyconf_path = src.find_file_in_lpath(
1075 application_name + ".pyconf",
1076 config.PATHS.APPLICATIONPATH)
1077 application_pyconf_cfg = src.pyconf.Config(application_pyconf_path)
1079 # Change the workdir
1080 application_pyconf_cfg.APPLICATION.workdir = src.pyconf.Reference(
1081 application_pyconf_cfg,
# NOTE(review): original 1082 elided; presumably "src.pyconf.DOLLAR," -- the
# Reference(cfg, DOLLAR, expr) pattern used elsewhere in this file.
1083 'VARS.salometoolsway + $VARS.sep + ".."')
1085 # Prevent from compilation in base
1086 application_pyconf_cfg.APPLICATION.no_base = "yes"
1088 #remove products that are not in config (which were filtered by --without_properties)
1089 for product_name in application_pyconf_cfg.APPLICATION.products.keys():
1090 if product_name not in config.APPLICATION.products.keys():
1091 application_pyconf_cfg.APPLICATION.products.__delitem__(product_name)
1093 # write the pyconf file to the temporary application location
1094 application_tmp_pyconf_path = os.path.join(application_tmp_dir,
1095 application_name + ".pyconf")
1097 ff = open(application_tmp_pyconf_path, 'w')
1098 ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1099 application_pyconf_cfg.__save__(ff, 1)
# NOTE(review): original 1100 elided; presumably "ff.close()".
1102 def sat_package(config, tmp_working_dir, options, logger):
1103 '''Prepare a dictionary that stores all the needed directories and files to
1104 add in a salomeTool package.
1106 :param tmp_working_dir str: The temporary local working directory
1107 :param options OptResult: the options of the launched command
1108 :return: the dictionary that stores all the needed directories and files to
1109 add in a salomeTool package.
1110 {label : (path_on_local_machine, path_in_archive)}
# NOTE(review): the docstring's closing quotes and the "d_project = {}"
# initialisation (originals ~1111-1114) are elided from this listing.
1115 # we include sat himself
1116 d_project["all_sat"]=(config.VARS.salometoolsway, "")
# and we overwrite local.pyconf with a clean version.
1119 local_pyconf_tmp_path = os.path.join(tmp_working_dir, "local.pyconf")
1120 local_file_path = os.path.join(config.VARS.datadir, "local.pyconf")
1121 local_cfg = src.pyconf.Config(local_file_path)
# reset the project list and neutralise machine-local settings so the
# packaged sat starts from a clean state
1122 local_cfg.PROJECTS.project_file_paths=src.pyconf.Sequence(local_cfg.PROJECTS)
1123 local_cfg.LOCAL["base"] = "default"
1124 local_cfg.LOCAL["workdir"] = "default"
1125 local_cfg.LOCAL["log_dir"] = "default"
1126 local_cfg.LOCAL["archive_dir"] = "default"
1127 local_cfg.LOCAL["VCS"] = "None"
1128 local_cfg.LOCAL["tag"] = src.get_salometool_version(config)
1130 # if the archive contains a project, we write its relative path in local.pyconf
# NOTE(review): original 1131 elided; presumably "if options.project:",
# guarding the three lines below.
1132 project_arch_path = os.path.join("projects", options.project,
1133 os.path.basename(options.project_file_path))
1134 local_cfg.PROJECTS.project_file_paths.append(project_arch_path, "")
1136 ff = open(local_pyconf_tmp_path, 'w')
1137 local_cfg.__save__(ff, 1)
# NOTE(review): original 1138 elided; presumably "ff.close()".
1139 d_project["local.pyconf"]=(local_pyconf_tmp_path, "data/local.pyconf")
# NOTE(review): originals 1140-1142 elided; presumably "return d_project".
1143 def project_package(config, name_project, project_file_path, ftp_mode, tmp_working_dir, embedded_in_sat, logger):
1144 '''Prepare a dictionary that stores all the needed directories and files to
1145 add in a project package.
1147 :param project_file_path str: The path to the local project.
1148 :param ftp_mode boolean: Do not embed archives, the archive will rely on ftp mode to retrieve them.
1149 :param tmp_working_dir str: The temporary local directory containing some
1150 specific directories or files needed in the
1152 :param embedded_in_sat boolean : the project package is embedded in a sat package
1153 :return: the dictionary that stores all the needed directories and files to
1154 add in a project package.
1155 {label : (path_on_local_machine, path_in_archive)}
# NOTE(review): the docstring's closing quotes, the "d_project = {}"
# initialisation and a "try:" wrapping the attribute lookup below
# (originals ~1156-1161) are elided from this listing.
1159 # Read the project file and get the directories to add to the package
1162 project_pyconf_cfg = config.PROJECTS.projects.__getattr__(name_project)
# NOTE(review): the "except:" header and the opening of the logger.write(
# call (originals 1163-1164) are elided; the fall-back below reads the
# project pyconf directly from project_file_path.
1165 WARNING: inexisting config.PROJECTS.projects.%s, try to read now from:\n%s\n""" % (name_project, project_file_path))
1166 project_pyconf_cfg = src.pyconf.Config(project_file_path)
1167 project_pyconf_cfg.PWD = os.path.dirname(project_file_path)
# map of project pyconf keys -> directory name inside the archive
1169 paths = {"APPLICATIONPATH" : "applications",
1170 "PRODUCTPATH" : "products",
1172 "MACHINEPATH" : "machines"}
# NOTE(review): original 1171 (presumably the "JOBPATH" : "jobs" entry) and
# original 1173 (presumably "if not ftp_mode:", guarding the line below since
# ftp mode does not embed archives) are elided -- confirm.
1174 paths["ARCHIVEPATH"] = "archives"
1176 # Loop over the project paths and add it
1177 project_file_name = os.path.basename(project_file_path)
# NOTE(review): original 1178 elided; presumably "for path in paths:".
1179 if path not in project_pyconf_cfg:
# NOTE(review): originals 1180-1181 elided; presumably "continue" and an
# "if embedded_in_sat:" guard selecting between the two branches below.
1182 dest_path = os.path.join("projects", name_project, paths[path])
1183 project_file_dest = os.path.join("projects", name_project, project_file_name)
# NOTE(review): original 1184 elided; presumably "else:".
1185 dest_path = paths[path]
1186 project_file_dest = project_file_name
1188 # Add the directory to the files to add in the package
1189 d_project[path] = (project_pyconf_cfg[path], dest_path)
1191 # Modify the value of the path in the package
1192 project_pyconf_cfg[path] = src.pyconf.Reference(
# NOTE(review): originals 1193-1194 elided; presumably the cfg and
# src.pyconf.DOLLAR arguments of Reference.
1195 'project_path + "/' + paths[path] + '"')
1197 # Modify some values
1198 if "project_path" not in project_pyconf_cfg:
1199 project_pyconf_cfg.addMapping("project_path",
1200 src.pyconf.Mapping(project_pyconf_cfg),
# NOTE(review): original 1201 elided; presumably the trailing '"")' argument.
1202 project_pyconf_cfg.project_path = src.pyconf.Reference(project_pyconf_cfg,
# NOTE(review): originals 1203-1204 elided; presumably src.pyconf.DOLLAR and
# the 'PWD' expression.
1205 # we don't want to export these two fields
1206 project_pyconf_cfg.__delitem__("file_path")
1207 project_pyconf_cfg.__delitem__("PWD")
# NOTE(review): original 1208 elided; presumably "if ftp_mode:", guarding the
# deletion below.
1209 project_pyconf_cfg.__delitem__("ARCHIVEPATH")
1211 # Write the project pyconf file
1212 project_pyconf_tmp_path = os.path.join(tmp_working_dir, project_file_name)
1213 ff = open(project_pyconf_tmp_path, 'w')
1214 ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1215 project_pyconf_cfg.__save__(ff, 1)
# NOTE(review): original 1216 elided; presumably "ff.close()".
1217 d_project["Project hat file"] = (project_pyconf_tmp_path, project_file_dest)
# NOTE(review): originals 1218-1219 elided; presumably "return d_project".
1221 def add_readme(config, options, where):
# Build a README file at <where>/README describing the generated package
# (header, then per-option sections rendered from the internal templates).
# NOTE(review): several original lines are elided from this listing (gaps in
# the fused numbering), including parts of the literal template strings --
# do not treat the strings below as complete.
1222 readme_path = os.path.join(where, "README")
1223 with codecs.open(readme_path, "w", 'utf-8') as f:
1225 # templates for building the header
# NOTE(review): original 1226 elided; presumably 'readme_header="""'.
1227 # This package was generated with sat $version
# NOTE(review): originals 1228-1229 ("# Date: $date" / "# User: $user")
# elided.
1230 # Distribution : $dist
1232 In the following, $$ROOT represents the directory where you have installed
1233 SALOME (the directory where this file is located).
# NOTE(review): originals 1234-1235 elided; presumably the closing '"""'.
1236 readme_compilation_with_binaries="""
1238 compilation based on the binaries used as prerequisites
1239 =======================================================
1241 If you fail to compile the complete application (for example because
1242 you are not root on your system and cannot install missing packages), you
1243 may try a partial compilation based on the binaries.
1244 For that it is necessary to copy the binaries from BINARIES to INSTALL,
1245 and do some substitutions on cmake and .la files (replace the build directories
1247 The procedure to do it is:
1248 1) Remove or rename INSTALL directory if it exists
1249 2) Execute the shell script install_bin.sh:
# NOTE(review): originals 1250-1251 elided (the "> cd $ROOT" /
# "> ./install_bin.sh" example lines, presumably).
1252 3) Use SalomeTool (as explained in Sources section) and compile only the
1253 modules you need to (with -p option)
# NOTE(review): originals 1254-1255 elided; presumably the closing '"""'.
1256 readme_header_tpl=string.Template(readme_header)
# resolve the template files shipped in sat's internal directory
1257 readme_template_path_bin = os.path.join(config.VARS.internal_dir,
1258 "README_BIN.template")
1259 readme_template_path_bin_launcher = os.path.join(config.VARS.internal_dir,
1260 "README_LAUNCHER.template")
1261 readme_template_path_bin_virtapp = os.path.join(config.VARS.internal_dir,
1262 "README_BIN_VIRTUAL_APP.template")
1263 readme_template_path_src = os.path.join(config.VARS.internal_dir,
1264 "README_SRC.template")
1265 readme_template_path_pro = os.path.join(config.VARS.internal_dir,
1266 "README_PROJECT.template")
1267 readme_template_path_sat = os.path.join(config.VARS.internal_dir,
1268 "README_SAT.template")
1270 # prepare substitution dictionary
# NOTE(review): original 1271 elided; presumably "d = dict()".
1272 d['user'] = config.VARS.user
1273 d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
1274 d['version'] = src.get_salometool_version(config)
1275 d['dist'] = config.VARS.dist
1276 f.write(readme_header_tpl.substitute(d)) # write the general header (common)
1278 if options.binaries or options.sources:
1279 d['application'] = config.VARS.application
1280 f.write("# Application: " + d['application'] + "\n")
1281 if 'KERNEL' in config.APPLICATION.products:
1282 VersionSalome = src.get_salome_version(config)
1283 # Case where SALOME has the launcher that uses the SalomeContext API
1284 if VersionSalome >= 730:
1285 d['launcher'] = config.APPLICATION.profile.launcher_name
# NOTE(review): original 1286 elided; presumably "else:".
1287 d['virtual_app'] = 'runAppli' # this info is not used now)
1289 # write the specific sections
1290 if options.binaries:
1291 f.write(src.template.substitute(readme_template_path_bin, d))
1292 if "virtual_app" in d:
1293 f.write(src.template.substitute(readme_template_path_bin_virtapp, d))
# NOTE(review): original 1294 elided; presumably 'if "launcher" in d:'.
1295 f.write(src.template.substitute(readme_template_path_bin_launcher, d))
# NOTE(review): originals 1296-1297 elided; presumably "if options.sources:".
1298 f.write(src.template.substitute(readme_template_path_src, d))
1300 if options.binaries and options.sources:
1301 f.write(readme_compilation_with_binaries)
# NOTE(review): originals 1302-1303 elided; presumably "if options.project:".
1304 f.write(src.template.substitute(readme_template_path_pro, d))
# NOTE(review): originals 1305-1306 elided; presumably "if options.sat:".
1307 f.write(src.template.substitute(readme_template_path_sat, d))
# NOTE(review): originals 1308-1309 elided; presumably "return readme_path"
# (the caller in run() binds the result as local_readme_tmp_path).
def update_config(config, prop, value):
    '''Remove from config.APPLICATION.products the products that have the
    property given as input.

    :param config Config: The global config.
    :param prop str: The property to filter
    :param value str: The value of the property to filter
    '''
    # An APPLICATION section may be absent (e.g. "sat package -t");
    # in that case there is nothing to filter.
    if "APPLICATION" not in config:
        return

    # Collect the matching names first, then delete: never mutate the
    # products mapping while iterating over it.
    matching = [name
                for name in config.APPLICATION.products.keys()
                if src.get_property_in_product_cfg(
                       src.product.get_product_config(config, name),
                       prop) == value]
    for name in matching:
        config.APPLICATION.products.__delitem__(name)
1329 '''method that is called when salomeTools is called with --help option.
1331 :return: The text to display for the package command description.
1335 The package command creates a tar file archive of a product.
1336 There are four kinds of archive, which can be mixed:
1338 1 - The binary archive.
1339 It contains the product installation directories plus a launcher.
1340 2 - The sources archive.
1341 It contains the product archives, a project (the application plus salomeTools).
1342 3 - The project archive.
1343 It contains a project (give the project file path as argument).
1344 4 - The salomeTools archive.
1345 It contains code utility salomeTools.
1348 >> sat package SALOME-master --binaries --sources""")
1350 def run(args, runner, logger):
1351 '''method that is called when salomeTools is called with package parameter.
1355 (options, args) = parser.parse_args(args)
1357 # Check that a type of package is called, and only one
1358 all_option_types = (options.binaries,
1360 options.project not in ["", None],
1363 # Check if no option for package type
1364 if all_option_types.count(True) == 0:
1365 msg = _("Error: Precise a type for the package\nUse one of the "
1366 "following options: --binaries, --sources, --project or"
1368 logger.write(src.printcolors.printcError(msg), 1)
1369 logger.write("\n", 1)
1372 # The repository where to put the package if not Binary or Source
1373 package_default_path = runner.cfg.LOCAL.workdir
1375 # if the package contains binaries or sources:
1376 if options.binaries or options.sources:
1377 # Check that the command has been called with an application
1378 src.check_config_has_application(runner.cfg)
1380 # Display information
1381 logger.write(_("Packaging application %s\n") % src.printcolors.printcLabel(
1382 runner.cfg.VARS.application), 1)
1384 # Get the default directory where to put the packages
1385 package_default_path = os.path.join(runner.cfg.APPLICATION.workdir, "PACKAGE")
1386 src.ensure_path_exists(package_default_path)
1388 # if the package contains a project:
1390 # check that the project is visible by SAT
1391 projectNameFile = options.project + ".pyconf"
1393 for i in runner.cfg.PROJECTS.project_file_paths:
1394 baseName = os.path.basename(i)
1395 if baseName == projectNameFile:
1399 if foundProject is None:
1400 local_path = os.path.join(runner.cfg.VARS.salometoolsway, "data", "local.pyconf")
1401 msg = _("""ERROR: the project %(1)s is not visible by salomeTools.
1405 Please add it in file:
1407 {"1": options.project, "2": "\n ".join(runner.cfg.PROJECTS.project_file_paths), "3": local_path})
1408 logger.write(src.printcolors.printcError(msg), 1)
1409 logger.write("\n", 1)
1412 options.project_file_path = foundProject
1413 src.printcolors.print_value(logger, "Project path", options.project_file_path, 2)
1415 # Remove the products that are filtered by the --without_properties option
1416 if options.without_properties:
1417 app = runner.cfg.APPLICATION
1418 logger.trace("without_properties all products:\n %s\n" % PP.pformat(sorted(app.products.keys())))
1419 prop, value = options.without_properties
1420 update_config(runner.cfg, prop, value)
1421 logger.warning("without_properties selected products:\n %s\n" % PP.pformat(sorted(app.products.keys())))
1423 # Remove from config the products that have the not_in_package property
1424 update_config(runner.cfg, "not_in_package", "yes")
1426 # get the name of the archive or build it
1428 if os.path.basename(options.name) == options.name:
1429 # only a name (not a path)
1430 archive_name = options.name
1431 dir_name = package_default_path
1433 archive_name = os.path.basename(options.name)
1434 dir_name = os.path.dirname(options.name)
1436 # suppress extension
1437 if archive_name[-len(".tgz"):] == ".tgz":
1438 archive_name = archive_name[:-len(".tgz")]
1439 if archive_name[-len(".tar.gz"):] == ".tar.gz":
1440 archive_name = archive_name[:-len(".tar.gz")]
1444 dir_name = package_default_path
1445 if options.binaries or options.sources:
1446 archive_name = runner.cfg.APPLICATION.name
1448 if options.binaries:
1449 archive_name += "-"+runner.cfg.VARS.dist
1452 archive_name += "-SRC"
1453 if options.with_vcs:
1454 archive_name += "-VCS"
1457 archive_name += ("salomeTools_" + src.get_salometool_version(runner.cfg))
1462 project_name = options.project
1463 archive_name += ("satproject_" + project_name)
1465 if len(archive_name)==0: # no option worked
1466 msg = _("Error: Cannot name the archive\n"
1467 " check if at least one of the following options was "
1468 "selected : --binaries, --sources, --project or"
1470 logger.write(src.printcolors.printcError(msg), 1)
1471 logger.write("\n", 1)
1474 path_targz = os.path.join(dir_name, archive_name + ".tgz")
1476 src.printcolors.print_value(logger, "Package path", path_targz, 2)
1478 # Create a working directory for all files that are produced during the
1479 # package creation and that will be removed at the end of the command
1480 tmp_working_dir = os.path.join(runner.cfg.VARS.tmp_root, runner.cfg.VARS.datehour)
1481 src.ensure_path_exists(tmp_working_dir)
1482 logger.write("\n", 5)
1483 logger.write(_("The temporary working directory: %s\n" % tmp_working_dir),5)
1485 logger.write("\n", 3)
1487 msg = _("Preparation of files to add to the archive")
1488 logger.write(src.printcolors.printcLabel(msg), 2)
1489 logger.write("\n", 2)
1491 d_files_to_add={} # content of the archive
1493 # a dict to hold paths that will need to be substitute for users recompilations
1494 d_paths_to_substitute={}
1496 if options.binaries:
1497 d_bin_files_to_add = binary_package(runner.cfg,
1501 # for all binaries dir, store the substitution that will be required
1502 # for extra compilations
1503 for key in d_bin_files_to_add:
1504 if key.endswith("(bin)"):
1505 source_dir = d_bin_files_to_add[key][0]
1506 path_in_archive = d_bin_files_to_add[key][1].replace("BINARIES-" + runner.cfg.VARS.dist,"INSTALL")
1507 if os.path.basename(source_dir)==os.path.basename(path_in_archive):
1508 # if basename is the same we will just substitute the dirname
1509 d_paths_to_substitute[os.path.dirname(source_dir)]=\
1510 os.path.dirname(path_in_archive)
1512 d_paths_to_substitute[source_dir]=path_in_archive
1514 d_files_to_add.update(d_bin_files_to_add)
1517 d_files_to_add.update(source_package(runner,
1522 if options.binaries:
1523 # for archives with bin and sources we provide a shell script able to
1524 # install binaries for compilation
1525 file_install_bin=produce_install_bin_file(runner.cfg,logger,
1527 d_paths_to_substitute,
1529 d_files_to_add.update({"install_bin" : (file_install_bin, "install_bin.sh")})
1530 logger.write("substitutions that need to be done later : \n", 5)
1531 logger.write(str(d_paths_to_substitute), 5)
1532 logger.write("\n", 5)
1534 # --salomeTool option is not considered when --sources is selected, as this option
1535 # already brings salomeTool!
1537 d_files_to_add.update(sat_package(runner.cfg, tmp_working_dir,
1541 DBG.write("config for package %s" % project_name, runner.cfg)
1542 d_files_to_add.update(project_package(runner.cfg, project_name, options.project_file_path, options.ftp, tmp_working_dir, options.sat, logger))
1544 if not(d_files_to_add):
1545 msg = _("Error: Empty dictionnary to build the archive!\n")
1546 logger.write(src.printcolors.printcError(msg), 1)
1547 logger.write("\n", 1)
1550 # Add the README file in the package
1551 local_readme_tmp_path = add_readme(runner.cfg, options, tmp_working_dir)
1552 d_files_to_add["README"] = (local_readme_tmp_path, "README")
1554 # Add the additional files of option add_files
1555 if options.add_files:
1556 for file_path in options.add_files:
1557 if not os.path.exists(file_path):
1558 msg = _("WARNING: the file %s is not accessible.\n" % file_path)
1560 file_name = os.path.basename(file_path)
1561 d_files_to_add[file_name] = (file_path, file_name)
1563 logger.write("\n", 2)
1564 logger.write(src.printcolors.printcLabel(_("Actually do the package")), 2)
1565 logger.write("\n", 2)
1566 logger.write("\nfiles and directories to add:\n%s\n\n" % PP.pformat(d_files_to_add), 5)
1570 # Creating the object tarfile
1571 tar = tarfile.open(path_targz, mode='w:gz')
1573 # get the filtering function if needed
1574 filter_function = exclude_VCS_and_extensions
1576 # Add the files to the tarfile object
1577 res = add_files(tar, archive_name, d_files_to_add, logger, f_exclude=filter_function)
1579 except KeyboardInterrupt:
1580 logger.write(src.printcolors.printcError("\nERROR: forced interruption\n"), 1)
1581 logger.write(_("Removing the temporary working directory '%s'... ") % tmp_working_dir, 1)
1582 # remove the working directory
1583 shutil.rmtree(tmp_working_dir)
1584 logger.write(_("OK"), 1)
1585 logger.write(_("\n"), 1)
1588 # case if no application, only package sat as 'sat package -t'
1590 app = runner.cfg.APPLICATION
1594 # unconditionaly remove the tmp_local_working_dir
1596 tmp_local_working_dir = os.path.join(app.workdir, "tmp_package")
1597 if os.path.isdir(tmp_local_working_dir):
1598 shutil.rmtree(tmp_local_working_dir)
1600 # remove the tmp directory, unless user has registered as developer
1601 if os.path.isdir(tmp_working_dir) and (not DBG.isDeveloper()):
1602 shutil.rmtree(tmp_working_dir)
1604 # Print again the path of the package
1605 logger.write("\n", 2)
1606 src.printcolors.print_value(logger, "Package path", path_targz, 2)