3 # Copyright (C) 2010-2012 CEA/DEN
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 # Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
29 from application import get_SALOME_modules
30 import src.debug as DBG
# Name of the sub-directory that stores product archives inside a package
ARCHIVE_DIR = "ARCHIVES"
# Name of the sub-directory that stores the generated project inside a package
PROJECT_DIR = "PROJECT"
# Directory names excluded when archiving sources (VCS administrative data)
IGNORED_DIRS = [".git", ".svn"]
# File extensions excluded when archiving sources (none by default)
IGNORED_EXTENSIONS = []
43 PROJECT_TEMPLATE = """#!/usr/bin/env python
46 # The path to the archive root directory
47 root_path : $PWD + "/../"
49 project_path : $PWD + "/"
51 # Where to search the archives of the products
52 ARCHIVEPATH : $root_path + "ARCHIVES"
53 # Where to search the pyconf of the applications
54 APPLICATIONPATH : $project_path + "applications/"
55 # Where to search the pyconf of the products
56 PRODUCTPATH : $project_path + "products/"
57 # Where to search the pyconf of the jobs of the project
58 JOBPATH : $project_path + "jobs/"
59 # Where to search the pyconf of the machines of the project
60 MACHINEPATH : $project_path + "machines/"
63 LOCAL_TEMPLATE = ("""#!/usr/bin/env python
71 archive_dir : 'default'
78 project_file_paths : [$VARS.salometoolsway + $VARS.sep + \"..\" + $VARS.sep"""
79 """ + \"""" + PROJECT_DIR + """\" + $VARS.sep + "project.pyconf"]
# Define all possible options for the package command: sat package <options>
# NOTE(review): some continuation lines appear elided in this extract
# (e.g. the default value of --with_vcs) — verify against the full file.
parser = src.options.Options()
parser.add_option('b', 'binaries', 'boolean', 'binaries',
    _('Optional: Produce a binary package.'), False)
parser.add_option('f', 'force_creation', 'boolean', 'force_creation',
    _('Optional: Only binary package: produce the archive even if '
      'there are some missing products.'), False)
parser.add_option('s', 'sources', 'boolean', 'sources',
    _('Optional: Produce a compilable archive of the sources of the '
      'application.'), False)
parser.add_option('', 'with_vcs', 'boolean', 'with_vcs',
    _('Optional: Only source package: do not make archive of vcs products.'),
parser.add_option('p', 'project', 'string', 'project',
    _('Optional: Produce an archive that contains a project.'), "")
parser.add_option('t', 'salometools', 'boolean', 'sat',
    _('Optional: Produce an archive that contains salomeTools.'), False)
parser.add_option('n', 'name', 'string', 'name',
    _('Optional: The name or full path of the archive.'), None)
parser.add_option('', 'add_files', 'list2', 'add_files',
    _('Optional: The list of additional files to add to the archive.'), [])
parser.add_option('', 'without_commercial', 'boolean', 'without_commercial',
    _('Optional: do not add commercial licence.'), False)
parser.add_option('', 'without_property', 'string', 'without_property',
    _('Optional: Filter the products by their properties.\n\tSyntax: '
      '--without_property <property>:<value>'))
def add_files(tar, name_archive, d_content, logger, f_exclude=None):
    '''Create an archive containing all directories and files that are given in
    the d_content argument.

    :param tar tarfile: The tarfile instance used to make the archive.
    :param name_archive str: The name of the archive to make.
    :param d_content dict: The dictionary that contains all directories and
                           files to add in the archive, as
                           (path_on_local_machine, path_in_archive) tuples.
    :param logger Logger: the logging instance
    :param f_exclude Function: the function that filters entries out of the
                               archive (passed to tar.add as exclude=)
    :return: 0 if success, 1 if not.
    '''
    # get the max length of the messages in order to align the display
    max_len = len(max(d_content.keys(), key=len))
    # loop over each directory or file stored in the d_content dictionary
    # (sorted so the archive content and log output are deterministic)
    for name in sorted(d_content.keys()):
        # display information: pad each label with dots up to max_len
        len_points = max_len - len(name)
        local_path, archive_path = d_content[name]
        in_archive = os.path.join(name_archive, archive_path)
        logger.write(name + " " + len_points * "." + " "+ in_archive + " ", 3)
        # Get the local path and the path in archive
        # of the directory or file to add
        # Add it in the archive
        # NOTE(review): the opening 'try:' of this try/except appears elided
        # in this extract — verify against the full file.
        tar.add(local_path, arcname=in_archive, exclude=f_exclude)
        logger.write(src.printcolors.printcSuccess(_("OK")), 3)
    except Exception as e:
        # on failure, report KO with the exception text but keep going
        logger.write(src.printcolors.printcError(_("KO ")), 3)
        logger.write(str(e), 3)
    logger.write("\n", 3)
def exclude_VCS_and_extensions(filename):
    ''' The function that is used to exclude from package the link to the
    VCS repositories (like .git)

    :param filename Str: The filename to exclude (or not).
    :return: True if the file has to be excluded
    '''
    # NOTE(review): the bodies of the two 'if' tests (presumably the return
    # statements) appear elided in this extract — verify against the full file.
    # exclude anything whose path mentions a VCS administrative directory
    for dir_name in IGNORED_DIRS:
        if dir_name in filename:
    # exclude files carrying an ignored extension
    for extension in IGNORED_EXTENSIONS:
        if filename.endswith(extension):
def produce_relative_launcher(config,
                              with_commercial=True):
    '''Create a specific SALOME launcher for the binary package. This launcher
    uses relative paths.

    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param file_dir str: the directory where to put the launcher
    :param file_name str: The launcher name
    :param binaries_dir_name str: the name of the repository where the binaries
                                  are, in the archive.
    :return: the path of the produced launcher

    NOTE(review): several signature parameters (logger, file_dir, file_name,
    binaries_dir_name) appear elided from the def line in this extract —
    verify against the full file.
    '''
    # get KERNEL installation path
    kernel_root_dir = os.path.join(binaries_dir_name, "KERNEL")
    # set kernel bin dir (considering fhs property)
    kernel_cfg = src.product.get_product_config(config, "KERNEL")
    if src.get_property_in_product_cfg(kernel_cfg, "fhs"):
        bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin")
        # NOTE(review): the 'else:' branch marker appears elided here
        bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin","salome")
    # Get the launcher template and do substitutions
    withProfile = src.fileEnviron.withProfile
    # rewrite the template's absolute KERNEL paths into paths relative
    # to out_dir_Path (resolved at launcher run time)
    withProfile = withProfile.replace(
        "ABSOLUTE_APPLI_PATH'] = 'KERNEL_INSTALL_DIR'",
        "ABSOLUTE_APPLI_PATH'] = out_dir_Path + '" + config.VARS.sep + kernel_root_dir + "'")
    withProfile = withProfile.replace(
        " 'BIN_KERNEL_INSTALL_DIR'",
        " out_dir_Path + '" + config.VARS.sep + bin_kernel_install_dir + "'")
    # split the template around the marker where the generated environment
    # block must be inserted
    before, after = withProfile.split(
        "# here your local standalone environment\n")
    # create an environment file writer
    writer = src.environment.FileEnvWriter(config,
    filepath = os.path.join(file_dir, file_name)
    # open the file and write into it
    launch_file = open(filepath, "w")
    launch_file.write(before)
    # write the environment section between the two template halves
    writer.write_cfgForPy_file(launch_file,
                               for_package = binaries_dir_name,
                               with_commercial=with_commercial)
    launch_file.write(after)
    # Little hack to put out_dir_Path outside the strings
    src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' )
    # A hack to put a call to a file for distene licence.
    # It does nothing to an application that has no distene product
    hack_for_distene_licence(filepath)
    # change the rights in order to make the file executable for everybody
def hack_for_distene_licence(filepath):
    '''Replace the distene licence env variable by a call to a file.

    :param filepath Str: The path to the launcher to modify.

    NOTE(review): several lines (fileout definition, loop break, the
    'nothing to do' early return, string terminator) appear elided in this
    extract — verify against the full file.
    '''
    # work on a renamed copy and rewrite the original path
    shutil.move(filepath, filepath + "_old")
    filein = filepath + "_old"
    fin = open(filein, "r")
    fout = open(fileout, "w")
    text = fin.readlines()
    # Find the Distene section
    for i,line in enumerate(text):
        if "# Set DISTENE License" in line:
    # No distene product, there is nothing to do
    # drop the two lines following the Distene marker (same index twice
    # because the list shifts after each del)
    del text[num_line +1]
    del text[num_line +1]
    text_to_insert =""" import imp
distene = imp.load_source('distene_licence', '/data/tmpsalome/salome/prerequis/install/LICENSE/dlim8.var.py')
distene.set_distene_variables(context)
    text.insert(num_line + 1, text_to_insert)
def produce_relative_env_files(config,
    '''Create some specific environment files for the binary package. These
    files use relative paths.

    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param file_dir str: the directory where to put the files
    :param binaries_dir_name str: the name of the repository where the binaries
                                  are, in the archive.
    :return: the list of path of the produced environment files

    NOTE(review): signature continuation lines appear elided in this
    extract — verify against the full file.
    '''
    # create an environment file writer
    writer = src.environment.FileEnvWriter(config,
    # write a shell environment file rooted at the package directory
    filepath = writer.write_env_file("env_launch.sh",
                                     for_package = binaries_dir_name)
    # Little hack to put out_dir_Path as environment variable
    src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )
    # change the rights in order to make the file executable for everybody
def produce_install_bin_file(config,
    '''Create a bash shell script which does substitutions in BINARIES dir
    in order to use it for extra compilations.

    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param file_dir str: the directory where to put the files
    :param d_sub, dict: the dictionary that contains the substitutions to be done
    :param file_name str: the name of the install script file
    :return: the produced file

    NOTE(review): signature continuation lines and the 'for key in d_sub'
    loop header appear elided in this extract — verify against the full file.
    '''
    filepath = os.path.join(file_dir, file_name)
    # open the file and write into it
    # use codec utf-8 as sat variables are in unicode
    with codecs.open(filepath, "w", 'utf-8') as installbin_file:
        installbin_template_path = os.path.join(config.VARS.internal_dir,
                                                "INSTALL_BIN.template")
        # build the name of the directory that will contain the binaries
        binaries_dir_name = "BINARIES-" + config.VARS.dist
        # build the substitution loop: a grep/sed shell snippet that rewrites
        # each key of d_sub into its target path under $(pwd)
        loop_cmd = "for f in $(grep -RIl"
        loop_cmd += " -e "+ key
        loop_cmd += ' INSTALL); do\n sed -i "\n'
        loop_cmd += " s?" + key + "?$(pwd)/" + d_sub[key] + "?g\n"
        loop_cmd += ' " $f\ndone'
        d["BINARIES_DIR"] = binaries_dir_name
        d["SUBSTITUTION_LOOP"]=loop_cmd
        # substitute the template and write it in file
        content=src.template.substitute(installbin_template_path, d)
        installbin_file.write(content)
        # change the rights in order to make the file executable for everybody
def product_appli_creation_script(config,
    '''Create a script that can produce an application (EDF style) in the binary
    package.

    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param file_dir str: the directory where to put the file
    :param binaries_dir_name str: the name of the repository where the binaries
                                  are, in the archive.
    :return: the path of the produced script file

    NOTE(review): signature continuation lines and several statements
    (module-name concatenations, 'continue'/'else' branches) appear elided
    in this extract — verify against the full file.
    '''
    template_name = "create_appli.py.for_bin_packages.template"
    template_path = os.path.join(config.VARS.internal_dir, template_name)
    text_to_fill = open(template_path, "r").read()
    # first placeholder: the binaries directory name
    text_to_fill = text_to_fill.replace("TO BE FILLED 1",
                                        '"' + binaries_dir_name + '"')
    # build one <module .../> XML line per SALOME module of the application
    for product_name in get_SALOME_modules(config):
        product_info = src.product.get_product_config(config, product_name)
        if src.product.product_is_smesh_plugin(product_info):
        if 'install_dir' in product_info and bool(product_info.install_dir):
            if src.product.product_is_cpp(product_info):
                # cpp products expose one module per generated component
                for cpp_name in src.product.get_product_components(product_info):
                    line_to_add = ("<module name=\"" +
                                   "\" gui=\"yes\" path=\"''' + "
                                   "os.path.join(dir_bin_name, \"" +
                                   cpp_name + "\") + '''\"/>")
                line_to_add = ("<module name=\"" +
                               "\" gui=\"yes\" path=\"''' + "
                               "os.path.join(dir_bin_name, \"" +
                               product_name + "\") + '''\"/>")
        text_to_add += line_to_add + "\n"
    # second placeholder: the accumulated module lines
    filled_text = text_to_fill.replace("TO BE FILLED 2", text_to_add)
    tmp_file_path = os.path.join(file_dir, "create_appli.py")
    ff = open(tmp_file_path, "w")
    ff.write(filled_text)
    # change the rights in order to make the file executable for everybody
    os.chmod(tmp_file_path,
def binary_package(config, logger, options, tmp_working_dir):
    '''Prepare a dictionary that stores all the needed directories and files to
    add in a binary package.

    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param options OptResult: the options of the launched command
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
                                binary package
    :return: the dictionary that stores all the needed directories and files to
             add in a binary package.
             {label : (path_on_local_machine, path_in_archive)}

    NOTE(review): several lines (list initializations, 'continue'/'else'
    branches, some call continuations) appear elided in this extract —
    verify against the full file.
    '''
    # Get the list of product installation to add to the archive
    l_products_name = sorted(config.APPLICATION.products.keys())
    l_product_info = src.product.get_products_infos(l_products_name,
    l_sources_not_present = []
    for prod_name, prod_info in l_product_info:
        # Add the sources of the products that have the property
        # sources_in_package : "yes"
        if src.get_property_in_product_cfg(prod_info,
                                           "sources_in_package") == "yes":
            if os.path.exists(prod_info.source_dir):
                l_source_dir.append((prod_name, prod_info.source_dir))
                l_sources_not_present.append(prod_name)
        # ignore the native and fixed products for install directories
        if (src.product.product_is_native(prod_info)
                or src.product.product_is_fixed(prod_info)
                or not src.product.product_compiles(prod_info)):
        if src.product.check_installation(prod_info):
            l_install_dir.append((prod_name, prod_info.install_dir))
            l_not_installed.append(prod_name)
        # Add also the cpp generated modules (if any)
        if src.product.product_is_cpp(prod_info):
            for name_cpp in src.product.get_product_components(prod_info):
                install_dir = os.path.join(config.APPLICATION.workdir,
                if os.path.exists(install_dir):
                    l_install_dir.append((name_cpp, install_dir))
                    l_not_installed.append(name_cpp)
    # check the name of the directory that (could) contain the binaries
    # from previous detar
    binaries_from_detar = os.path.join(config.APPLICATION.workdir, "BINARIES-" + config.VARS.dist)
    if os.path.exists(binaries_from_detar):
WARNING: existing binaries directory from previous detar installation:
To make new package from this, you could:
1) install binaries in INSTALL directory with the script "install_bin.sh"
see README file for more details
2) recompile everything in INSTALL with "sat compile" command
this step is long, and requires some linux packages to be installed
""" % binaries_from_detar)
    # Print warning or error if there are some missing products
    if len(l_not_installed) > 0:
        text_missing_prods = ""
        for p_name in l_not_installed:
            text_missing_prods += "-" + p_name + "\n"
        # missing installs are fatal unless --force_creation was given
        if not options.force_creation:
            msg = _("ERROR: there are missing products installations:")
            logger.write("%s\n%s" % (src.printcolors.printcError(msg),
            msg = _("WARNING: there are missing products installations:")
            logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
    # Do the same for sources
    if len(l_sources_not_present) > 0:
        text_missing_prods = ""
        for p_name in l_sources_not_present:
            text_missing_prods += "-" + p_name + "\n"
        if not options.force_creation:
            msg = _("ERROR: there are missing products sources:")
            logger.write("%s\n%s" % (src.printcolors.printcError(msg),
            msg = _("WARNING: there are missing products sources:")
            logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
    # construct the name of the directory that will contain the binaries
    binaries_dir_name = "BINARIES-" + config.VARS.dist
    # construct the correlation table between the product names, their
    # actual install directories and their install directory in archive
    for prod_name, install_dir in l_install_dir:
        path_in_archive = os.path.join(binaries_dir_name, prod_name)
        d_products[prod_name + " (bin)"] = (install_dir, path_in_archive)
    for prod_name, source_dir in l_source_dir:
        path_in_archive = os.path.join("SOURCES", prod_name)
        d_products[prod_name + " (sources)"] = (source_dir, path_in_archive)
    # for packages of SALOME applications including KERNEL,
    # we produce a salome launcher or a virtual application (depending on salome version)
    if 'KERNEL' in config.APPLICATION.products:
        VersionSalome = src.get_salome_version(config)
        # Case where SALOME has the launcher that uses the SalomeContext API
        if VersionSalome >= 730:
            # create the relative launcher and add it to the files to add
            launcher_name = src.get_launcher_name(config)
            launcher_package = produce_relative_launcher(config,
                                                         not(options.without_commercial))
            d_products["launcher"] = (launcher_package, launcher_name)
            # if we mix binaries and sources, we add a copy of the launcher,
            # prefixed with "bin", in order to avoid clashes
            d_products["launcher (copy)"] = (launcher_package, "bin"+launcher_name)
            # Provide a script for the creation of an application EDF style
            appli_script = product_appli_creation_script(config,
            d_products["appli script"] = (appli_script, "create_appli.py")
    # Put also the environment file
    env_file = produce_relative_env_files(config,
    d_products["environment file"] = (env_file, "env_launch.sh")
def source_package(sat, config, logger, options, tmp_working_dir):
    '''Prepare a dictionary that stores all the needed directories and files to
    add in a source package.

    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param options OptResult: the options of the launched command
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
                                source package
    :return: the dictionary that stores all the needed directories and files to
             add in a source package.
             {label : (path_on_local_machine, path_in_archive)}

    NOTE(review): some lines (the with_vcs branch initialisation, symlink
    try/finally, return statement) appear elided in this extract — verify
    against the full file.
    '''
    # Get all the products that are prepared using an archive
    logger.write("Find archive products ... ")
    d_archives, l_pinfo_vcs = get_archives(config, logger)
    logger.write("Done\n")
    if not options.with_vcs and len(l_pinfo_vcs) > 0:
        # Make archives with the products that are not prepared using an archive
        # (git, cvs, svn, etc)
        logger.write("Construct archives for vcs products ... ")
        d_archives_vcs = get_archives_vcs(l_pinfo_vcs,
        logger.write("Done\n")
    # Create a project that points at the packaged products
    logger.write("Create the project ... ")
    d_project = create_project_for_src_package(config,
    logger.write("Done\n")
    # embed a copy of salomeTools itself in the package
    tmp_sat = add_salomeTools(config, tmp_working_dir)
    d_sat = {"salomeTools" : (tmp_sat, "salomeTools")}
    # Add a sat symbolic link if not win
    if not src.architecture.is_windows():
        tmp_satlink_path = os.path.join(tmp_working_dir, 'sat')
        # In the jobs, os.getcwd() can fail
        t = config.LOCAL.workdir
        os.chdir(tmp_working_dir)
        # replace any stale link before re-creating it
        if os.path.lexists(tmp_satlink_path):
            os.remove(tmp_satlink_path)
        os.symlink(os.path.join('salomeTools', 'sat'), 'sat')
        d_sat["sat link"] = (tmp_satlink_path, "sat")
    d_source = src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
def get_archives(config, logger):
    '''Find all the products that are get using an archive and all the products
    that are get using a vcs (git, cvs, svn) repository.

    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :return: the dictionary {name_product :
             (local path of its archive, path in the package of its archive )}
             and the list of specific configuration corresponding to the vcs
             products

    NOTE(review): the dict/list initialisations and 'continue'/'else'
    branches appear elided in this extract — verify against the full file.
    '''
    # Get the list of product informations
    l_products_name = config.APPLICATION.products.keys()
    l_product_info = src.product.get_products_infos(l_products_name,
    for p_name, p_info in l_product_info:
        # ignore the native and fixed products
        if (src.product.product_is_native(p_info)
                or src.product.product_is_fixed(p_info)):
        if p_info.get_source == "archive":
            archive_path = p_info.archive_info.archive_name
            archive_name = os.path.basename(archive_path)
            # vcs product: remember it so the caller can build its archive
            l_pinfo_vcs.append((p_name, p_info))
        d_archives[p_name] = (archive_path,
                              os.path.join(ARCHIVE_DIR, archive_name))
    return d_archives, l_pinfo_vcs
def add_salomeTools(config, tmp_working_dir):
    '''Prepare a version of salomeTools that has a specific local.pyconf file
    configured for a source package.

    :param config Config: The global configuration.
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
                                source package
    :return: The path to the local salomeTools directory to add in the package

    NOTE(review): the file-removal statement of the cleanup loop and the
    ff.close() appear elided in this extract — verify against the full file.
    '''
    # Copy sat in the temporary working directory
    sat_tmp_path = src.Path(os.path.join(tmp_working_dir, "salomeTools"))
    sat_running_path = src.Path(config.VARS.salometoolsway)
    sat_running_path.copy(sat_tmp_path)
    # Update the local.pyconf file that contains the path to the project
    local_pyconf_name = "local.pyconf"
    local_pyconf_dir = os.path.join(tmp_working_dir, "salomeTools", "data")
    local_pyconf_file = os.path.join(local_pyconf_dir, local_pyconf_name)
    # Remove the .pyconf file in the root directory of salomeTools if there is
    # any. (For example when launching jobs, a pyconf file describing the jobs
    # can be here and is not useful)
    files_or_dir_SAT = os.listdir(os.path.join(tmp_working_dir, "salomeTools"))
    for file_or_dir in files_or_dir_SAT:
        if file_or_dir.endswith(".pyconf") or file_or_dir.endswith(".txt"):
            file_path = os.path.join(tmp_working_dir,
    # overwrite local.pyconf with the package-specific template
    ff = open(local_pyconf_file, "w")
    ff.write(LOCAL_TEMPLATE)
    return sat_tmp_path.path
def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir):
    '''For sources package that require that all products are get using an
    archive, one has to create some archive for the vcs products.
    So this method calls the clean and source command of sat and then creates
    the archives.

    :param l_pinfo_vcs List: The list of specific configuration corresponding to
                             each vcs product
    :param sat Sat: The Sat instance that can be called to clean and source the
                    products
    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
                                source package
    :return: the dictionary that stores all the archives to add in the source
             package. {label : (path_on_local_machine, path_in_archive)}
    '''
    # clean the source directory of all the vcs products, then use the source
    # command and thus construct an archive that will not contain the patches
    l_prod_names = [pn for pn, __ in l_pinfo_vcs]
    logger.write(_("\nclean sources\n"))
    args_clean = config.VARS.application
    args_clean += " --sources --products "
    args_clean += ",".join(l_prod_names)
    logger.write("WARNING: get_archives_vcs clean\n '%s'\n" % args_clean, 1)
    sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)
    # re-fetch pristine sources for the same products
    logger.write(_("get sources\n"))
    args_source = config.VARS.application
    args_source += " --products "
    args_source += ",".join(l_prod_names)
    # temporarily redirect the application workdir to the package staging area
    svgDir = sat.cfg.APPLICATION.workdir
    sat.cfg.APPLICATION.workdir = tmp_working_dir
    # DBG.write("SSS sat config.APPLICATION.workdir", sat.cfg.APPLICATION, True)
    DBG.write("sat config id", id(sat.cfg), True)
    # NOTE: config is not the same id() as the one seen by sat.source()
    # sat.source(args_source, batch=True, verbose=5, logger_add_link = logger)
    source.run(args_source, sat, logger) #use this mode as runner.cfg reference
    # make the new archives
    for pn, pinfo in l_pinfo_vcs:
        path_archive = make_archive(pn, pinfo, tmp_working_dir)
        logger.write("make archive vcs '%s'\n" % path_archive)
        d_archives_vcs[pn] = (path_archive,
                              os.path.join(ARCHIVE_DIR, pn + ".tgz"))
    # restore the original workdir before returning
    sat.cfg.APPLICATION.workdir = svgDir
    # DBG.write("END sat config", sat.cfg.APPLICATION, True)
    return d_archives_vcs
def make_archive(prod_name, prod_info, where):
    '''Create an archive of a product by searching its source directory.

    :param prod_name str: The name of the product.
    :param prod_info Config: The specific configuration corresponding to the
                             product
    :param where str: The path of the repository where to put the resulting
                      archive
    :return: The path of the resulting archive

    NOTE(review): the arcname= argument of tar_prod.add and the
    tar_prod.close() call appear elided in this extract — verify against
    the full file.
    '''
    path_targz_prod = os.path.join(where, prod_name + ".tgz")
    tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
    local_path = prod_info.source_dir
    # archive the whole source tree, filtering out VCS dirs and ignored
    # extensions
    tar_prod.add(local_path,
                 exclude=exclude_VCS_and_extensions)
    return path_targz_prod
def create_project_for_src_package(config, tmp_working_dir, with_vcs):
    '''Create a specific project for a source package.

    :param config Config: The global configuration.
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
                                source package
    :param with_vcs boolean: True if the package is with vcs products (not
                             transformed into archive products)
    :return: The dictionary
             {"project" : (produced project, project path in the archive)}
    '''
    # Create in the working temporary directory the full project tree
    project_tmp_dir = os.path.join(tmp_working_dir, PROJECT_DIR)
    products_pyconf_tmp_dir = os.path.join(project_tmp_dir,
    compil_scripts_tmp_dir = os.path.join(project_tmp_dir,
    env_scripts_tmp_dir = os.path.join(project_tmp_dir,
    patches_tmp_dir = os.path.join(project_tmp_dir,
    application_tmp_dir = os.path.join(project_tmp_dir,
    for directory in [project_tmp_dir,
                      compil_scripts_tmp_dir,
                      application_tmp_dir]:
        src.ensure_path_exists(directory)
    # Create the pyconf that contains the information of the project
    project_pyconf_name = "project.pyconf"
    project_pyconf_file = os.path.join(project_tmp_dir, project_pyconf_name)
    ff = open(project_pyconf_file, "w")
    ff.write(PROJECT_TEMPLATE)
    # Loop over the products to get their pyconf and all the scripts
    # (compilation, environment, patches)
    # and create the pyconf file to add to the project
    lproducts_name = config.APPLICATION.products.keys()
    l_products = src.product.get_products_infos(lproducts_name, config)
    for p_name, p_info in l_products:
        find_product_scripts_and_pyconf(p_name,
                                        compil_scripts_tmp_dir,
                                        products_pyconf_tmp_dir)
    # copy the application pyconf into the project tree as well
    find_application_pyconf(config, application_tmp_dir)
    d_project = {"project" : (project_tmp_dir, PROJECT_DIR )}
def find_product_scripts_and_pyconf(p_name,
                                    compil_scripts_tmp_dir,
                                    products_pyconf_tmp_dir):
    '''Create a specific pyconf file for a given product. Get its environment
    script, its compilation script and patches and put it in the temporary
    working directory. This method is used in the source package in order to
    construct the specific project.

    :param p_name str: The name of the product.
    :param p_info Config: The specific configuration corresponding to the
                          product
    :param config Config: The global configuration.
    :param with_vcs boolean: True if the package is with vcs products (not
                             transformed into archive products)
    :param compil_scripts_tmp_dir str: The path to the temporary compilation
                                       scripts directory of the project.
    :param env_scripts_tmp_dir str: The path to the temporary environment script
                                    directory of the project.
    :param patches_tmp_dir str: The path to the temporary patch scripts
                                directory of the project.
    :param products_pyconf_tmp_dir str: The path to the temporary product
                                        scripts directory of the project.

    NOTE(review): several signature parameters and some statements appear
    elided in this extract — verify against the full file.
    '''
    # read the pyconf of the product
    product_pyconf_path = src.find_file_in_lpath(p_name + ".pyconf",
                                                 config.PATHS.PRODUCTPATH)
    product_pyconf_cfg = src.pyconf.Config(product_pyconf_path)
    # find the compilation script if any
    if src.product.product_has_script(p_info):
        compil_script_path = src.Path(p_info.compil_script)
        compil_script_path.copy(compil_scripts_tmp_dir)
        # the packaged pyconf references the script by basename only
        product_pyconf_cfg[p_info.section].compil_script = os.path.basename(
            p_info.compil_script)
    # find the environment script if any
    if src.product.product_has_env_script(p_info):
        env_script_path = src.Path(p_info.environ.env_script)
        env_script_path.copy(env_scripts_tmp_dir)
        product_pyconf_cfg[p_info.section].environ.env_script = os.path.basename(
            p_info.environ.env_script)
    # find the patches if any
    if src.product.product_has_patches(p_info):
        patches = src.pyconf.Sequence()
        for patch_path in p_info.patches:
            p_path = src.Path(patch_path)
            p_path.copy(patches_tmp_dir)
            patches.append(os.path.basename(patch_path), "")
        product_pyconf_cfg[p_info.section].patches = patches
    # put in the pyconf file the resolved values
    for info in ["git_info", "cvs_info", "svn_info"]:
        for key in p_info[info]:
            product_pyconf_cfg[p_info.section][info][key] = p_info[
    # if the product is not archive, then make it become archive.
    if src.product.product_is_vcs(p_info):
        product_pyconf_cfg[p_info.section].get_source = "archive"
        if not "archive_info" in product_pyconf_cfg[p_info.section]:
            product_pyconf_cfg[p_info.section].addMapping("archive_info",
                                                          src.pyconf.Mapping(product_pyconf_cfg),
        product_pyconf_cfg[p_info.section
                           ].archive_info.archive_name = p_info.name + ".tgz"
    # write the pyconf file to the temporary project location
    product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir,
    ff = open(product_tmp_pyconf_path, 'w')
    ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
    product_pyconf_cfg.__save__(ff, 1)
def find_application_pyconf(config, application_tmp_dir):
    '''Find the application pyconf file and put it in the specific temporary
    directory containing the specific project of a source package.

    :param config Config: The global configuration.
    :param application_tmp_dir str: The path to the temporary application
                                    scripts directory of the project.
    '''
    # read the pyconf of the application
    application_name = config.VARS.application
    application_pyconf_path = src.find_file_in_lpath(
        application_name + ".pyconf",
        config.PATHS.APPLICATIONPATH)
    application_pyconf_cfg = src.pyconf.Config(application_pyconf_path)
    # make the packaged application's workdir relative to salomeTools
    application_pyconf_cfg.APPLICATION.workdir = src.pyconf.Reference(
        application_pyconf_cfg,
        'VARS.salometoolsway + $VARS.sep + ".."')
    # Prevent from compilation in base
    application_pyconf_cfg.APPLICATION.no_base = "yes"
    # write the pyconf file to the temporary application location
    application_tmp_pyconf_path = os.path.join(application_tmp_dir,
                                               application_name + ".pyconf")
    ff = open(application_tmp_pyconf_path, 'w')
    ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
    application_pyconf_cfg.__save__(ff, 1)
def project_package(config, name_project, project_file_path, tmp_working_dir, logger):
    '''Prepare a dictionary that stores all the needed directories and files to
    add in a project package.

    :param project_file_path str: The path to the local project.
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
                                project package
    :return: the dictionary that stores all the needed directories and files to
             add in a project package.
             {label : (path_on_local_machine, path_in_archive)}

    NOTE(review): several lines (the try/except around the project lookup,
    the 'for path in paths' loop header, Reference() arguments, ff.close(),
    return) appear elided in this extract — verify against the full file.
    '''
    # Read the project file and get the directories to add to the package
    project_pyconf_cfg = config.PROJECTS.projects.__getattr__(name_project)
WARNING: inexisting config.PROJECTS.projects.%s, try to read now from:\n%s\n""" % (name_project, project_file_path))
    project_pyconf_cfg = src.pyconf.Config(project_file_path)
    project_pyconf_cfg.PWD = os.path.dirname(project_file_path)
    # map each project path key to the directory name used inside the archive
    paths = {"ARCHIVEPATH" : "archives",
             "APPLICATIONPATH" : "applications",
             "PRODUCTPATH" : "products",
             "MACHINEPATH" : "machines"}
    # Loop over the project paths and add them
    if path not in project_pyconf_cfg:
        # Add the directory to the files to add in the package
        d_project[path] = (project_pyconf_cfg[path], paths[path])
        # Modify the value of the path in the package
        project_pyconf_cfg[path] = src.pyconf.Reference(
            'project_path + "/' + paths[path] + '"')
    # Modify some values
    if "project_path" not in project_pyconf_cfg:
        project_pyconf_cfg.addMapping("project_path",
                                      src.pyconf.Mapping(project_pyconf_cfg),
        project_pyconf_cfg.project_path = src.pyconf.Reference(project_pyconf_cfg,
    # Write the project pyconf file
    project_file_name = os.path.basename(project_file_path)
    project_pyconf_tmp_path = os.path.join(tmp_working_dir, project_file_name)
    ff = open(project_pyconf_tmp_path, 'w')
    ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
    project_pyconf_cfg.__save__(ff, 1)
    d_project["Project hat file"] = (project_pyconf_tmp_path, project_file_name)
def add_readme(config, options, where):
    '''Create the README file of the package and write it in the directory
    given as argument.

    :param config Config: The global configuration.
    :param options OptResult: The options of the launched (sat package) command.
    :param where str: The directory in which the README file is created.
    '''
    readme_path = os.path.join(where, "README")
    # write the README in UTF-8 regardless of the platform default encoding
    with codecs.open(readme_path, "w", 'utf-8') as f:

        # templates for building the header
# This package was generated with sat $version
# Distribution : $dist
In the following, $$ROOT represents the directory where you have installed
SALOME (the directory where this file is located).
        readme_compilation_with_binaries="""
compilation based on the binaries used as prerequisites
=======================================================
If you fail to compile the complete application (for example because
you are not root on your system and cannot install missing packages), you
may try a partial compilation based on the binaries.
For that it is necessary to copy the binaries from BINARIES to INSTALL,
and do some substitutions on cmake and .la files (replace the build directories
The procedure to do it is:
 1) Remove or rename INSTALL directory if it exists
 2) Execute the shell script install_bin.sh:
 3) Use SalomeTool (as explained in Sources section) and compile only the
    modules you need to (with -p option)
        readme_header_tpl=string.Template(readme_header)
        # locate the per-section README templates shipped with salomeTools
        readme_template_path_bin = os.path.join(config.VARS.internal_dir,
                                                "README_BIN.template")
        readme_template_path_bin_launcher = os.path.join(config.VARS.internal_dir,
                                                "README_LAUNCHER.template")
        readme_template_path_bin_virtapp = os.path.join(config.VARS.internal_dir,
                                                "README_BIN_VIRTUAL_APP.template")
        readme_template_path_src = os.path.join(config.VARS.internal_dir,
                                                "README_SRC.template")
        readme_template_path_pro = os.path.join(config.VARS.internal_dir,
                                                "README_PROJECT.template")
        readme_template_path_sat = os.path.join(config.VARS.internal_dir,
                                                "README_SAT.template")

        # prepare substitution dictionary
        d['user'] = config.VARS.user
        d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
        d['version'] = config.INTERNAL.sat_version
        d['dist'] = config.VARS.dist
        f.write(readme_header_tpl.substitute(d)) # write the general header (common)

        if options.binaries or options.sources:
            d['application'] = config.VARS.application
            f.write("# Application: " + d['application'] + "\n")
            if 'KERNEL' in config.APPLICATION.products:
                VersionSalome = src.get_salome_version(config)
                # Case where SALOME has the launcher that uses the SalomeContext API
                if VersionSalome >= 730:
                    d['launcher'] = config.APPLICATION.profile.launcher_name
                    d['virtual_app'] = 'runAppli' # this info is not used now)

        # write the specific sections
        if options.binaries:
            f.write(src.template.substitute(readme_template_path_bin, d))
            if "virtual_app" in d:
                f.write(src.template.substitute(readme_template_path_bin_virtapp, d))
                f.write(src.template.substitute(readme_template_path_bin_launcher, d))
            f.write(src.template.substitute(readme_template_path_src, d))

        if options.binaries and options.sources:
            f.write(readme_compilation_with_binaries)
            f.write(src.template.substitute(readme_template_path_pro, d))
            f.write(src.template.substitute(readme_template_path_sat, d))
def update_config(config, prop, value):
    '''Remove from config.APPLICATION.products the products that have the property given as input.

    :param config Config: The global config.
    :param prop str: The property to filter
    :param value str: The value of the property to filter
    '''
    src.check_config_has_application(config)
    # Collect the matching names first, then delete: removing entries while
    # iterating over the products mapping would invalidate the iteration.
    names_to_drop = [
        name
        for name in config.APPLICATION.products.keys()
        if src.get_property_in_product_cfg(
               src.product.get_product_config(config, name), prop) == value
    ]
    for name in names_to_drop:
        config.APPLICATION.products.__delitem__(name)
1155 '''method that is called when salomeTools is called with --help option.
1157 :return: The text to display for the package command description.
1160 return _("The package command creates an archive.\nThere are 4 kinds of "
1161 "archive, which can be mixed:\n 1- The binary archive. It contains all the product "
1162 "installation directories and a launcher,\n 2- The sources archive."
1163 " It contains the products archives, a project corresponding to "
1164 "the application and salomeTools,\n 3- The project archive. It "
1165 "contains a project (give the project file path as argument),\n 4-"
1166 " The salomeTools archive. It contains salomeTools.\n\nexample:"
1167 "\nsat package SALOME-master --bineries --sources")
1169 def run(args, runner, logger):
1170 '''method that is called when salomeTools is called with package parameter.
1174 (options, args) = parser.parse_args(args)
1176 # Check that a type of package is called, and only one
1177 all_option_types = (options.binaries,
1179 options.project not in ["", None],
1182 # Check if no option for package type
1183 if all_option_types.count(True) == 0:
1184 msg = _("Error: Precise a type for the package\nUse one of the "
1185 "following options: --binaries, --sources, --project or"
1187 logger.write(src.printcolors.printcError(msg), 1)
1188 logger.write("\n", 1)
1191 # The repository where to put the package if not Binary or Source
1192 package_default_path = runner.cfg.LOCAL.workdir
1194 # if the package contains binaries or sources:
1195 if options.binaries or options.sources:
1196 # Check that the command has been called with an application
1197 src.check_config_has_application(runner.cfg)
1199 # Display information
1200 logger.write(_("Packaging application %s\n") % src.printcolors.printcLabel(
1201 runner.cfg.VARS.application), 1)
1203 # Get the default directory where to put the packages
1204 package_default_path = os.path.join(runner.cfg.APPLICATION.workdir,
1206 src.ensure_path_exists(package_default_path)
1208 # if the package contains a project:
1210 # check that the project is visible by SAT
1211 projectNameFile = options.project + ".pyconf"
1213 for i in runner.cfg.PROJECTS.project_file_paths:
1214 baseName = os.path.basename(i)
1215 if baseName == projectNameFile:
1219 if foundProject is None:
1220 local_path = os.path.join(runner.cfg.VARS.salometoolsway,
1223 msg = _("""ERROR: the project %(1)s is not visible by salomeTools.
1227 Please add it in file:
1229 {"1": options.project, "2": "\n ".join(runner.cfg.PROJECTS.project_file_paths), "3": local_path})
1230 logger.write(src.printcolors.printcError(msg), 1)
1231 logger.write("\n", 1)
1234 options.project_file_path = foundProject
1235 src.printcolors.print_value(logger, "Project path", options.project_file_path, 2)
1237 # Remove the products that are filtered by the --without_property option
1238 if options.without_property:
1239 [prop, value] = options.without_property.split(":")
1240 update_config(runner.cfg, prop, value)
1242 # get the name of the archive or build it
1244 if os.path.basename(options.name) == options.name:
1245 # only a name (not a path)
1246 archive_name = options.name
1247 dir_name = package_default_path
1249 archive_name = os.path.basename(options.name)
1250 dir_name = os.path.dirname(options.name)
1252 # suppress extension
1253 if archive_name[-len(".tgz"):] == ".tgz":
1254 archive_name = archive_name[:-len(".tgz")]
1255 if archive_name[-len(".tar.gz"):] == ".tar.gz":
1256 archive_name = archive_name[:-len(".tar.gz")]
1260 dir_name = package_default_path
1261 if options.binaries or options.sources:
1262 archive_name = runner.cfg.APPLICATION.name
1264 if options.binaries:
1265 archive_name += "-"+runner.cfg.VARS.dist
1268 archive_name += "-SRC"
1269 if options.with_vcs:
1270 archive_name += "-VCS"
1273 project_name = options.project
1274 archive_name += ("PROJECT-" + project_name)
1277 archive_name += ("salomeTools_" + runner.cfg.INTERNAL.sat_version)
1278 if len(archive_name)==0: # no option worked
1279 msg = _("Error: Cannot name the archive\n"
1280 " check if at least one of the following options was "
1281 "selected : --binaries, --sources, --project or"
1283 logger.write(src.printcolors.printcError(msg), 1)
1284 logger.write("\n", 1)
1287 path_targz = os.path.join(dir_name, archive_name + ".tgz")
1289 src.printcolors.print_value(logger, "Package path", path_targz, 2)
1291 # Create a working directory for all files that are produced during the
1292 # package creation and that will be removed at the end of the command
1293 tmp_working_dir = os.path.join(runner.cfg.VARS.tmp_root,
1294 runner.cfg.VARS.datehour)
1295 src.ensure_path_exists(tmp_working_dir)
1296 logger.write("\n", 5)
1297 logger.write(_("The temporary working directory: %s\n" % tmp_working_dir),5)
1299 logger.write("\n", 3)
1301 msg = _("Preparation of files to add to the archive")
1302 logger.write(src.printcolors.printcLabel(msg), 2)
1303 logger.write("\n", 2)
1305 d_files_to_add={} # content of the archive
1307 # a dict to hold paths that will need to be substitute for users recompilations
1308 d_paths_to_substitute={}
1310 if options.binaries:
1311 d_bin_files_to_add = binary_package(runner.cfg,
1315 # for all binaries dir, store the substitution that will be required
1316 # for extra compilations
1317 for key in d_bin_files_to_add:
1318 if key.endswith("(bin)"):
1319 source_dir = d_bin_files_to_add[key][0]
1320 path_in_archive = d_bin_files_to_add[key][1].replace("BINARIES-" + runner.cfg.VARS.dist,"INSTALL")
1321 if os.path.basename(source_dir)==os.path.basename(path_in_archive):
1322 # if basename is the same we will just substitute the dirname
1323 d_paths_to_substitute[os.path.dirname(source_dir)]=\
1324 os.path.dirname(path_in_archive)
1326 d_paths_to_substitute[source_dir]=path_in_archive
1328 d_files_to_add.update(d_bin_files_to_add)
1331 d_files_to_add.update(source_package(runner,
1336 if options.binaries:
1337 # for archives with bin and sources we provide a shell script able to
1338 # install binaries for compilation
1339 file_install_bin=produce_install_bin_file(runner.cfg,logger,
1341 d_paths_to_substitute,
1343 d_files_to_add.update({"install_bin" : (file_install_bin, "install_bin.sh")})
1344 logger.write("substitutions that need to be done later : \n", 5)
1345 logger.write(str(d_paths_to_substitute), 5)
1346 logger.write("\n", 5)
1348 # --salomeTool option is not considered when --sources is selected, as this option
1349 # already brings salomeTool!
1351 d_files_to_add.update({"salomeTools" : (runner.cfg.VARS.salometoolsway, "")})
1355 DBG.write("config for package %s" % project_name, runner.cfg)
1356 d_files_to_add.update(project_package(runner.cfg, project_name, options.project_file_path, tmp_working_dir, logger))
1358 if not(d_files_to_add):
1359 msg = _("Error: Empty dictionnary to build the archive!\n")
1360 logger.write(src.printcolors.printcError(msg), 1)
1361 logger.write("\n", 1)
1364 # Add the README file in the package
1365 local_readme_tmp_path = add_readme(runner.cfg,
1368 d_files_to_add["README"] = (local_readme_tmp_path, "README")
1370 # Add the additional files of option add_files
1371 if options.add_files:
1372 for file_path in options.add_files:
1373 if not os.path.exists(file_path):
1374 msg = _("WARNING: the file %s is not accessible.\n" % file_path)
1376 file_name = os.path.basename(file_path)
1377 d_files_to_add[file_name] = (file_path, file_name)
1379 logger.write("\n", 2)
1381 logger.write(src.printcolors.printcLabel(_("Actually do the package")), 2)
1382 logger.write("\n", 2)
1385 # Creating the object tarfile
1386 tar = tarfile.open(path_targz, mode='w:gz')
1388 # get the filtering function if needed
1389 filter_function = exclude_VCS_and_extensions
1391 # Add the files to the tarfile object
1392 res = add_files(tar, archive_name, d_files_to_add, logger, f_exclude=filter_function)
1394 except KeyboardInterrupt:
1395 logger.write(src.printcolors.printcError("\nERROR: forced interruption\n"), 1)
1396 logger.write(_("Removing the temporary working directory '%s'... ") % tmp_working_dir, 1)
1397 # remove the working directory
1398 shutil.rmtree(tmp_working_dir)
1399 logger.write(_("OK"), 1)
1400 logger.write(_("\n"), 1)
1403 # remove the working directory
1404 DBG.tofix("make shutil.rmtree(tmp_working_dir) effective", "", True)
1405 # shutil.rmtree(tmp_working_dir)
1407 # Print again the path of the package
1408 logger.write("\n", 2)
1409 src.printcolors.print_value(logger, "Package path", path_targz, 2)