3 # Copyright (C) 2010-2012 CEA/DEN
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 # Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
30 from application import get_SALOME_modules
31 import src.debug as DBG
# Sub-directory names used inside a generated source package.
38 ARCHIVE_DIR = "ARCHIVES"
39 PROJECT_DIR = "PROJECT"
# Directory names and file extensions filtered out of product archives
# (consumed by exclude_VCS_and_extensions below).
41 IGNORED_DIRS = [".git", ".svn"]
42 IGNORED_EXTENSIONS = []
# Pyconf template written as "project.pyconf" into the PROJECT directory of a
# source package (see create_project_for_src_package).
# NOTE(review): this extract is line-sampled (embedded original line numbers are
# non-contiguous); the closing triple quote of this template is not visible here.
44 PROJECT_TEMPLATE = """#!/usr/bin/env python
47 # The path to the archive root directory
48 root_path : $PWD + "/../"
50 project_path : $PWD + "/"
52 # Where to search the archives of the products
53 ARCHIVEPATH : $root_path + "ARCHIVES"
54 # Where to search the pyconf of the applications
55 APPLICATIONPATH : $project_path + "applications/"
56 # Where to search the pyconf of the products
57 PRODUCTPATH : $project_path + "products/"
58 # Where to search the pyconf of the jobs of the project
59 JOBPATH : $project_path + "jobs/"
60 # Where to search the pyconf of the machines of the project
61 MACHINEPATH : $project_path + "machines/"
# Template for the "local.pyconf" shipped with the packaged salomeTools
# (written by add_salomeTools); points the packaged sat at the PROJECT dir.
# NOTE(review): original lines 65-71 and 73-78 are absent from this extract.
64 LOCAL_TEMPLATE = ("""#!/usr/bin/env python
72   archive_dir : 'default'
79   project_file_paths : [$VARS.salometoolsway + $VARS.sep + \"..\" + $VARS.sep"""
80 """ + \"""" + PROJECT_DIR + """\" + $VARS.sep + "project.pyconf"]
84 # Define all possible option for the package command :  sat package <options>
85 parser = src.options.Options()
86 parser.add_option('b', 'binaries', 'boolean', 'binaries',
87     _('Optional: Produce a binary package.'), False)
88 parser.add_option('f', 'force_creation', 'boolean', 'force_creation',
89     _('Optional: Only binary package: produce the archive even if '
90       'there are some missing products.'), False)
91 parser.add_option('s', 'sources', 'boolean', 'sources',
92     _('Optional: Produce a compilable archive of the sources of the '
93       'application.'), False)
94 parser.add_option('', 'with_vcs', 'boolean', 'with_vcs',
95     _('Optional: Only source package: do not make archive of vcs products.'),
# NOTE(review): the default-value argument line (original line 96) is missing
# from this line-sampled extract.
97 parser.add_option('p', 'project', 'string', 'project',
98     _('Optional: Produce an archive that contains a project.'), "")
99 parser.add_option('t', 'salometools', 'boolean', 'sat',
100     _('Optional: Produce an archive that contains salomeTools.'), False)
101 parser.add_option('n', 'name', 'string', 'name',
102     _('Optional: The name or full path of the archive.'), None)
103 parser.add_option('', 'add_files', 'list2', 'add_files',
104     _('Optional: The list of additional files to add to the archive.'), [])
105 parser.add_option('', 'without_commercial', 'boolean', 'without_commercial',
106     _('Optional: do not add commercial licence.'), False)
107 parser.add_option('', 'without_properties', 'properties', 'without_properties',
108     _('Optional: Filter the products by their properties.\n\tSyntax: '
109       '--without_properties <property>:<value>'))
112 def add_files(tar, name_archive, d_content, logger, f_exclude=None):
113     '''Create an archive containing all directories and files that are given in
114        the d_content argument.
116     :param tar tarfile: The tarfile instance used to make the archive.
117     :param name_archive str: The name of the archive to make.
118     :param d_content dict: The dictionary that contain all directories and files
119                            to add in the archive.
121                          (path_on_local_machine, path_in_archive)
122     :param logger Logger: the logging instance
123     :param f_exclude Function: the function that filters
124     :return: 0 if success, 1 if not.
    # NOTE(review): line-sampled extract -- the loop header over the sorted
    # names (original lines 134-135) and the `try:` opening the block around
    # tar.add (original line 144) are not visible here.
127     # get the max length of the messages in order to make the display
128     max_len = len(max(d_content.keys(), key=len))
131     # loop over each directory or file stored in the d_content dictionary
132     names = sorted(d_content.keys())
133     DBG.write("add tar names", names)
136         # display information
137         len_points = max_len - len(name)
138         local_path, archive_path = d_content[name]
139         in_archive = os.path.join(name_archive, archive_path)
140         logger.write(name + " " + len_points * "." + " "+ in_archive + " ", 3)
141         # Get the local path and the path in archive
142         # of the directory or file to add
143         # Add it in the archive
            # NOTE(review): TarFile.add's `exclude=` keyword was removed in
            # Python 3.7 (use `filter=`) -- confirm target interpreter version.
145             tar.add(local_path, arcname=in_archive, exclude=f_exclude)
146             logger.write(src.printcolors.printcSuccess(_("OK")), 3)
147         except Exception as e:
148             logger.write(src.printcolors.printcError(_("KO ")), 3)
149             logger.write(str(e), 3)
151         logger.write("\n", 3)
154 def exclude_VCS_and_extensions(filename):
155     ''' The function that is used to exclude from package the link to the
156         VCS repositories (like .git)
158     :param filename Str: The filename to exclude (or not).
159     :return: True if the file has to be excluded
    # NOTE(review): line-sampled extract -- the `return True` lines inside the
    # two loops and the final `return False` (original lines 164, 167-169) are
    # not visible here.
162     for dir_name in IGNORED_DIRS:
163         if dir_name in filename:
165     for extension in IGNORED_EXTENSIONS:
166         if filename.endswith(extension):
170 def produce_relative_launcher(config,
175                               with_commercial=True):
176     '''Create a specific SALOME launcher for the binary package. This launcher
179     :param config Config: The global configuration.
180     :param logger Logger: the logging instance
181     :param file_dir str: the directory where to put the launcher
182     :param file_name str: The launcher name
183     :param binaries_dir_name str: the name of the repository where the binaries
185     :return: the path of the produced launcher
    # NOTE(review): line-sampled extract -- intermediate parameter lines of the
    # signature and several `else:` lines (original 196, 212, 218) are missing.
189     # get KERNEL installation path
190     kernel_root_dir = os.path.join(binaries_dir_name, "KERNEL")
192     # set kernel bin dir (considering fhs property)
193     kernel_cfg = src.product.get_product_config(config, "KERNEL")
194     if src.get_property_in_product_cfg(kernel_cfg, "fhs"):
195         bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin")
197         bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin","salome")
199     # check if the application contains an application module
200     l_product_info = src.product.get_products_infos(config.APPLICATION.products.keys(),
202     salome_application_name="Not defined"
203     for prod_name, prod_info in l_product_info:
204         # look for a salome application
205         if src.get_property_in_product_cfg(prod_info, "is_salome_application") == "yes":
206             salome_application_name=prod_info.name
208     # if the application contains an application module, we set ABSOLUTE_APPLI_PATH to it
209     # if not we set it to KERNEL_INSTALL_DIR, which is sufficient, except for salome test
210     if salome_application_name == "Not defined":
211         app_root_dir=kernel_root_dir
213         app_root_dir=os.path.join(binaries_dir_name, salome_application_name)
215     # Get the launcher template and do substitutions
216     if "python3" in config.APPLICATION and config.APPLICATION.python3 == "yes":
217         withProfile = src.fileEnviron.withProfile3
219         withProfile = src.fileEnviron.withProfile
221     withProfile = withProfile.replace(
222         "ABSOLUTE_APPLI_PATH'] = 'KERNEL_INSTALL_DIR'",
223         "ABSOLUTE_APPLI_PATH'] = out_dir_Path + '" + config.VARS.sep + app_root_dir + "'")
224     withProfile = withProfile.replace(
225         " 'BIN_KERNEL_INSTALL_DIR'",
226         " out_dir_Path + '" + config.VARS.sep + bin_kernel_install_dir + "'")
228     before, after = withProfile.split("# here your local standalone environment\n")
230     # create an environment file writer
231     writer = src.environment.FileEnvWriter(config,
236     filepath = os.path.join(file_dir, file_name)
237     # open the file and write into it
238     launch_file = open(filepath, "w")
239     launch_file.write(before)
241     writer.write_cfgForPy_file(launch_file,
242                                for_package = binaries_dir_name,
243                                with_commercial=with_commercial)
244     launch_file.write(after)
247     # Little hack to put out_dir_Path outside the strings
248     src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' )
250     # A hack to put a call to a file for distene licence.
251     # It does nothing to an application that has no distene product
252     hack_for_distene_licence(filepath)
254     # change the rights in order to make the file executable for everybody
266 def hack_for_distene_licence(filepath):
267     '''Replace the distene licence env variable by a call to a file.
269     :param filepath Str: The path to the launcher to modify.
    # NOTE(review): line-sampled extract -- the assignment of `fileout`
    # (presumably original line 272) and the loop/guard lines around
    # num_line (281-289) are not visible here; confirm against full source.
271     shutil.move(filepath, filepath + "_old")
273     filein = filepath + "_old"
274     fin = open(filein, "r")
275     fout = open(fileout, "w")
276     text = fin.readlines()
277     # Find the Distene section
279     for i,line in enumerate(text):
280         if "# Set DISTENE License" in line:
284     # No distene product, there is nothing to do
290     del text[num_line +1]
291     del text[num_line +1]
292     text_to_insert =""" import imp
294 distene = imp.load_source('distene_licence', '/data/tmpsalome/salome/prerequis/install/LICENSE/dlim8.var.py')
295 distene.set_distene_variables(context)
298     text.insert(num_line + 1, text_to_insert)
305 def produce_relative_env_files(config,
309     '''Create some specific environment files for the binary package. These
310        files use relative paths.
312     :param config Config: The global configuration.
313     :param logger Logger: the logging instance
314     :param file_dir str: the directory where to put the files
315     :param binaries_dir_name str: the name of the repository where the binaries
317     :return: the list of path of the produced environment files
    # NOTE(review): line-sampled extract -- remaining signature parameters and
    # FileEnvWriter argument lines (original 306-308, 322-326) are missing here.
320     # create an environment file writer
321     writer = src.environment.FileEnvWriter(config,
327     filepath = writer.write_env_file("env_launch.sh",
330                                      for_package = binaries_dir_name)
332     # Little hack to put out_dir_Path as environment variable
333     src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )
335     # change the rights in order to make the file executable for everybody
347 def produce_install_bin_file(config,
352     '''Create a bash shell script which does substitutions in BINARIES dir
353        in order to use it for extra compilations.
355     :param config Config: The global configuration.
356     :param logger Logger: the logging instance
357     :param file_dir str: the directory where to put the files
358     :param d_sub, dict: the dictionary that contains the substitutions to be done
359     :param file_name str: the name of the install script file
360     :return: the produced file
    # NOTE(review): line-sampled extract -- remaining signature parameters and
    # the `for key in d_sub` loop headers (original 375, 378) are missing here.
364     filepath = os.path.join(file_dir, file_name)
365     # open the file and write into it
366     # use codec utf-8 as sat variables are in unicode
367     with codecs.open(filepath, "w", 'utf-8') as installbin_file:
368         installbin_template_path = os.path.join(config.VARS.internal_dir,
369                                         "INSTALL_BIN.template")
371         # build the name of the directory that will contain the binaries
372         binaries_dir_name = "BINARIES-" + config.VARS.dist
373         # build the substitution loop
374         loop_cmd = "for f in $(grep -RIl"
376             loop_cmd += " -e "+ key
377         loop_cmd += ' INSTALL); do\n     sed -i "\n'
379             loop_cmd += "    s?" + key + "?$(pwd)/" + d_sub[key] + "?g\n"
380         loop_cmd += '     " $f\ndone'
383         d["BINARIES_DIR"] = binaries_dir_name
384         d["SUBSTITUTION_LOOP"]=loop_cmd
386         # substitute the template and write it in file
387         content=src.template.substitute(installbin_template_path, d)
388         installbin_file.write(content)
389     # change the rights in order to make the file executable for everybody
401 def product_appli_creation_script(config,
405     '''Create a script that can produce an application (EDF style) in the binary
408     :param config Config: The global configuration.
409     :param logger Logger: the logging instance
410     :param file_dir str: the directory where to put the file
411     :param binaries_dir_name str: the name of the repository where the binaries
413     :return: the path of the produced script file
    # NOTE(review): line-sampled extract -- remaining signature parameters, the
    # initialisation of text_to_add, `continue`/`else:` lines and the end of the
    # os.chmod call are missing here; confirm against full source.
416     template_name = "create_appli.py.for_bin_packages.template"
417     template_path = os.path.join(config.VARS.internal_dir, template_name)
418     text_to_fill = open(template_path, "r").read()
419     text_to_fill = text_to_fill.replace("TO BE FILLED 1",
420                                         '"' + binaries_dir_name + '"')
423     for product_name in get_SALOME_modules(config):
424         product_info = src.product.get_product_config(config, product_name)
426         if src.product.product_is_smesh_plugin(product_info):
429         if 'install_dir' in product_info and bool(product_info.install_dir):
430             if src.product.product_is_cpp(product_info):
432                 for cpp_name in src.product.get_product_components(product_info):
433                     line_to_add = ("<module name=\"" +
435                                    "\" gui=\"yes\" path=\"''' + "
436                                    "os.path.join(dir_bin_name, \"" +
437                                    cpp_name + "\") + '''\"/>")
440                 line_to_add = ("<module name=\"" +
442                                "\" gui=\"yes\" path=\"''' + "
443                                "os.path.join(dir_bin_name, \"" +
444                                product_name + "\") + '''\"/>")
445         text_to_add += line_to_add + "\n"
447     filled_text = text_to_fill.replace("TO BE FILLED 2", text_to_add)
449     tmp_file_path = os.path.join(file_dir, "create_appli.py")
450     ff = open(tmp_file_path, "w")
451     ff.write(filled_text)
454     # change the rights in order to make the file executable for everybody
455     os.chmod(tmp_file_path,
466 def binary_package(config, logger, options, tmp_working_dir):
467     '''Prepare a dictionary that stores all the needed directories and files to
468        add in a binary package.
470     :param config Config: The global configuration.
471     :param logger Logger: the logging instance
472     :param options OptResult: the options of the launched command
473     :param tmp_working_dir str: The temporary local directory containing some
474                                 specific directories or files needed in the
476     :return: the dictionary that stores all the needed directories and files to
477              add in a binary package.
478              {label : (path_on_local_machine, path_in_archive)}
    # NOTE(review): line-sampled extract -- list initialisations, `continue` and
    # `else:` lines, and several multi-line call arguments are missing between
    # the embedded original line numbers; confirm against full source.
482     # Get the list of product installation to add to the archive
483     l_products_name = sorted(config.APPLICATION.products.keys())
484     l_product_info = src.product.get_products_infos(l_products_name,
489     l_sources_not_present = []
490     generate_mesa_launcher = False  # a flag to know if we generate a mesa launcher
491     if ("APPLICATION" in config and
492         "properties" in config.APPLICATION and
493         "mesa_launcher_in_package" in config.APPLICATION.properties and
494         config.APPLICATION.properties.mesa_launcher_in_package == "yes") :
495             generate_mesa_launcher=True
497     for prod_name, prod_info in l_product_info:
498         # skip product with property not_in_package set to yes
499         if src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes":
502         # Add the sources of the products that have the property
503         # sources_in_package : "yes"
504         if src.get_property_in_product_cfg(prod_info,
505                                            "sources_in_package") == "yes":
506             if os.path.exists(prod_info.source_dir):
507                 l_source_dir.append((prod_name, prod_info.source_dir))
509                 l_sources_not_present.append(prod_name)
511         # ignore the native and fixed products for install directories
512         if (src.product.product_is_native(prod_info)
513                 or src.product.product_is_fixed(prod_info)
514                 or not src.product.product_compiles(prod_info)):
516         if src.product.check_installation(prod_info):
517             l_install_dir.append((prod_name, prod_info.install_dir))
519             l_not_installed.append(prod_name)
521         # Add also the cpp generated modules (if any)
522         if src.product.product_is_cpp(prod_info):
524             for name_cpp in src.product.get_product_components(prod_info):
525                 install_dir = os.path.join(config.APPLICATION.workdir,
527                 if os.path.exists(install_dir):
528                     l_install_dir.append((name_cpp, install_dir))
530                     l_not_installed.append(name_cpp)
532     # check the name of the directory that (could) contains the binaries
533     # from previous detar
534     binaries_from_detar = os.path.join(config.APPLICATION.workdir, "BINARIES-" + config.VARS.dist)
535     if os.path.exists(binaries_from_detar):
537 WARNING: existing binaries directory from previous detar installation:
539     To make new package from this, you have to:
540     1) install binaries in INSTALL directory with the script "install_bin.sh"
541        see README file for more details
542     2) or recompile everything in INSTALL with "sat compile" command
543        this step is long, and requires some linux packages to be installed
545 """ % binaries_from_detar)
547     # Print warning or error if there are some missing products
548     if len(l_not_installed) > 0:
549         text_missing_prods = ""
550         for p_name in l_not_installed:
551             text_missing_prods += "-" + p_name + "\n"
552         if not options.force_creation:
553             msg = _("ERROR: there are missing products installations:")
554             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
559             msg = _("WARNING: there are missing products installations:")
560             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
564     # Do the same for sources
565     if len(l_sources_not_present) > 0:
566         text_missing_prods = ""
567         for p_name in l_sources_not_present:
568             text_missing_prods += "-" + p_name + "\n"
569         if not options.force_creation:
570             msg = _("ERROR: there are missing products sources:")
571             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
576             msg = _("WARNING: there are missing products sources:")
577             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
581     # construct the name of the directory that will contain the binaries
582     binaries_dir_name = "BINARIES-" + config.VARS.dist
584     # construct the correlation table between the product names, their
585     # actual install directories and their install directory in archive
587     for prod_name, install_dir in l_install_dir:
588         path_in_archive = os.path.join(binaries_dir_name, prod_name)
589         d_products[prod_name + " (bin)"] = (install_dir, path_in_archive)
591     for prod_name, source_dir in l_source_dir:
592         path_in_archive = os.path.join("SOURCES", prod_name)
593         d_products[prod_name + " (sources)"] = (source_dir, path_in_archive)
595     # for packages of SALOME applications including KERNEL,
596     # we produce a salome launcher or a virtual application (depending on salome version)
597     if 'KERNEL' in config.APPLICATION.products:
598         VersionSalome = src.get_salome_version(config)
599         # Case where SALOME has the launcher that uses the SalomeContext API
600         if VersionSalome >= 730:
601             # create the relative launcher and add it to the files to add
602             launcher_name = src.get_launcher_name(config)
603             launcher_package = produce_relative_launcher(config,
608                                                  not(options.without_commercial))
609             d_products["launcher"] = (launcher_package, launcher_name)
611             # if the application contains mesa products, we generate in addition to the
612             # classical salome launcher a launcher using mesa and called mesa_salome
613             # (the mesa launcher will be used for remote usage through ssh).
614             if generate_mesa_launcher:
615                 #if there is one : store the use_mesa property
616                 restore_use_mesa_option=None
617                 if ('properties' in config.APPLICATION and
618                     'use_mesa' in config.APPLICATION.properties):
619                     restore_use_mesa_option = config.APPLICATION.properties.use_mesa
621                 # activate mesa property, and generate a mesa launcher
622                 src.activate_mesa_property(config)  #activate use_mesa property
623                 launcher_mesa_name="mesa_"+launcher_name
624                 launcher_package_mesa = produce_relative_launcher(config,
629                                                  not(options.without_commercial))
630                 d_products["launcher (mesa)"] = (launcher_package_mesa, launcher_mesa_name)
632                 # if there was a use_mesa value, we restore it
633                 # else we set it to the default value "no"
634                 if restore_use_mesa_option != None:
635                     config.APPLICATION.properties.use_mesa=restore_use_mesa_option
637                     config.APPLICATION.properties.use_mesa="no"
640                 # if we mix binaries and sources, we add a copy of the launcher,
641                 # prefixed with "bin",in order to avoid clashes
642                 d_products["launcher (copy)"] = (launcher_package, "bin"+launcher_name)
644             # Provide a script for the creation of an application EDF style
645             appli_script = product_appli_creation_script(config,
650             d_products["appli script"] = (appli_script, "create_appli.py")
652     # Put also the environment file
653     env_file = produce_relative_env_files(config,
658     d_products["environment file"] = (env_file, "env_launch.sh")
662 def source_package(sat, config, logger, options, tmp_working_dir):
663     '''Prepare a dictionary that stores all the needed directories and files to
664        add in a source package.
666     :param config Config: The global configuration.
667     :param logger Logger: the logging instance
668     :param options OptResult: the options of the launched command
669     :param tmp_working_dir str: The temporary local directory containing some
670                                 specific directories or files needed in the
672     :return: the dictionary that stores all the needed directories and files to
673              add in a source package.
674              {label : (path_on_local_machine, path_in_archive)}
    # NOTE(review): line-sampled extract -- multi-line call arguments, the
    # `try:`/`except`/`finally` lines around the symlink creation, and the
    # return statement are missing between the embedded original line numbers.
678     # Get all the products that are prepared using an archive
679     logger.write("Find archive products ... ")
680     d_archives, l_pinfo_vcs = get_archives(config, logger)
681     logger.write("Done\n")
683     if not options.with_vcs and len(l_pinfo_vcs) > 0:
684         # Make archives with the products that are not prepared using an archive
685         # (git, cvs, svn, etc)
686         logger.write("Construct archives for vcs products ... ")
687         d_archives_vcs = get_archives_vcs(l_pinfo_vcs,
692         logger.write("Done\n")
695     logger.write("Create the project ... ")
696     d_project = create_project_for_src_package(config,
699     logger.write("Done\n")
702     tmp_sat = add_salomeTools(config, tmp_working_dir)
703     d_sat = {"salomeTools" : (tmp_sat, "salomeTools")}
705     # Add a sat symbolic link if not win
706     if not src.architecture.is_windows():
707         tmp_satlink_path = os.path.join(tmp_working_dir, 'sat')
711             # In the jobs, os.getcwd() can fail
712             t = config.LOCAL.workdir
713         os.chdir(tmp_working_dir)
714         if os.path.lexists(tmp_satlink_path):
715             os.remove(tmp_satlink_path)
716         os.symlink(os.path.join('salomeTools', 'sat'), 'sat')
719         d_sat["sat link"] = (tmp_satlink_path, "sat")
721     d_source = src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
724 def get_archives(config, logger):
725     '''Find all the products that are get using an archive and all the products
726        that are get using a vcs (git, cvs, svn) repository.
728     :param config Config: The global configuration.
729     :param logger Logger: the logging instance
730     :return: the dictionary {name_product :
731              (local path of its archive, path in the package of its archive )}
732              and the list of specific configuration corresponding to the vcs
    # NOTE(review): line-sampled extract -- dict/list initialisations,
    # `continue` lines and the `else:` of the archive test (original 733-741,
    # 745, 749, 753, 755) are not visible here.
736     # Get the list of product informations
737     l_products_name = config.APPLICATION.products.keys()
738     l_product_info = src.product.get_products_infos(l_products_name,
742     for p_name, p_info in l_product_info:
743         # skip product with property not_in_package set to yes
744         if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
746         # ignore the native and fixed products
747         if (src.product.product_is_native(p_info)
748                 or src.product.product_is_fixed(p_info)):
750         if p_info.get_source == "archive":
751             archive_path = p_info.archive_info.archive_name
752             archive_name = os.path.basename(archive_path)
754             l_pinfo_vcs.append((p_name, p_info))
756         d_archives[p_name] = (archive_path,
757                               os.path.join(ARCHIVE_DIR, archive_name))
758     return d_archives, l_pinfo_vcs
760 def add_salomeTools(config, tmp_working_dir):
761     '''Prepare a version of salomeTools that has a specific local.pyconf file
762        configured for a source package.
764     :param config Config: The global configuration.
765     :param tmp_working_dir str: The temporary local directory containing some
766                                 specific directories or files needed in the
768     :return: The path to the local salomeTools directory to add in the package
    # NOTE(review): line-sampled extract -- the removal of the matched pyconf
    # files (original 787-790) and the close of `ff` (793-794) are missing here.
771     # Copy sat in the temporary working directory
772     sat_tmp_path = src.Path(os.path.join(tmp_working_dir, "salomeTools"))
773     sat_running_path = src.Path(config.VARS.salometoolsway)
774     sat_running_path.copy(sat_tmp_path)
776     # Update the local.pyconf file that contains the path to the project
777     local_pyconf_name = "local.pyconf"
778     local_pyconf_dir = os.path.join(tmp_working_dir, "salomeTools", "data")
779     local_pyconf_file = os.path.join(local_pyconf_dir, local_pyconf_name)
780     # Remove the .pyconf file in the root directory of salomeTools if there is
781     # any. (For example when launching jobs, a pyconf file describing the jobs
782     # can be here and is not useful)
783     files_or_dir_SAT = os.listdir(os.path.join(tmp_working_dir, "salomeTools"))
784     for file_or_dir in files_or_dir_SAT:
785         if file_or_dir.endswith(".pyconf") or file_or_dir.endswith(".txt"):
786             file_path = os.path.join(tmp_working_dir,
791     ff = open(local_pyconf_file, "w")
792     ff.write(LOCAL_TEMPLATE)
795     return sat_tmp_path.path
797 def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir):
798     '''For sources package that require that all products are get using an
799        archive, one has to create some archive for the vcs products.
800        So this method calls the clean and source command of sat and then create
803     :param l_pinfo_vcs List: The list of specific configuration corresponding to
805     :param sat Sat: The Sat instance that can be called to clean and source the
807     :param config Config: The global configuration.
808     :param logger Logger: the logging instance
809     :param tmp_working_dir str: The temporary local directory containing some
810                                 specific directories or files needed in the
812     :return: the dictionary that stores all the archives to add in the source
813              package. {label : (path_on_local_machine, path_in_archive)}
    # NOTE(review): line-sampled extract -- original lines 826-827, 839, 841 and
    # 843 are missing; the indentation of the lines following the disabled
    # clean block cannot be fully confirmed from this view.
816     # clean the source directory of all the vcs products, then use the source
817     # command and thus construct an archive that will not contain the patches
818     l_prod_names = [pn for pn, __ in l_pinfo_vcs]
819     if False: # clean is dangerous in user/SOURCES, fixed in tmp_local_working_dir
820         logger.write(_("\nclean sources\n"))
821         args_clean = config.VARS.application
822         args_clean += " --sources --products "
823         args_clean += ",".join(l_prod_names)
824         logger.write("WARNING: get_archives_vcs clean\n         '%s'\n" % args_clean, 1)
825         sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)
828         logger.write(_("get sources\n"))
829         args_source = config.VARS.application
830         args_source += " --products "
831         args_source += ",".join(l_prod_names)
832         svgDir = sat.cfg.APPLICATION.workdir
833         tmp_local_working_dir = os.path.join(sat.cfg.APPLICATION.workdir, "tmp_package")  # to avoid too much big files in /tmp
834         sat.cfg.APPLICATION.workdir = tmp_local_working_dir
835         # DBG.write("SSS sat config.APPLICATION.workdir", sat.cfg.APPLICATION, True)
836         # DBG.write("sat config id", id(sat.cfg), True)
837         # shit as config is not same id() as for sat.source()
838         # sat.source(args_source, batch=True, verbose=5, logger_add_link = logger)
840         source.run(args_source, sat, logger) #use this mode as runner.cfg reference
842         # make the new archives
844         for pn, pinfo in l_pinfo_vcs:
845             path_archive = make_archive(pn, pinfo, tmp_local_working_dir)
846             logger.write("make archive vcs '%s'\n" % path_archive)
847             d_archives_vcs[pn] = (path_archive,
848                                   os.path.join(ARCHIVE_DIR, pn + ".tgz"))
849         sat.cfg.APPLICATION.workdir = svgDir
850         # DBG.write("END sat config", sat.cfg.APPLICATION, True)
851         return d_archives_vcs
853 def make_archive(prod_name, prod_info, where):
854     '''Create an archive of a product by searching its source directory.
856     :param prod_name str: The name of the product.
857     :param prod_info Config: The specific configuration corresponding to the
859     :param where str: The path of the repository where to put the resulting
861     :return: The path of the resulting archive
    # NOTE(review): line-sampled extract -- the arcname argument line (868) and
    # the tar_prod.close() (870) are not visible here. Also: TarFile.add's
    # `exclude=` keyword was removed in Python 3.7 (use `filter=`) -- confirm
    # target interpreter version.
864     path_targz_prod = os.path.join(where, prod_name + ".tgz")
865     tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
866     local_path = prod_info.source_dir
867     tar_prod.add(local_path,
869                  exclude=exclude_VCS_and_extensions)
871     return path_targz_prod
873 def create_project_for_src_package(config, tmp_working_dir, with_vcs):
874     '''Create a specific project for a source package.
876     :param config Config: The global configuration.
877     :param tmp_working_dir str: The temporary local directory containing some
878                                 specific directories or files needed in the
880     :param with_vcs boolean: True if the package is with vcs products (not
881                              transformed into archive products)
882     :return: The dictionary
883              {"project" : (produced project, project path in the archive)}
    # NOTE(review): line-sampled extract -- sub-directory name arguments,
    # `continue` lines, some call arguments, ff.close() and the return
    # statement are missing between the embedded original line numbers.
887     # Create in the working temporary directory the full project tree
888     project_tmp_dir = os.path.join(tmp_working_dir, PROJECT_DIR)
889     products_pyconf_tmp_dir = os.path.join(project_tmp_dir,
891     compil_scripts_tmp_dir = os.path.join(project_tmp_dir,
894     env_scripts_tmp_dir = os.path.join(project_tmp_dir,
897     patches_tmp_dir = os.path.join(project_tmp_dir,
900     application_tmp_dir = os.path.join(project_tmp_dir,
902     for directory in [project_tmp_dir,
903                       compil_scripts_tmp_dir,
906                       application_tmp_dir]:
907         src.ensure_path_exists(directory)
909     # Create the pyconf that contains the information of the project
910     project_pyconf_name = "project.pyconf"
911     project_pyconf_file = os.path.join(project_tmp_dir, project_pyconf_name)
912     ff = open(project_pyconf_file, "w")
913     ff.write(PROJECT_TEMPLATE)
916     # Loop over the products to get their pyconf and all the scripts
917     # (compilation, environment, patches)
918     # and create the pyconf file to add to the project
919     lproducts_name = config.APPLICATION.products.keys()
920     l_products = src.product.get_products_infos(lproducts_name, config)
921     for p_name, p_info in l_products:
922         # skip product with property not_in_package set to yes
923         if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
925         find_product_scripts_and_pyconf(p_name,
929                                         compil_scripts_tmp_dir,
932                                         products_pyconf_tmp_dir)
934     find_application_pyconf(config, application_tmp_dir)
936     d_project = {"project" : (project_tmp_dir, PROJECT_DIR )}
939 def find_product_scripts_and_pyconf(p_name,
943                                     compil_scripts_tmp_dir,
946                                     products_pyconf_tmp_dir):
947     '''Create a specific pyconf file for a given product. Get its environment
948        script, its compilation script and patches and put it in the temporary
949        working directory. This method is used in the source package in order to
950        construct the specific project.
952     :param p_name str: The name of the product.
953     :param p_info Config: The specific configuration corresponding to the
955     :param config Config: The global configuration.
956     :param with_vcs boolean: True if the package is with vcs products (not
957                              transformed into archive products)
958     :param compil_scripts_tmp_dir str: The path to the temporary compilation
959                                        scripts directory of the project.
960     :param env_scripts_tmp_dir str: The path to the temporary environment script
961                                     directory of the project.
962     :param patches_tmp_dir str: The path to the temporary patch scripts
963                                 directory of the project.
964     :param products_pyconf_tmp_dir str: The path to the temporary product
965                                         scripts directory of the project.
    # NOTE(review): line-sampled extract -- remaining signature parameters and
    # several guard lines are missing. The indented lines from original 996
    # onwards presumably sit under an `if with_vcs:` guard (the documented
    # with_vcs parameter is otherwise unused in the visible lines) -- confirm
    # against full source.
968     # read the pyconf of the product
969     product_pyconf_path = src.find_file_in_lpath(p_name + ".pyconf",
970                                                  config.PATHS.PRODUCTPATH)
971     product_pyconf_cfg = src.pyconf.Config(product_pyconf_path)
973     # find the compilation script if any
974     if src.product.product_has_script(p_info):
975         compil_script_path = src.Path(p_info.compil_script)
976         compil_script_path.copy(compil_scripts_tmp_dir)
977         product_pyconf_cfg[p_info.section].compil_script = os.path.basename(
978                                                         p_info.compil_script)
979     # find the environment script if any
980     if src.product.product_has_env_script(p_info):
981         env_script_path = src.Path(p_info.environ.env_script)
982         env_script_path.copy(env_scripts_tmp_dir)
983         product_pyconf_cfg[p_info.section].environ.env_script = os.path.basename(
984                                                     p_info.environ.env_script)
985     # find the patches if any
986     if src.product.product_has_patches(p_info):
987         patches = src.pyconf.Sequence()
988         for patch_path in p_info.patches:
989             p_path = src.Path(patch_path)
990             p_path.copy(patches_tmp_dir)
991             patches.append(os.path.basename(patch_path), "")
993         product_pyconf_cfg[p_info.section].patches = patches
996         # put in the pyconf file the resolved values
997         for info in ["git_info", "cvs_info", "svn_info"]:
999                 for key in p_info[info]:
1000                     product_pyconf_cfg[p_info.section][info][key] = p_info[
1003         # if the product is not archive, then make it become archive.
1004         if src.product.product_is_vcs(p_info):
1005             product_pyconf_cfg[p_info.section].get_source = "archive"
1006             if not "archive_info" in product_pyconf_cfg[p_info.section]:
1007                 product_pyconf_cfg[p_info.section].addMapping("archive_info",
1008                                         src.pyconf.Mapping(product_pyconf_cfg),
1010             product_pyconf_cfg[p_info.section
1011                               ].archive_info.archive_name = p_info.name + ".tgz"
1013     # write the pyconf file to the temporary project location
1014     product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir,
1016     ff = open(product_tmp_pyconf_path, 'w')
1017     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1018     product_pyconf_cfg.__save__(ff, 1)
def find_application_pyconf(config, application_tmp_dir):
    '''Find the application pyconf file and put it in the specific temporary
    directory containing the specific project of a source package.

    :param config Config: The global configuration.
    :param application_tmp_dir str: The path to the temporary application
                                    scripts directory of the project.
    '''
    # read the pyconf of the application
    application_name = config.VARS.application
    application_pyconf_path = src.find_file_in_lpath(
                                            application_name + ".pyconf",
                                            config.PATHS.APPLICATIONPATH)
    application_pyconf_cfg = src.pyconf.Config(application_pyconf_path)

    # Change the workdir so that, once extracted, the application points at
    # the directory next to the embedded salomeTools.
    # NOTE(review): the DOLLAR token argument was missing in the reviewed
    # text; restored to match the pyconf Reference(cfg, type, expr) signature
    # used elsewhere in this file.
    application_pyconf_cfg.APPLICATION.workdir = src.pyconf.Reference(
                                 application_pyconf_cfg,
                                 src.pyconf.DOLLAR,
                                 'VARS.salometoolsway + $VARS.sep + ".."')

    # Prevent from compilation in base
    application_pyconf_cfg.APPLICATION.no_base = "yes"

    # remove products that are not in config (which were filtered
    # by --without_properties); iterate over a list copy so the mapping is
    # not mutated while its keys are being iterated (unsafe on Python 3)
    for product_name in list(application_pyconf_cfg.APPLICATION.products.keys()):
        if product_name not in config.APPLICATION.products.keys():
            application_pyconf_cfg.APPLICATION.products.__delitem__(product_name)

    # write the pyconf file to the temporary application location
    application_tmp_pyconf_path = os.path.join(application_tmp_dir,
                                               application_name + ".pyconf")
    # make sure the handle is closed even if __save__ raises
    ff = open(application_tmp_pyconf_path, 'w')
    try:
        ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
        application_pyconf_cfg.__save__(ff, 1)
    finally:
        ff.close()
def project_package(config, name_project, project_file_path, tmp_working_dir, logger):
    '''Prepare a dictionary that stores all the needed directories and files
    to add in a project package.

    :param config Config: The global configuration.
    :param name_project str: The name of the project, as known by sat.
    :param project_file_path str: The path to the local project.
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
                                project package.
    :param logger Logger: The logging instance to use for the display.
    :return: the dictionary that stores all the needed directories and files
             to add in a project package.
             {label : (path_on_local_machine, path_in_archive)}
    :rtype: dict
    '''
    d_project = {}
    # Read the project file and get the directories to add to the package
    try:
        project_pyconf_cfg = config.PROJECTS.projects.__getattr__(name_project)
    except Exception:
        # best-effort fallback: the project is not in the merged
        # configuration, so read it directly from its pyconf file
        logger.write("""
WARNING: inexisting config.PROJECTS.projects.%s, try to read now from:\n%s\n""" % (name_project, project_file_path))
        project_pyconf_cfg = src.pyconf.Config(project_file_path)
        project_pyconf_cfg.PWD = os.path.dirname(project_file_path)

    # map: key in the project pyconf -> directory name inside the archive
    # NOTE(review): the JOBPATH entry was missing in the reviewed text;
    # restored consistently with the PROJECT_TEMPLATE at the top of the file.
    paths = {"ARCHIVEPATH" : "archives",
             "APPLICATIONPATH" : "applications",
             "PRODUCTPATH" : "products",
             "JOBPATH" : "jobs",
             "MACHINEPATH" : "machines"}
    # Loop over the project paths and add it
    for path in paths:
        if path not in project_pyconf_cfg:
            continue
        # Add the directory to the files to add in the package
        d_project[path] = (project_pyconf_cfg[path], paths[path])
        # Modify the value of the path in the package
        project_pyconf_cfg[path] = src.pyconf.Reference(
                                    project_pyconf_cfg,
                                    src.pyconf.DOLLAR,
                                    'project_path + "/' + paths[path] + '"')

    # Modify some values
    if "project_path" not in project_pyconf_cfg:
        project_pyconf_cfg.addMapping("project_path",
                                      src.pyconf.Mapping(project_pyconf_cfg),
                                      "")
    project_pyconf_cfg.project_path = src.pyconf.Reference(project_pyconf_cfg,
                                                           src.pyconf.DOLLAR,
                                                           'PWD')

    # Write the project pyconf file; close the handle even if __save__ raises
    project_file_name = os.path.basename(project_file_path)
    project_pyconf_tmp_path = os.path.join(tmp_working_dir, project_file_name)
    ff = open(project_pyconf_tmp_path, 'w')
    try:
        ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
        project_pyconf_cfg.__save__(ff, 1)
    finally:
        ff.close()
    d_project["Project hat file"] = (project_pyconf_tmp_path, project_file_name)

    # the caller updates its own dict with this result, so the return is
    # required (see the run() function: d_files_to_add.update(project_package(...)))
    return d_project
def add_readme(config, options, where):
    '''Create the README file of the package and fill it with a generated
    header plus the template sections matching the package content
    (binaries, sources, project, sat).

    :param config Config: The global configuration.
    :param options OptResult: The options of the launched (sat package) command.
    :param where str: The directory in which to create the README file.
    :return: The path to the generated README file.
    :rtype: str
    '''
    readme_path = os.path.join(where, "README")
    # codecs.open guarantees utf-8 output on both Python 2 and 3
    with codecs.open(readme_path, "w", 'utf-8') as f:

        # templates for building the header
        # NOTE(review): some lines of these literals were truncated in the
        # reviewed text ("Date"/"User" header lines, shell commands);
        # reconstructed from the substitution dict built below — confirm
        # against VCS history.
        readme_header = """
# This package was generated with sat $version
# Date: $date
# User: $user
# Distribution : $dist

In the following, $$ROOT represents the directory where you have installed
SALOME (the directory where this file is located).
"""
        readme_compilation_with_binaries = """

compilation based on the binaries used as prerequisites
=======================================================

If you fail to compile the complete application (for example because
you are not root on your system and cannot install missing packages), you
may try a partial compilation based on the binaries.
For that it is necessary to copy the binaries from BINARIES to INSTALL,
and do some substitutions on cmake and .la files (replace the build directories
with local paths).
The procedure to do it is:
 1) Remove or rename INSTALL directory if it exists
 2) Execute the shell script install_bin.sh:
 > cd $ROOT
 > ./install_bin.sh
 3) Use SalomeTool (as explained in Sources section) and compile only the
    modules you need to (with -p option)
"""
        readme_header_tpl = string.Template(readme_header)
        readme_template_path_bin = os.path.join(config.VARS.internal_dir,
                                                "README_BIN.template")
        readme_template_path_bin_launcher = os.path.join(config.VARS.internal_dir,
                                                "README_LAUNCHER.template")
        readme_template_path_bin_virtapp = os.path.join(config.VARS.internal_dir,
                                                "README_BIN_VIRTUAL_APP.template")
        readme_template_path_src = os.path.join(config.VARS.internal_dir,
                                                "README_SRC.template")
        readme_template_path_pro = os.path.join(config.VARS.internal_dir,
                                                "README_PROJECT.template")
        readme_template_path_sat = os.path.join(config.VARS.internal_dir,
                                                "README_SAT.template")

        # prepare substitution dictionary
        d = dict()
        d['user'] = config.VARS.user
        d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
        d['version'] = config.INTERNAL.sat_version
        d['dist'] = config.VARS.dist
        f.write(readme_header_tpl.substitute(d))  # write the general header (common)

        if options.binaries or options.sources:
            d['application'] = config.VARS.application
            f.write("# Application: " + d['application'] + "\n")
            if 'KERNEL' in config.APPLICATION.products:
                VersionSalome = src.get_salome_version(config)
                # Case where SALOME has the launcher that uses the SalomeContext API
                if VersionSalome >= 730:
                    d['launcher'] = config.APPLICATION.profile.launcher_name
                else:
                    d['virtual_app'] = 'runAppli'  # this info is not used now)

        # write the specific sections
        if options.binaries:
            f.write(src.template.substitute(readme_template_path_bin, d))
            if "virtual_app" in d:
                f.write(src.template.substitute(readme_template_path_bin_virtapp, d))
            if "launcher" in d:
                f.write(src.template.substitute(readme_template_path_bin_launcher, d))

        if options.sources:
            f.write(src.template.substitute(readme_template_path_src, d))

        if options.binaries and options.sources:
            f.write(readme_compilation_with_binaries)

        if options.project:
            f.write(src.template.substitute(readme_template_path_pro, d))

        if options.sat:
            f.write(src.template.substitute(readme_template_path_sat, d))

    # the caller stores this as the local path of the README to package
    # (see run(): d_files_to_add["README"] = (local_readme_tmp_path, "README"))
    return readme_path
def update_config(config, prop, value):
    '''Remove from config.APPLICATION.products the products that have the
    property given as input.

    :param config Config: The global config.
    :param prop str: The property to filter.
    :param value str: The value of the property to filter.
    '''
    src.check_config_has_application(config)
    # First collect the names to drop, then delete them: the products
    # mapping must not be mutated while it is being iterated.
    doomed = [
        name
        for name in config.APPLICATION.products.keys()
        if src.get_property_in_product_cfg(
               src.product.get_product_config(config, name), prop) == value
    ]
    for name in doomed:
        config.APPLICATION.products.__delitem__(name)
def description():
    '''method that is called when salomeTools is called with --help option.

    :return: The text to display for the package command description.
    :rtype: str
    '''
    # NOTE(review): the `def description():` and `return _(` lines were
    # truncated in the reviewed text; restored so the command help works.
    return _("""
The package command creates a tar file archive of a product.
There are four kinds of archive, which can be mixed:

 1 - The binary archive.
     It contains the product installation directories plus a launcher.
 2 - The sources archive.
     It contains the product archives, a project (the application plus salomeTools).
 3 - The project archive.
     It contains a project (give the project file path as argument).
 4 - The salomeTools archive.
     It contains code utility salomeTools.

example:
 >> sat package SALOME-master --binaries --sources""")
1248 def run(args, runner, logger):
1249 '''method that is called when salomeTools is called with package parameter.
1253 (options, args) = parser.parse_args(args)
1255 # Check that a type of package is called, and only one
1256 all_option_types = (options.binaries,
1258 options.project not in ["", None],
1261 # Check if no option for package type
1262 if all_option_types.count(True) == 0:
1263 msg = _("Error: Precise a type for the package\nUse one of the "
1264 "following options: --binaries, --sources, --project or"
1266 logger.write(src.printcolors.printcError(msg), 1)
1267 logger.write("\n", 1)
1270 # The repository where to put the package if not Binary or Source
1271 package_default_path = runner.cfg.LOCAL.workdir
1273 # if the package contains binaries or sources:
1274 if options.binaries or options.sources:
1275 # Check that the command has been called with an application
1276 src.check_config_has_application(runner.cfg)
1278 # Display information
1279 logger.write(_("Packaging application %s\n") % src.printcolors.printcLabel(
1280 runner.cfg.VARS.application), 1)
1282 # Get the default directory where to put the packages
1283 package_default_path = os.path.join(runner.cfg.APPLICATION.workdir,
1285 src.ensure_path_exists(package_default_path)
1287 # if the package contains a project:
1289 # check that the project is visible by SAT
1290 projectNameFile = options.project + ".pyconf"
1292 for i in runner.cfg.PROJECTS.project_file_paths:
1293 baseName = os.path.basename(i)
1294 if baseName == projectNameFile:
1298 if foundProject is None:
1299 local_path = os.path.join(runner.cfg.VARS.salometoolsway,
1302 msg = _("""ERROR: the project %(1)s is not visible by salomeTools.
1306 Please add it in file:
1308 {"1": options.project, "2": "\n ".join(runner.cfg.PROJECTS.project_file_paths), "3": local_path})
1309 logger.write(src.printcolors.printcError(msg), 1)
1310 logger.write("\n", 1)
1313 options.project_file_path = foundProject
1314 src.printcolors.print_value(logger, "Project path", options.project_file_path, 2)
1316 # Remove the products that are filtered by the --without_properties option
1317 if options.without_properties:
1318 app = runner.cfg.APPLICATION
1319 logger.trace("without_properties all products:\n %s\n" % PP.pformat(sorted(app.products.keys())))
1320 prop, value = options.without_properties
1321 update_config(runner.cfg, prop, value)
1322 logger.warning("without_properties selected products:\n %s\n" % PP.pformat(sorted(app.products.keys())))
1325 # get the name of the archive or build it
1327 if os.path.basename(options.name) == options.name:
1328 # only a name (not a path)
1329 archive_name = options.name
1330 dir_name = package_default_path
1332 archive_name = os.path.basename(options.name)
1333 dir_name = os.path.dirname(options.name)
1335 # suppress extension
1336 if archive_name[-len(".tgz"):] == ".tgz":
1337 archive_name = archive_name[:-len(".tgz")]
1338 if archive_name[-len(".tar.gz"):] == ".tar.gz":
1339 archive_name = archive_name[:-len(".tar.gz")]
1343 dir_name = package_default_path
1344 if options.binaries or options.sources:
1345 archive_name = runner.cfg.APPLICATION.name
1347 if options.binaries:
1348 archive_name += "-"+runner.cfg.VARS.dist
1351 archive_name += "-SRC"
1352 if options.with_vcs:
1353 archive_name += "-VCS"
1356 project_name = options.project
1357 archive_name += ("PROJECT-" + project_name)
1360 archive_name += ("salomeTools_" + runner.cfg.INTERNAL.sat_version)
1361 if len(archive_name)==0: # no option worked
1362 msg = _("Error: Cannot name the archive\n"
1363 " check if at least one of the following options was "
1364 "selected : --binaries, --sources, --project or"
1366 logger.write(src.printcolors.printcError(msg), 1)
1367 logger.write("\n", 1)
1370 path_targz = os.path.join(dir_name, archive_name + ".tgz")
1372 src.printcolors.print_value(logger, "Package path", path_targz, 2)
1374 # Create a working directory for all files that are produced during the
1375 # package creation and that will be removed at the end of the command
1376 tmp_working_dir = os.path.join(runner.cfg.VARS.tmp_root, runner.cfg.VARS.datehour)
1377 src.ensure_path_exists(tmp_working_dir)
1378 logger.write("\n", 5)
1379 logger.write(_("The temporary working directory: %s\n" % tmp_working_dir),5)
1381 logger.write("\n", 3)
1383 msg = _("Preparation of files to add to the archive")
1384 logger.write(src.printcolors.printcLabel(msg), 2)
1385 logger.write("\n", 2)
1387 d_files_to_add={} # content of the archive
1389 # a dict to hold paths that will need to be substitute for users recompilations
1390 d_paths_to_substitute={}
1392 if options.binaries:
1393 d_bin_files_to_add = binary_package(runner.cfg,
1397 # for all binaries dir, store the substitution that will be required
1398 # for extra compilations
1399 for key in d_bin_files_to_add:
1400 if key.endswith("(bin)"):
1401 source_dir = d_bin_files_to_add[key][0]
1402 path_in_archive = d_bin_files_to_add[key][1].replace("BINARIES-" + runner.cfg.VARS.dist,"INSTALL")
1403 if os.path.basename(source_dir)==os.path.basename(path_in_archive):
1404 # if basename is the same we will just substitute the dirname
1405 d_paths_to_substitute[os.path.dirname(source_dir)]=\
1406 os.path.dirname(path_in_archive)
1408 d_paths_to_substitute[source_dir]=path_in_archive
1410 d_files_to_add.update(d_bin_files_to_add)
1413 d_files_to_add.update(source_package(runner,
1418 if options.binaries:
1419 # for archives with bin and sources we provide a shell script able to
1420 # install binaries for compilation
1421 file_install_bin=produce_install_bin_file(runner.cfg,logger,
1423 d_paths_to_substitute,
1425 d_files_to_add.update({"install_bin" : (file_install_bin, "install_bin.sh")})
1426 logger.write("substitutions that need to be done later : \n", 5)
1427 logger.write(str(d_paths_to_substitute), 5)
1428 logger.write("\n", 5)
1430 # --salomeTool option is not considered when --sources is selected, as this option
1431 # already brings salomeTool!
1433 d_files_to_add.update({"salomeTools" : (runner.cfg.VARS.salometoolsway, "")})
1436 DBG.write("config for package %s" % project_name, runner.cfg)
1437 d_files_to_add.update(project_package(runner.cfg, project_name, options.project_file_path, tmp_working_dir, logger))
1439 if not(d_files_to_add):
1440 msg = _("Error: Empty dictionnary to build the archive!\n")
1441 logger.write(src.printcolors.printcError(msg), 1)
1442 logger.write("\n", 1)
1445 # Add the README file in the package
1446 local_readme_tmp_path = add_readme(runner.cfg, options, tmp_working_dir)
1447 d_files_to_add["README"] = (local_readme_tmp_path, "README")
1449 # Add the additional files of option add_files
1450 if options.add_files:
1451 for file_path in options.add_files:
1452 if not os.path.exists(file_path):
1453 msg = _("WARNING: the file %s is not accessible.\n" % file_path)
1455 file_name = os.path.basename(file_path)
1456 d_files_to_add[file_name] = (file_path, file_name)
1458 logger.write("\n", 2)
1460 logger.write(src.printcolors.printcLabel(_("Actually do the package")), 2)
1461 logger.write("\n", 2)
1465 # Creating the object tarfile
1466 tar = tarfile.open(path_targz, mode='w:gz')
1468 # get the filtering function if needed
1469 filter_function = exclude_VCS_and_extensions
1471 # Add the files to the tarfile object
1472 res = add_files(tar, archive_name, d_files_to_add, logger, f_exclude=filter_function)
1474 except KeyboardInterrupt:
1475 logger.write(src.printcolors.printcError("\nERROR: forced interruption\n"), 1)
1476 logger.write(_("Removing the temporary working directory '%s'... ") % tmp_working_dir, 1)
1477 # remove the working directory
1478 shutil.rmtree(tmp_working_dir)
1479 logger.write(_("OK"), 1)
1480 logger.write(_("\n"), 1)
1483 # unconditionaly remove the tmp_local_working_dir
1484 tmp_local_working_dir = os.path.join(runner.cfg.APPLICATION.workdir, "tmp_package")
1485 if os.path.isdir(tmp_local_working_dir):
1486 shutil.rmtree(tmp_local_working_dir)
1488 # have to decide some time
1489 DBG.tofix("make shutil.rmtree('%s') effective" % tmp_working_dir, "", DBG.isDeveloper())
1491 # Print again the path of the package
1492 logger.write("\n", 2)
1493 src.printcolors.print_value(logger, "Package path", path_targz, 2)