3 # Copyright (C) 2010-2012 CEA/DEN
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 # Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
29 from application import get_SALOME_modules
30 import src.debug as DBG
# Name of the directory, inside the source package, that holds the product
# source archives.
ARCHIVE_DIR = "ARCHIVES"
# Name of the directory, inside the source package, that holds the generated
# sat project.
PROJECT_DIR = "PROJECT"

# Directory names excluded from product archives (VCS metadata).
IGNORED_DIRS = [".git", ".svn"]
# File extensions excluded from product archives (none by default).
IGNORED_EXTENSIONS = []
# NOTE(review): the two pyconf templates below are multi-line string literals
# whose interior lines (including the closing triple quotes) are elided in this
# listing. Do not edit them from this view — restore them from the full file.
43 PROJECT_TEMPLATE = """#!/usr/bin/env python
46 # The path to the archive root directory
47 root_path : $PWD + "/../"
49 project_path : $PWD + "/"
51 # Where to search the archives of the products
52 ARCHIVEPATH : $root_path + "ARCHIVES"
53 # Where to search the pyconf of the applications
54 APPLICATIONPATH : $project_path + "applications/"
55 # Where to search the pyconf of the products
56 PRODUCTPATH : $project_path + "products/"
57 # Where to search the pyconf of the jobs of the project
58 JOBPATH : $project_path + "jobs/"
59 # Where to search the pyconf of the machines of the project
60 MACHINEPATH : $project_path + "machines/"
63 LOCAL_TEMPLATE = ("""#!/usr/bin/env python
71 archive_dir : 'default'
78 project_file_paths : [$VARS.salometoolsway + $VARS.sep + \"..\" + $VARS.sep"""
79 """ + \"""" + PROJECT_DIR + """\" + $VARS.sep + "project.pyconf"]
# Define all possible option for the package command :  sat package <options>
parser = src.options.Options()
parser.add_option('b', 'binaries', 'boolean', 'binaries',
    _('Optional: Produce a binary package.'), False)
parser.add_option('f', 'force_creation', 'boolean', 'force_creation',
    _('Optional: Only binary package: produce the archive even if '
      'there are some missing products.'), False)
parser.add_option('s', 'sources', 'boolean', 'sources',
    _('Optional: Produce a compilable archive of the sources of the '
      'application.'), False)
parser.add_option('', 'with_vcs', 'boolean', 'with_vcs',
    _('Optional: Only source package: do not make archive of vcs products.'),
    False)
parser.add_option('p', 'project', 'string', 'project',
    _('Optional: Produce an archive that contains a project.'), "")
parser.add_option('t', 'salometools', 'boolean', 'sat',
    _('Optional: Produce an archive that contains salomeTools.'), False)
parser.add_option('n', 'name', 'string', 'name',
    _('Optional: The name or full path of the archive.'), None)
parser.add_option('', 'add_files', 'list2', 'add_files',
    _('Optional: The list of additional files to add to the archive.'), [])
parser.add_option('', 'without_commercial', 'boolean', 'without_commercial',
    _('Optional: do not add commercial licence.'), False)
# NOTE(review): no default is visible for without_property in this listing;
# the call is kept without one — confirm against the full file.
parser.add_option('', 'without_property', 'string', 'without_property',
    _('Optional: Filter the products by their properties.\n\tSyntax: '
      '--without_property <property>:<value>'))
def add_files(tar, name_archive, d_content, logger, f_exclude=None):
    '''Create an archive containing all directories and files that are given in
       the d_content argument.

    :param tar tarfile: The tarfile instance used to make the archive.
    :param name_archive str: The name of the archive to make.
    :param d_content dict: The dictionary that contain all directories and files
                           to add in the archive.
                           d_content[label] =
                           (path_on_local_machine, path_in_archive)
    :param logger Logger: the logging instance
    :param f_exclude Function: the function that filters entries out of the tar
    :return: 0 if success, 1 if not.
    :rtype: int
    '''
    # get the max length of the messages in order to make the display aligned
    max_len = len(max(d_content.keys(), key=len))
    result = 0
    # loop over each directory or file stored in the d_content dictionary,
    # in a deterministic (sorted) order
    names = sorted(d_content.keys())
    DBG.write("add tar names", names)
    for name in names:
        # display information: pad the label with dots up to max_len
        len_points = max_len - len(name)
        local_path, archive_path = d_content[name]
        in_archive = os.path.join(name_archive, archive_path)
        logger.write(name + " " + len_points * "." + " " + in_archive + " ", 3)
        # Add the directory or file in the archive; a failure on one entry is
        # reported but does not abort the remaining entries.
        try:
            tar.add(local_path, arcname=in_archive, exclude=f_exclude)
            logger.write(src.printcolors.printcSuccess(_("OK")), 3)
        except Exception as e:
            logger.write(src.printcolors.printcError(_("KO ")), 3)
            logger.write(str(e), 3)
            result = 1
        logger.write("\n", 3)
    return result
def exclude_VCS_and_extensions(filename):
    '''The function that is used to exclude from package the link to the
       VCS repositories (like .git).

    :param filename Str: The filename to exclude (or not).
    :return: True if the file has to be excluded.
    :rtype: Boolean
    '''
    # exclude anything whose path mentions a VCS metadata directory
    for dir_name in IGNORED_DIRS:
        if dir_name in filename:
            return True
    # exclude files with an ignored extension
    for extension in IGNORED_EXTENSIONS:
        if filename.endswith(extension):
            return True
    return False
# NOTE(review): this listing is sampled — interior source lines are elided
# (e.g. some call arguments, `else:` lines, the return). Code kept verbatim;
# only comments added.
169 def produce_relative_launcher(config,
174                               with_commercial=True):
175     '''Create a specific SALOME launcher for the binary package. This launcher
178     :param config Config: The global configuration.
179     :param logger Logger: the logging instance
180     :param file_dir str: the directory where to put the launcher
181     :param file_name str: The launcher name
182     :param binaries_dir_name str: the name of the repository where the binaries
184     :return: the path of the produced launcher
188     # get KERNEL installation path
189     kernel_root_dir = os.path.join(binaries_dir_name, "KERNEL")
191     # set kernel bin dir (considering fhs property)
192     kernel_cfg = src.product.get_product_config(config, "KERNEL")
193     if src.get_property_in_product_cfg(kernel_cfg, "fhs"):
194         bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin")
# NOTE(review): an `else:` line (195) appears elided before the next line.
196         bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin","salome")
198     # check if the application contains an application module
199     l_product_info = src.product.get_products_infos(config.APPLICATION.products.keys(),
201     salome_application_name="Not defined"
202     for prod_name, prod_info in l_product_info:
203         # look for a salome application
204         if src.get_property_in_product_cfg(prod_info, "is_salome_application") == "yes":
205             salome_application_name=prod_info.name
207     # if the application contains an application module, we set ABSOLUTE_APPLI_PATH to it
208     # if not we set it to KERNEL_INSTALL_DIR, which is sufficient, except for salome test
209     if salome_application_name == "Not defined":
210         app_root_dir=kernel_root_dir
212         app_root_dir=os.path.join(binaries_dir_name, salome_application_name)
214     # Get the launcher template and do substitutions
215     if "python3" in config.APPLICATION and config.APPLICATION.python3 == "yes":
216         withProfile = src.fileEnviron.withProfile3
218         withProfile = src.fileEnviron.withProfile
# The two replacements below rewrite the template so that the installed
# launcher resolves KERNEL/application paths relative to out_dir_Path.
220     withProfile = withProfile.replace(
221         "ABSOLUTE_APPLI_PATH'] = 'KERNEL_INSTALL_DIR'",
222         "ABSOLUTE_APPLI_PATH'] = out_dir_Path + '" + config.VARS.sep + app_root_dir + "'")
223     withProfile = withProfile.replace(
224         " 'BIN_KERNEL_INSTALL_DIR'",
225         " out_dir_Path + '" + config.VARS.sep + bin_kernel_install_dir + "'")
# Split the template at the environment marker, write the env section between
# the two halves.
227     before, after = withProfile.split("# here your local standalone environment\n")
229     # create an environment file writer
230     writer = src.environment.FileEnvWriter(config,
# NOTE(review): FileEnvWriter arguments on lines 231-234 are elided here.
235     filepath = os.path.join(file_dir, file_name)
236     # open the file and write into it
237     launch_file = open(filepath, "w")
238     launch_file.write(before)
240     writer.write_cfgForPy_file(launch_file,
241                                for_package = binaries_dir_name,
242                                with_commercial=with_commercial)
243     launch_file.write(after)
246     # Little hack to put out_dir_Path outside the strings
247     src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' )
249     # A hack to put a call to a file for distene licence.
250     # It does nothing to an application that has no distene product
251     hack_for_distene_licence(filepath)
253     # change the rights in order to make the file executable for everybody
# NOTE(review): the chmod call and the `return filepath` appear elided.
# NOTE(review): sampled listing — the assignments of `fileout` and `num_line`,
# the early-return branch and the final rewrite/close calls are elided.
# Code kept verbatim; only comments added.
265 def hack_for_distene_licence(filepath):
266     '''Replace the distene licence env variable by a call to a file.
268     :param filepath Str: The path to the launcher to modify.
# Keep the original aside and rewrite the launcher in place.
270     shutil.move(filepath, filepath + "_old")
272     filein = filepath + "_old"
273     fin = open(filein, "r")
# NOTE(review): `fileout` is assigned on an elided line — presumably filepath.
274     fout = open(fileout, "w")
275     text = fin.readlines()
276     # Find the Distene section
278     for i,line in enumerate(text):
279         if "# Set DISTENE License" in line:
# NOTE(review): the loop body recording `num_line = i` (and a break) is elided.
283     # No distene product, there is nothing to do
# Drop the two lines that exported the licence variable directly...
289     del text[num_line +1]
290     del text[num_line +1]
# ...and replace them with code that loads the licence settings from a file.
# HACK: the licence path below is hard-coded and site-specific.
291     text_to_insert =""" import imp
293 distene = imp.load_source('distene_licence', '/data/tmpsalome/salome/prerequis/install/LICENSE/dlim8.var.py')
294 distene.set_distene_variables(context)
297     text.insert(num_line + 1, text_to_insert)
# NOTE(review): sampled listing — FileEnvWriter arguments (305-307, 321-325)
# and the trailing chmod/return are elided. Code kept verbatim; comments only.
304 def produce_relative_env_files(config,
308     '''Create some specific environment files for the binary package. These
309     files use relative paths.
311     :param config Config: The global configuration.
312     :param logger Logger: the logging instance
313     :param file_dir str: the directory where to put the files
314     :param binaries_dir_name str: the name of the repository where the binaries
316     :return: the list of path of the produced environment files
319     # create an environment file writer
320     writer = src.environment.FileEnvWriter(config,
326     filepath = writer.write_env_file("env_launch.sh",
329                                      for_package = binaries_dir_name)
331     # Little hack to put out_dir_Path as environment variable
# Rewrites literal out_dir_Path occurrences into ${out_dir_Path} shell
# expansions so the sourced script resolves paths at run time.
332     src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )
334     # change the rights in order to make the file executable for everybody
# NOTE(review): the chmod call and `return filepath` appear elided.
# NOTE(review): sampled listing — the two `for key in d_sub` loop headers
# (374, 377), `d = {}` (381) and the trailing chmod/return are elided.
# Code kept verbatim; comments only.
346 def produce_install_bin_file(config,
351     '''Create a bash shell script which do substitutions in BIRARIES dir
352     in order to use it for extra compilations.
354     :param config Config: The global configuration.
355     :param logger Logger: the logging instance
356     :param file_dir str: the directory where to put the files
357     :param d_sub, dict: the dictionnary that contains the substitutions to be done
358     :param file_name str: the name of the install script file
359     :return: the produced file
363     filepath = os.path.join(file_dir, file_name)
364     # open the file and write into it
365     # use codec utf-8 as sat variables are in unicode
366     with codecs.open(filepath, "w", 'utf-8') as installbin_file:
367         installbin_template_path = os.path.join(config.VARS.internal_dir,
368                                         "INSTALL_BIN.template")
370         # build the name of the directory that will contain the binaries
371         binaries_dir_name = "BINARIES-" + config.VARS.dist
372         # build the substitution loop: a grep to find the files containing
# the keys, then an in-place sed replacing each key by its substitution
373         loop_cmd = "for f in $(grep -RIl"
375             loop_cmd += " -e "+ key
376         loop_cmd += ' INSTALL); do\n     sed -i "\n'
378             loop_cmd += "        s?" + key + "?$(pwd)/" + d_sub[key] + "?g\n"
379         loop_cmd += '    " $f\ndone'
382         d["BINARIES_DIR"] = binaries_dir_name
383         d["SUBSTITUTION_LOOP"]=loop_cmd
385         # substitute the template and write it in file
386         content=src.template.substitute(installbin_template_path, d)
387         installbin_file.write(content)
388         # change the rights in order to make the file executable for everybody
# NOTE(review): the chmod call and `return filepath` appear elided.
# NOTE(review): sampled listing — some call arguments, `continue` lines and
# `else:` branches are elided. Code kept verbatim; comments only.
400 def product_appli_creation_script(config,
404     '''Create a script that can produce an application (EDF style) in the binary
407     :param config Config: The global configuration.
408     :param logger Logger: the logging instance
409     :param file_dir str: the directory where to put the file
410     :param binaries_dir_name str: the name of the repository where the binaries
412     :return: the path of the produced script file
415     template_name = "create_appli.py.for_bin_packages.template"
416     template_path = os.path.join(config.VARS.internal_dir, template_name)
417     text_to_fill = open(template_path, "r").read()
418     text_to_fill = text_to_fill.replace("TO BE FILLED 1",
419                                         '"' + binaries_dir_name + '"')
# Build the list of <module .../> XML lines, one per SALOME module of the
# application (cpp products contribute one line per generated component).
422     for product_name in get_SALOME_modules(config):
423         product_info = src.product.get_product_config(config, product_name)
425         if src.product.product_is_smesh_plugin(product_info):
# NOTE(review): a `continue` for smesh plugins appears elided here.
428         if 'install_dir' in product_info and bool(product_info.install_dir):
429             if src.product.product_is_cpp(product_info):
431                 for cpp_name in src.product.get_product_components(product_info):
432                     line_to_add = ("<module name=\"" +
434                                    "\" gui=\"yes\" path=\"''' + "
435                                    "os.path.join(dir_bin_name, \"" +
436                                    cpp_name + "\") + '''\"/>")
# NOTE(review): the `else:` for non-cpp products appears elided before 441.
439                 line_to_add = ("<module name=\"" +
441                                "\" gui=\"yes\" path=\"''' + "
442                                "os.path.join(dir_bin_name, \"" +
443                                product_name + "\") + '''\"/>")
444         text_to_add += line_to_add + "\n"
446     filled_text = text_to_fill.replace("TO BE FILLED 2", text_to_add)
448     tmp_file_path = os.path.join(file_dir, "create_appli.py")
449     ff = open(tmp_file_path, "w")
450     ff.write(filled_text)
453     # change the rights in order to make the file executable for everybody
454     os.chmod(tmp_file_path,
# NOTE(review): the chmod mode flags and `return tmp_file_path` appear elided.
# NOTE(review): sampled listing — list initializations, `continue`/`else:`
# lines and several multi-line call arguments are elided. Code kept verbatim;
# only comments added.
465 def binary_package(config, logger, options, tmp_working_dir):
466     '''Prepare a dictionary that stores all the needed directories and files to
467     add in a binary package.
469     :param config Config: The global configuration.
470     :param logger Logger: the logging instance
471     :param options OptResult: the options of the launched command
472     :param tmp_working_dir str: The temporary local directory containing some
473                                 specific directories or files needed in the
475     :return: the dictionary that stores all the needed directories and files to
476              add in a binary package.
477              {label : (path_on_local_machine, path_in_archive)}
481     # Get the list of product installation to add to the archive
482     l_products_name = sorted(config.APPLICATION.products.keys())
483     l_product_info = src.product.get_products_infos(l_products_name,
# NOTE(review): initializations of l_install_dir / l_not_installed /
# l_source_dir (lines 484-487) appear elided here.
488     l_sources_not_present = []
489     generate_mesa_launcher = False  # a flag to know if we generate a mesa launcher
490     for prod_name, prod_info in l_product_info:
491         # skip product with property not_in_package set to yes
492         if src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes":
495         # Add the sources of the products that have the property
496         # sources_in_package : "yes"
497         if src.get_property_in_product_cfg(prod_info,
498                                            "sources_in_package") == "yes":
499             if os.path.exists(prod_info.source_dir):
500                 l_source_dir.append((prod_name, prod_info.source_dir))
502                 l_sources_not_present.append(prod_name)
504         # if at least one of the application products has the "is_mesa" property
505         if src.get_property_in_product_cfg(prod_info, "is_mesa") == "yes":
506             generate_mesa_launcher = True  # we will generate a mesa launcher
508         # ignore the native and fixed products for install directories
509         if (src.product.product_is_native(prod_info)
510                 or src.product.product_is_fixed(prod_info)
511                 or not src.product.product_compiles(prod_info)):
513         if src.product.check_installation(prod_info):
514             l_install_dir.append((prod_name, prod_info.install_dir))
516             l_not_installed.append(prod_name)
518         # Add also the cpp generated modules (if any)
519         if src.product.product_is_cpp(prod_info):
521             for name_cpp in src.product.get_product_components(prod_info):
522                 install_dir = os.path.join(config.APPLICATION.workdir,
524                 if os.path.exists(install_dir):
525                     l_install_dir.append((name_cpp, install_dir))
527                     l_not_installed.append(name_cpp)
529     # check the name of the directory that (could) contains the binaries
530     # from previous detar
531     binaries_from_detar = os.path.join(config.APPLICATION.workdir, "BINARIES-" + config.VARS.dist)
532     if os.path.exists(binaries_from_detar):
# The lines below are part of a multi-line warning message written to the
# logger (its opening logger.write("""... on line 533 is elided).
534 WARNING: existing binaries directory from previous detar installation:
536 To make new package from this, you have to:
537 1) install binaries in INSTALL directory with the script "install_bin.sh"
538    see README file for more details
539 2) or recompile everything in INSTALL with "sat compile" command
540    this step is long, and requires some linux packages to be installed
542 """ % binaries_from_detar)
544     # Print warning or error if there are some missing products
545     if len(l_not_installed) > 0:
546         text_missing_prods = ""
547         for p_name in l_not_installed:
548             text_missing_prods += "-" + p_name + "\n"
549         if not options.force_creation:
550             msg = _("ERROR: there are missing products installations:")
551             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
# NOTE(review): the error path presumably aborts here (return) — the elided
# lines 552-555 are not visible; confirm against the full file.
556             msg = _("WARNING: there are missing products installations:")
557             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
561     # Do the same for sources
562     if len(l_sources_not_present) > 0:
563         text_missing_prods = ""
564         for p_name in l_sources_not_present:
565             text_missing_prods += "-" + p_name + "\n"
566         if not options.force_creation:
567             msg = _("ERROR: there are missing products sources:")
568             logger.write("%s\n%s" % (src.printcolors.printcError(msg),
573             msg = _("WARNING: there are missing products sources:")
574             logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
578     # construct the name of the directory that will contain the binaries
579     binaries_dir_name = "BINARIES-" + config.VARS.dist
581     # construct the correlation table between the product names, there
582     # actual install directories and there install directory in archive
584     for prod_name, install_dir in l_install_dir:
585         path_in_archive = os.path.join(binaries_dir_name, prod_name)
586         d_products[prod_name + " (bin)"] = (install_dir, path_in_archive)
588     for prod_name, source_dir in l_source_dir:
589         path_in_archive = os.path.join("SOURCES", prod_name)
590         d_products[prod_name + " (sources)"] = (source_dir, path_in_archive)
592     # for packages of SALOME applications including KERNEL,
593     # we produce a salome launcher or a virtual application (depending on salome version)
594     if 'KERNEL' in config.APPLICATION.products:
595         VersionSalome = src.get_salome_version(config)
596         # Case where SALOME has the launcher that uses the SalomeContext API
597         if VersionSalome >= 730:
598             # create the relative launcher and add it to the files to add
599             launcher_name = src.get_launcher_name(config)
600             launcher_package = produce_relative_launcher(config,
605                                                  not(options.without_commercial))
606             d_products["launcher"] = (launcher_package, launcher_name)
608             # if the application contains mesa products, we generate in addition to the
609             # classical salome launcher a launcher using mesa and called mesa_salome
610             # (the mesa launcher will be used for remote usage through ssh).
611             if generate_mesa_launcher:
612                 #if there is one : store the use_mesa property
613                 restore_use_mesa_option=None
614                 if ('properties' in config.APPLICATION and
615                         'use_mesa' in config.APPLICATION.properties):
616                     restore_use_mesa_option = config.APPLICATION.properties.use_mesa
618                 # activate mesa property, and generate a mesa launcher
619                 src.activate_mesa_property(config)  #activate use_mesa property
620                 launcher_mesa_name="mesa_"+launcher_name
621                 launcher_package_mesa = produce_relative_launcher(config,
626                                                  not(options.without_commercial))
627                 d_products["launcher (mesa)"] = (launcher_package_mesa, launcher_mesa_name)
629                 # if there was a use_mesa value, we restore it
630                 # else we set it to the default value "no"
631                 if restore_use_mesa_option != None:
632                     config.APPLICATION.properties.use_mesa=restore_use_mesa_option
634                     config.APPLICATION.properties.use_mesa="no"
637             # if we mix binaries and sources, we add a copy of the launcher,
638             # prefixed with "bin",in order to avoid clashes
639             d_products["launcher (copy)"] = (launcher_package, "bin"+launcher_name)
641         # Provide a script for the creation of an application EDF style
642         appli_script = product_appli_creation_script(config,
647         d_products["appli script"] = (appli_script, "create_appli.py")
649     # Put also the environment file
650     env_file = produce_relative_env_files(config,
655     d_products["environment file"] = (env_file, "env_launch.sh")
# NOTE(review): the final `return d_products` appears elided.
# NOTE(review): sampled listing — some call arguments, `else:` branches and the
# try/finally around the chdir appear elided. Code kept verbatim; comments only.
659 def source_package(sat, config, logger, options, tmp_working_dir):
660     '''Prepare a dictionary that stores all the needed directories and files to
661     add in a source package.
663     :param config Config: The global configuration.
664     :param logger Logger: the logging instance
665     :param options OptResult: the options of the launched command
666     :param tmp_working_dir str: The temporary local directory containing some
667                                 specific directories or files needed in the
669     :return: the dictionary that stores all the needed directories and files to
670              add in a source package.
671              {label : (path_on_local_machine, path_in_archive)}
675     # Get all the products that are prepared using an archive
676     logger.write("Find archive products ... ")
677     d_archives, l_pinfo_vcs = get_archives(config, logger)
678     logger.write("Done\n")
680     if not options.with_vcs and len(l_pinfo_vcs) > 0:
681         # Make archives with the products that are not prepared using an archive
682         # (git, cvs, svn, etc)
683         logger.write("Construct archives for vcs products ... ")
684         d_archives_vcs = get_archives_vcs(l_pinfo_vcs,
689         logger.write("Done\n")
# Create a project that describes the application and its products.
692     logger.write("Create the project ... ")
693     d_project = create_project_for_src_package(config,
696     logger.write("Done\n")
# Add the sat tool itself so the package is self-contained.
699     tmp_sat = add_salomeTools(config, tmp_working_dir)
700     d_sat = {"salomeTools" : (tmp_sat, "salomeTools")}
702     # Add a sat symbolic link if not win
703     if not src.architecture.is_windows():
704         tmp_satlink_path = os.path.join(tmp_working_dir, 'sat')
708         # In the jobs, os.getcwd() can fail
709         t = config.LOCAL.workdir
710         os.chdir(tmp_working_dir)
# Recreate the 'sat' symlink from scratch inside the temporary directory.
711         if os.path.lexists(tmp_satlink_path):
712             os.remove(tmp_satlink_path)
713         os.symlink(os.path.join('salomeTools', 'sat'), 'sat')
# NOTE(review): the chdir back to `t` (lines 714-715) appears elided.
716         d_sat["sat link"] = (tmp_satlink_path, "sat")
# Merge all contributions into the final {label: (local, in-archive)} mapping.
718     d_source = src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
# NOTE(review): the `return d_source` appears elided.
def get_archives(config, logger):
    '''Find all the products that are get using an archive and all the products
       that are get using a vcs (git, cvs, svn) repository.

    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :return: the dictionary {name_product :
             (local path of its archive, path in the package of its archive)}
             and the list of specific configuration corresponding to the vcs
             products
    :rtype: (Dict, List)
    '''
    # Get the list of product informations
    l_products_name = config.APPLICATION.products.keys()
    l_product_info = src.product.get_products_infos(l_products_name,
                                                    config)
    d_archives = {}
    l_pinfo_vcs = []
    for p_name, p_info in l_product_info:
        # skip product with property not_in_package set to yes
        if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
            continue
        # ignore the native and fixed products: they ship no archive
        if (src.product.product_is_native(p_info)
                or src.product.product_is_fixed(p_info)):
            continue
        if p_info.get_source == "archive":
            archive_path = p_info.archive_info.archive_name
            archive_name = os.path.basename(archive_path)
        else:
            # vcs product: remember it so the caller can build an archive
            # from its checked-out sources
            l_pinfo_vcs.append((p_name, p_info))
            continue
        d_archives[p_name] = (archive_path,
                              os.path.join(ARCHIVE_DIR, archive_name))
    return d_archives, l_pinfo_vcs
def add_salomeTools(config, tmp_working_dir):
    '''Prepare a version of salomeTools that has a specific local.pyconf file
       configured for a source package.

    :param config Config: The global configuration.
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
                                source package
    :return: The path to the local salomeTools directory to add in the package
    :rtype: str
    '''
    # Copy sat in the temporary working directory
    sat_tmp_path = src.Path(os.path.join(tmp_working_dir, "salomeTools"))
    sat_running_path = src.Path(config.VARS.salometoolsway)
    sat_running_path.copy(sat_tmp_path)

    # Update the local.pyconf file that contains the path to the project
    local_pyconf_name = "local.pyconf"
    local_pyconf_dir = os.path.join(tmp_working_dir, "salomeTools", "data")
    local_pyconf_file = os.path.join(local_pyconf_dir, local_pyconf_name)
    # Remove the .pyconf file in the root directory of salomeTools if there is
    # any. (For example when launching jobs, a pyconf file describing the jobs
    # can be here and is not useful)
    files_or_dir_SAT = os.listdir(os.path.join(tmp_working_dir, "salomeTools"))
    for file_or_dir in files_or_dir_SAT:
        if file_or_dir.endswith(".pyconf") or file_or_dir.endswith(".txt"):
            file_path = os.path.join(tmp_working_dir,
                                     "salomeTools",
                                     file_or_dir)
            os.remove(file_path)

    # overwrite local.pyconf with the package-specific template so the copied
    # sat finds the embedded project
    ff = open(local_pyconf_file, "w")
    ff.write(LOCAL_TEMPLATE)
    ff.close()

    return sat_tmp_path.path
# NOTE(review): sampled listing — some call arguments and the d_archives_vcs
# initialization are elided; commented-out debug lines are kept as found.
# Code kept verbatim; only comments added.
794 def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir):
795     '''For sources package that require that all products are get using an
796     archive, one has to create some archive for the vcs products.
797     So this method calls the clean and source command of sat and then create
800     :param l_pinfo_vcs List: The list of specific configuration corresponding to
802     :param sat Sat: The Sat instance that can be called to clean and source the
804     :param config Config: The global configuration.
805     :param logger Logger: the logging instance
806     :param tmp_working_dir str: The temporary local directory containing some
807                                 specific directories or files needed in the
809     :return: the dictionary that stores all the archives to add in the source
810              package. {label : (path_on_local_machine, path_in_archive)}
813     # clean the source directory of all the vcs products, then use the source
814     # command and thus construct an archive that will not contain the patches
815     l_prod_names = [pn for pn, __ in l_pinfo_vcs]
# This clean step is deliberately disabled — see the inline justification.
816     if False:  # clean is dangerous in user/SOURCES, fixed in tmp_local_working_dir
817         logger.write(_("\nclean sources\n"))
818         args_clean = config.VARS.application
819         args_clean += " --sources --products "
820         args_clean += ",".join(l_prod_names)
821         logger.write("WARNING: get_archives_vcs clean\n         '%s'\n" % args_clean, 1)
822         sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)
825     logger.write(_("get sources\n"))
826     args_source = config.VARS.application
827     args_source += " --products "
828     args_source += ",".join(l_prod_names)
# Temporarily redirect the application workdir to a local tmp directory so the
# freshly fetched sources do not pollute the user workdir; restored below.
829     svgDir = sat.cfg.APPLICATION.workdir
830     tmp_local_working_dir = os.path.join(sat.cfg.APPLICATION.workdir, "tmp_package")  # to avoid too much big files in /tmp
831     sat.cfg.APPLICATION.workdir = tmp_local_working_dir
832     # DBG.write("SSS sat config.APPLICATION.workdir", sat.cfg.APPLICATION, True)
833     # DBG.write("sat config id", id(sat.cfg), True)
834     # shit as config is not same id() as for sat.source()
835     # sat.source(args_source, batch=True, verbose=5, logger_add_link = logger)
837     source.run(args_source, sat, logger)  #use this mode as runner.cfg reference
839     # make the new archives
841     for pn, pinfo in l_pinfo_vcs:
842         path_archive = make_archive(pn, pinfo, tmp_local_working_dir)
843         logger.write("make archive vcs '%s'\n" % path_archive)
844         d_archives_vcs[pn] = (path_archive,
845                               os.path.join(ARCHIVE_DIR, pn + ".tgz"))
# Restore the original workdir before returning.
846     sat.cfg.APPLICATION.workdir = svgDir
847     # DBG.write("END sat config", sat.cfg.APPLICATION, True)
848     return d_archives_vcs
def make_archive(prod_name, prod_info, where):
    '''Create an archive of a product by searching its source directory.

    :param prod_name str: The name of the product.
    :param prod_info Config: The specific configuration corresponding to the
                             product
    :param where str: The path of the repository where to put the resulting
                      archive
    :return: The path of the resulting archive
    :rtype: str
    '''
    path_targz_prod = os.path.join(where, prod_name + ".tgz")
    tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
    local_path = prod_info.source_dir
    # NOTE(review): TarFile.add(exclude=...) was removed in Python 3.7; this
    # code targets the older API (use filter= on modern interpreters).
    tar_prod.add(local_path,
                 arcname=prod_name,
                 exclude=exclude_VCS_and_extensions)
    tar_prod.close()
    return path_targz_prod
# NOTE(review): sampled listing — the sub-directory name arguments (887-898),
# ff.close() and the return are elided. Code kept verbatim; comments only.
870 def create_project_for_src_package(config, tmp_working_dir, with_vcs):
871     '''Create a specific project for a source package.
873     :param config Config: The global configuration.
874     :param tmp_working_dir str: The temporary local directory containing some
875                                 specific directories or files needed in the
877     :param with_vcs boolean: True if the package is with vcs products (not
878                              transformed into archive products)
879     :return: The dictionary
880              {"project" : (produced project, project path in the archive)}
884     # Create in the working temporary directory the full project tree
885     project_tmp_dir = os.path.join(tmp_working_dir, PROJECT_DIR)
886     products_pyconf_tmp_dir = os.path.join(project_tmp_dir,
888     compil_scripts_tmp_dir = os.path.join(project_tmp_dir,
891     env_scripts_tmp_dir = os.path.join(project_tmp_dir,
894     patches_tmp_dir = os.path.join(project_tmp_dir,
897     application_tmp_dir = os.path.join(project_tmp_dir,
899     for directory in [project_tmp_dir,
900                       compil_scripts_tmp_dir,
903                       application_tmp_dir]:
904         src.ensure_path_exists(directory)
906     # Create the pyconf that contains the information of the project
907     project_pyconf_name = "project.pyconf"
908     project_pyconf_file = os.path.join(project_tmp_dir, project_pyconf_name)
909     ff = open(project_pyconf_file, "w")
910     ff.write(PROJECT_TEMPLATE)
913     # Loop over the products to get there pyconf and all the scripts
914     # (compilation, environment, patches)
915     # and create the pyconf file to add to the project
916     lproducts_name = config.APPLICATION.products.keys()
917     l_products = src.product.get_products_infos(lproducts_name, config)
918     for p_name, p_info in l_products:
919         # skip product with property not_in_package set to yes
920         if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
# NOTE(review): a `continue` appears elided here (line 921).
922         find_product_scripts_and_pyconf(p_name,
926                                         compil_scripts_tmp_dir,
929                                         products_pyconf_tmp_dir)
# Copy the application pyconf into the project tree as well.
931     find_application_pyconf(config, application_tmp_dir)
933     d_project = {"project" : (project_tmp_dir, PROJECT_DIR )}
# NOTE(review): the `return d_project` appears elided.
# NOTE(review): sampled listing — several parameters of the signature and some
# call arguments/conditions are elided. Code kept verbatim; comments only.
936 def find_product_scripts_and_pyconf(p_name,
940                                     compil_scripts_tmp_dir,
943                                     products_pyconf_tmp_dir):
944     '''Create a specific pyconf file for a given product. Get its environment
945     script, its compilation script and patches and put it in the temporary
946     working directory. This method is used in the source package in order to
947     construct the specific project.
949     :param p_name str: The name of the product.
950     :param p_info Config: The specific configuration corresponding to the
952     :param config Config: The global configuration.
953     :param with_vcs boolean: True if the package is with vcs products (not
954                              transformed into archive products)
955     :param compil_scripts_tmp_dir str: The path to the temporary compilation
956                                        scripts directory of the project.
957     :param env_scripts_tmp_dir str: The path to the temporary environment script
958                                     directory of the project.
959     :param patches_tmp_dir str: The path to the temporary patch scripts
960                                 directory of the project.
961     :param products_pyconf_tmp_dir str: The path to the temporary product
962                                         scripts directory of the project.
965     # read the pyconf of the product
966     product_pyconf_path = src.find_file_in_lpath(p_name + ".pyconf",
967                                                  config.PATHS.PRODUCTPATH)
968     product_pyconf_cfg = src.pyconf.Config(product_pyconf_path)
970     # find the compilation script if any
971     if src.product.product_has_script(p_info):
972         compil_script_path = src.Path(p_info.compil_script)
973         compil_script_path.copy(compil_scripts_tmp_dir)
# Rewrite the path in the copied pyconf so it points at the project-local copy.
974         product_pyconf_cfg[p_info.section].compil_script = os.path.basename(
975                                                         p_info.compil_script)
976     # find the environment script if any
977     if src.product.product_has_env_script(p_info):
978         env_script_path = src.Path(p_info.environ.env_script)
979         env_script_path.copy(env_scripts_tmp_dir)
980         product_pyconf_cfg[p_info.section].environ.env_script = os.path.basename(
981                                                     p_info.environ.env_script)
982     # find the patches if any
983     if src.product.product_has_patches(p_info):
984         patches = src.pyconf.Sequence()
985         for patch_path in p_info.patches:
986             p_path = src.Path(patch_path)
987             p_path.copy(patches_tmp_dir)
988             patches.append(os.path.basename(patch_path), "")
990         product_pyconf_cfg[p_info.section].patches = patches
# NOTE(review): the surrounding condition for the vcs-info copy (likely
# `if with_vcs: ...`, lines 991-992) appears elided.
993     # put in the pyconf file the resolved values
994     for info in ["git_info", "cvs_info", "svn_info"]:
996         for key in p_info[info]:
997             product_pyconf_cfg[p_info.section][info][key] = p_info[
1000     # if the product is not archive, then make it become archive.
1001     if src.product.product_is_vcs(p_info):
1002         product_pyconf_cfg[p_info.section].get_source = "archive"
1003         if not "archive_info" in product_pyconf_cfg[p_info.section]:
1004             product_pyconf_cfg[p_info.section].addMapping("archive_info",
1005                                         src.pyconf.Mapping(product_pyconf_cfg),
1007         product_pyconf_cfg[p_info.section
1008                           ].archive_info.archive_name = p_info.name + ".tgz"
1010     # write the pyconf file to the temporary project location
1011     product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir,
1013     ff = open(product_tmp_pyconf_path, 'w')
1014     ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
1015     product_pyconf_cfg.__save__(ff, 1)
# NOTE(review): ff.close() appears elided (line 1016).
def find_application_pyconf(config, application_tmp_dir):
    '''Find the application pyconf file and put a patched copy of it in the
    specific temporary directory containing the specific project of a
    source package.

    :param config Config: The global configuration.
    :param application_tmp_dir str: The path to the temporary application
                                    scripts directory of the project.
    '''
    # read the pyconf of the application
    application_name = config.VARS.application
    application_pyconf_path = src.find_file_in_lpath(
                                        application_name + ".pyconf",
                                        config.PATHS.APPLICATIONPATH)
    application_pyconf_cfg = src.pyconf.Config(application_pyconf_path)

    # Change the workdir
    # The packaged application must resolve its workdir relatively to the
    # salomeTools location inside the unpacked archive, not to the original
    # build workdir of the machine that produced the package.
    application_pyconf_cfg.APPLICATION.workdir = src.pyconf.Reference(
                                    application_pyconf_cfg,
                                    'VARS.salometoolsway + $VARS.sep + ".."')

    # Prevent from compilation in base
    application_pyconf_cfg.APPLICATION.no_base = "yes"

    # write the pyconf file to the temporary application location
    application_tmp_pyconf_path = os.path.join(application_tmp_dir,
                                               application_name + ".pyconf")
    ff = open(application_tmp_pyconf_path, 'w')
    # pyconf files carry a python-like shebang header; restore it before
    # dumping the configuration
    ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
    application_pyconf_cfg.__save__(ff, 1)
    # NOTE(review): ff does not appear to be closed in this excerpt --
    # confirm an ff.close() follows on a line outside this view.
def project_package(config, name_project, project_file_path, tmp_working_dir, logger):
    '''Prepare a dictionary that stores all the needed directories and files to
    add in a project package.

    :param config Config: The global configuration.
    :param name_project str: The name of the project to package.
    :param project_file_path str: The path to the local project.
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
                                project package.
    :param logger Logger: The logger instance used for output.
    :return: the dictionary that stores all the needed directories and files to
             add in a project package.
             {label : (path_on_local_machine, path_in_archive)}
    :rtype: dict
    '''
    # Read the project file and get the directories to add to the package

    # First try the project entry already loaded in the global configuration;
    # on failure, fall back (warning below) to reading the pyconf file
    # directly from project_file_path.
        project_pyconf_cfg = config.PROJECTS.projects.__getattr__(name_project)
WARNING: inexisting config.PROJECTS.projects.%s, try to read now from:\n%s\n""" % (name_project, project_file_path))
        project_pyconf_cfg = src.pyconf.Config(project_file_path)
        # PWD makes the $PWD references of the project pyconf resolve
        # relatively to the project file location
        project_pyconf_cfg.PWD = os.path.dirname(project_file_path)

    # project path keys -> name of the corresponding directory in the archive
    paths = {"ARCHIVEPATH" : "archives",
             "APPLICATIONPATH" : "applications",
             "PRODUCTPATH" : "products",
             "MACHINEPATH" : "machines"}
    # Loop over the project paths and add it
        if path not in project_pyconf_cfg:
        # Add the directory to the files to add in the package
        d_project[path] = (project_pyconf_cfg[path], paths[path])
        # Modify the value of the path in the package
        # (rewrite it as a reference relative to project_path so the pyconf
        # stays valid once the archive is unpacked elsewhere)
        project_pyconf_cfg[path] = src.pyconf.Reference(
                    'project_path + "/' + paths[path] + '"')

    # Modify some values
    if "project_path" not in project_pyconf_cfg:
        project_pyconf_cfg.addMapping("project_path",
                                      src.pyconf.Mapping(project_pyconf_cfg),
    project_pyconf_cfg.project_path = src.pyconf.Reference(project_pyconf_cfg,

    # Write the project pyconf file
    project_file_name = os.path.basename(project_file_path)
    project_pyconf_tmp_path = os.path.join(tmp_working_dir, project_file_name)
    ff = open(project_pyconf_tmp_path, 'w')
    ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
    project_pyconf_cfg.__save__(ff, 1)

    # the "hat" file is the top-level project pyconf itself, stored at the
    # root of the archive under its original file name
    d_project["Project hat file"] = (project_pyconf_tmp_path, project_file_name)
def add_readme(config, options, where):
    '''Create the README file of the package in the directory *where*.

    The README is assembled from a general header template plus one template
    file per package kind selected in *options* (binaries, sources, project,
    sat), with $-substitutions done through string.Template / src.template.

    :param config Config: The global configuration.
    :param options Options: The parsed options of the package command.
    :param where str: The directory in which to create the README file.
    :return: presumably the README path -- the return statement is outside
             this excerpt; the run() caller stores the result as the local
             path of the "README" archive entry.
    '''
    readme_path = os.path.join(where, "README")
    with codecs.open(readme_path, "w", 'utf-8') as f:
        # templates for building the header
# This package was generated with sat $version
# Distribution : $dist
In the following, $$ROOT represents the directory where you have installed
SALOME (the directory where this file is located).
        readme_compilation_with_binaries="""
compilation based on the binaries used as prerequisites
=======================================================
If you fail to compile the complete application (for example because
you are not root on your system and cannot install missing packages), you
may try a partial compilation based on the binaries.
For that it is necessary to copy the binaries from BINARIES to INSTALL,
and do some substitutions on cmake and .la files (replace the build directories
The procedure to do it is:
 1) Remove or rename INSTALL directory if it exists
 2) Execute the shell script install_bin.sh:
 3) Use SalomeTool (as explained in Sources section) and compile only the
    modules you need to (with -p option)
        readme_header_tpl=string.Template(readme_header)
        # paths of the per-package-kind README templates shipped with sat
        readme_template_path_bin = os.path.join(config.VARS.internal_dir,
                "README_BIN.template")
        readme_template_path_bin_launcher = os.path.join(config.VARS.internal_dir,
                "README_LAUNCHER.template")
        readme_template_path_bin_virtapp = os.path.join(config.VARS.internal_dir,
                "README_BIN_VIRTUAL_APP.template")
        readme_template_path_src = os.path.join(config.VARS.internal_dir,
                "README_SRC.template")
        readme_template_path_pro = os.path.join(config.VARS.internal_dir,
                "README_PROJECT.template")
        readme_template_path_sat = os.path.join(config.VARS.internal_dir,
                "README_SAT.template")
        # prepare substitution dictionary
        # NOTE(review): the initialization of d is not visible in this
        # excerpt -- it is filled here and passed to every template below
        d['user'] = config.VARS.user
        d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
        d['version'] = config.INTERNAL.sat_version
        d['dist'] = config.VARS.dist
        f.write(readme_header_tpl.substitute(d)) # write the general header (common)

        if options.binaries or options.sources:
            d['application'] = config.VARS.application
            f.write("# Application: " + d['application'] + "\n")
            if 'KERNEL' in config.APPLICATION.products:
                VersionSalome = src.get_salome_version(config)
                # Case where SALOME has the launcher that uses the SalomeContext API
                if VersionSalome >= 730:
                    d['launcher'] = config.APPLICATION.profile.launcher_name
                    d['virtual_app'] = 'runAppli' # this info is not used now)

        # write the specific sections
        if options.binaries:
            f.write(src.template.substitute(readme_template_path_bin, d))
            if "virtual_app" in d:
                f.write(src.template.substitute(readme_template_path_bin_virtapp, d))
                f.write(src.template.substitute(readme_template_path_bin_launcher, d))
            f.write(src.template.substitute(readme_template_path_src, d))

        if options.binaries and options.sources:
            f.write(readme_compilation_with_binaries)
            f.write(src.template.substitute(readme_template_path_pro, d))
            f.write(src.template.substitute(readme_template_path_sat, d))
def update_config(config, prop, value):
    '''Strip from config.APPLICATION.products every product whose pyconf
    property *prop* is set to *value*.

    :param config Config: The global config.
    :param prop str: The property to filter.
    :param value str: The value of the property that triggers removal.
    '''
    src.check_config_has_application(config)
    # Collect the matching products first: deleting entries while iterating
    # over the mapping would be unsafe.
    names_to_drop = [name
                     for name in config.APPLICATION.products.keys()
                     if src.get_property_in_product_cfg(
                            src.product.get_product_config(config, name),
                            prop) == value]
    for name in names_to_drop:
        del config.APPLICATION.products[name]
1218 '''method that is called when salomeTools is called with --help option.
1220 :return: The text to display for the package command description.
1224 The package command creates a tar file archive of a product.
1225 There are four kinds of archive, which can be mixed:
1227 1 - The binary archive.
1228 It contains the product installation directories plus a launcher.
1229 2 - The sources archive.
1230 It contains the product archives, a project (the application plus salomeTools).
1231 3 - The project archive.
1232 It contains a project (give the project file path as argument).
1233 4 - The salomeTools archive.
1234 It contains code utility salomeTools.
1237 >> sat package SALOME-master --binaries --sources""")
1239 def run(args, runner, logger):
1240 '''method that is called when salomeTools is called with package parameter.
1244 (options, args) = parser.parse_args(args)
1246 # Check that a type of package is called, and only one
1247 all_option_types = (options.binaries,
1249 options.project not in ["", None],
1252 # Check if no option for package type
1253 if all_option_types.count(True) == 0:
1254 msg = _("Error: Precise a type for the package\nUse one of the "
1255 "following options: --binaries, --sources, --project or"
1257 logger.write(src.printcolors.printcError(msg), 1)
1258 logger.write("\n", 1)
1261 # The repository where to put the package if not Binary or Source
1262 package_default_path = runner.cfg.LOCAL.workdir
1264 # if the package contains binaries or sources:
1265 if options.binaries or options.sources:
1266 # Check that the command has been called with an application
1267 src.check_config_has_application(runner.cfg)
1269 # Display information
1270 logger.write(_("Packaging application %s\n") % src.printcolors.printcLabel(
1271 runner.cfg.VARS.application), 1)
1273 # Get the default directory where to put the packages
1274 package_default_path = os.path.join(runner.cfg.APPLICATION.workdir,
1276 src.ensure_path_exists(package_default_path)
1278 # if the package contains a project:
1280 # check that the project is visible by SAT
1281 projectNameFile = options.project + ".pyconf"
1283 for i in runner.cfg.PROJECTS.project_file_paths:
1284 baseName = os.path.basename(i)
1285 if baseName == projectNameFile:
1289 if foundProject is None:
1290 local_path = os.path.join(runner.cfg.VARS.salometoolsway,
1293 msg = _("""ERROR: the project %(1)s is not visible by salomeTools.
1297 Please add it in file:
1299 {"1": options.project, "2": "\n ".join(runner.cfg.PROJECTS.project_file_paths), "3": local_path})
1300 logger.write(src.printcolors.printcError(msg), 1)
1301 logger.write("\n", 1)
1304 options.project_file_path = foundProject
1305 src.printcolors.print_value(logger, "Project path", options.project_file_path, 2)
1307 # Remove the products that are filtered by the --without_property option
1308 if options.without_property:
1309 [prop, value] = options.without_property.split(":")
1310 update_config(runner.cfg, prop, value)
1312 # get the name of the archive or build it
1314 if os.path.basename(options.name) == options.name:
1315 # only a name (not a path)
1316 archive_name = options.name
1317 dir_name = package_default_path
1319 archive_name = os.path.basename(options.name)
1320 dir_name = os.path.dirname(options.name)
1322 # suppress extension
1323 if archive_name[-len(".tgz"):] == ".tgz":
1324 archive_name = archive_name[:-len(".tgz")]
1325 if archive_name[-len(".tar.gz"):] == ".tar.gz":
1326 archive_name = archive_name[:-len(".tar.gz")]
1330 dir_name = package_default_path
1331 if options.binaries or options.sources:
1332 archive_name = runner.cfg.APPLICATION.name
1334 if options.binaries:
1335 archive_name += "-"+runner.cfg.VARS.dist
1338 archive_name += "-SRC"
1339 if options.with_vcs:
1340 archive_name += "-VCS"
1343 project_name = options.project
1344 archive_name += ("PROJECT-" + project_name)
1347 archive_name += ("salomeTools_" + runner.cfg.INTERNAL.sat_version)
1348 if len(archive_name)==0: # no option worked
1349 msg = _("Error: Cannot name the archive\n"
1350 " check if at least one of the following options was "
1351 "selected : --binaries, --sources, --project or"
1353 logger.write(src.printcolors.printcError(msg), 1)
1354 logger.write("\n", 1)
1357 path_targz = os.path.join(dir_name, archive_name + ".tgz")
1359 src.printcolors.print_value(logger, "Package path", path_targz, 2)
1361 # Create a working directory for all files that are produced during the
1362 # package creation and that will be removed at the end of the command
1363 tmp_working_dir = os.path.join(runner.cfg.VARS.tmp_root, runner.cfg.VARS.datehour)
1364 src.ensure_path_exists(tmp_working_dir)
1365 logger.write("\n", 5)
1366 logger.write(_("The temporary working directory: %s\n" % tmp_working_dir),5)
1368 logger.write("\n", 3)
1370 msg = _("Preparation of files to add to the archive")
1371 logger.write(src.printcolors.printcLabel(msg), 2)
1372 logger.write("\n", 2)
1374 d_files_to_add={} # content of the archive
1376 # a dict to hold paths that will need to be substitute for users recompilations
1377 d_paths_to_substitute={}
1379 if options.binaries:
1380 d_bin_files_to_add = binary_package(runner.cfg,
1384 # for all binaries dir, store the substitution that will be required
1385 # for extra compilations
1386 for key in d_bin_files_to_add:
1387 if key.endswith("(bin)"):
1388 source_dir = d_bin_files_to_add[key][0]
1389 path_in_archive = d_bin_files_to_add[key][1].replace("BINARIES-" + runner.cfg.VARS.dist,"INSTALL")
1390 if os.path.basename(source_dir)==os.path.basename(path_in_archive):
1391 # if basename is the same we will just substitute the dirname
1392 d_paths_to_substitute[os.path.dirname(source_dir)]=\
1393 os.path.dirname(path_in_archive)
1395 d_paths_to_substitute[source_dir]=path_in_archive
1397 d_files_to_add.update(d_bin_files_to_add)
1400 d_files_to_add.update(source_package(runner,
1405 if options.binaries:
1406 # for archives with bin and sources we provide a shell script able to
1407 # install binaries for compilation
1408 file_install_bin=produce_install_bin_file(runner.cfg,logger,
1410 d_paths_to_substitute,
1412 d_files_to_add.update({"install_bin" : (file_install_bin, "install_bin.sh")})
1413 logger.write("substitutions that need to be done later : \n", 5)
1414 logger.write(str(d_paths_to_substitute), 5)
1415 logger.write("\n", 5)
1417 # --salomeTool option is not considered when --sources is selected, as this option
1418 # already brings salomeTool!
1420 d_files_to_add.update({"salomeTools" : (runner.cfg.VARS.salometoolsway, "")})
1423 DBG.write("config for package %s" % project_name, runner.cfg)
1424 d_files_to_add.update(project_package(runner.cfg, project_name, options.project_file_path, tmp_working_dir, logger))
1426 if not(d_files_to_add):
1427 msg = _("Error: Empty dictionnary to build the archive!\n")
1428 logger.write(src.printcolors.printcError(msg), 1)
1429 logger.write("\n", 1)
1432 # Add the README file in the package
1433 local_readme_tmp_path = add_readme(runner.cfg, options, tmp_working_dir)
1434 d_files_to_add["README"] = (local_readme_tmp_path, "README")
1436 # Add the additional files of option add_files
1437 if options.add_files:
1438 for file_path in options.add_files:
1439 if not os.path.exists(file_path):
1440 msg = _("WARNING: the file %s is not accessible.\n" % file_path)
1442 file_name = os.path.basename(file_path)
1443 d_files_to_add[file_name] = (file_path, file_name)
1445 logger.write("\n", 2)
1447 logger.write(src.printcolors.printcLabel(_("Actually do the package")), 2)
1448 logger.write("\n", 2)
1452 # Creating the object tarfile
1453 tar = tarfile.open(path_targz, mode='w:gz')
1455 # get the filtering function if needed
1456 filter_function = exclude_VCS_and_extensions
1458 # Add the files to the tarfile object
1459 res = add_files(tar, archive_name, d_files_to_add, logger, f_exclude=filter_function)
1461 except KeyboardInterrupt:
1462 logger.write(src.printcolors.printcError("\nERROR: forced interruption\n"), 1)
1463 logger.write(_("Removing the temporary working directory '%s'... ") % tmp_working_dir, 1)
1464 # remove the working directory
1465 shutil.rmtree(tmp_working_dir)
1466 logger.write(_("OK"), 1)
1467 logger.write(_("\n"), 1)
1470 # unconditionaly remove the tmp_local_working_dir
1471 tmp_local_working_dir = os.path.join(runner.cfg.APPLICATION.workdir, "tmp_package")
1472 if os.path.isdir(tmp_local_working_dir):
1473 shutil.rmtree(tmp_local_working_dir)
1475 # have to decide some time
1476 DBG.tofix("make shutil.rmtree('%s') effective" % tmp_working_dir, "", DBG.isDeveloper())
1478 # Print again the path of the package
1479 logger.write("\n", 2)
1480 src.printcolors.print_value(logger, "Package path", path_targz, 2)