3 # Copyright (C) 2010-2012 CEA/DEN
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 # Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
29 from application import get_SALOME_modules
30 import src.debug as DBG
# Name of the sub-directory of a package that holds the product archives.
ARCHIVE_DIR = "ARCHIVES"
# Name of the sub-directory of a source package that holds the generated project.
PROJECT_DIR = "PROJECT"
# Directory-name fragments excluded from the archives (VCS bookkeeping).
IGNORED_DIRS = [".git", ".svn"]
# File extensions excluded from the archives (none by default).
IGNORED_EXTENSIONS = []
# Template of the project.pyconf file written into the PROJECT directory of a
# source package.
# NOTE(review): this view of the file elides some lines — the closing quotes of
# PROJECT_TEMPLATE and the beginning/end of LOCAL_TEMPLATE are not visible here.
PROJECT_TEMPLATE = """#!/usr/bin/env python
# The path to the archive root directory
root_path : $PWD + "/../"
project_path : $PWD + "/"
# Where to search the archives of the products
ARCHIVEPATH : $root_path + "ARCHIVES"
# Where to search the pyconf of the applications
APPLICATIONPATH : $project_path + "applications/"
# Where to search the pyconf of the products
PRODUCTPATH : $project_path + "products/"
# Where to search the pyconf of the jobs of the project
JOBPATH : $project_path + "jobs/"
# Where to search the pyconf of the machines of the project
MACHINEPATH : $project_path + "machines/"
LOCAL_TEMPLATE = ("""#!/usr/bin/env python
archive_dir : 'default'
project_file_paths : [$VARS.salometoolsway + $VARS.sep + \"..\" + $VARS.sep"""
""" + \"""" + PROJECT_DIR + """\" + $VARS.sep + "project.pyconf"]
# Define all possible option for the package command :  sat package <options>
parser = src.options.Options()
parser.add_option('b', 'binaries', 'boolean', 'binaries',
    _('Optional: Produce a binary package.'), False)
parser.add_option('f', 'force_creation', 'boolean', 'force_creation',
    _('Optional: Only binary package: produce the archive even if '
      'there are some missing products.'), False)
parser.add_option('s', 'sources', 'boolean', 'sources',
    _('Optional: Produce a compilable archive of the sources of the '
      'application.'), False)
# NOTE(review): the default-value argument of the following call (presumably
# "False)") is elided from this view of the file.
parser.add_option('', 'with_vcs', 'boolean', 'with_vcs',
    _('Optional: Only source package: do not make archive of vcs products.'),
parser.add_option('p', 'project', 'string', 'project',
    _('Optional: Produce an archive that contains a project.'), "")
parser.add_option('t', 'salometools', 'boolean', 'sat',
    _('Optional: Produce an archive that contains salomeTools.'), False)
parser.add_option('n', 'name', 'string', 'name',
    _('Optional: The name or full path of the archive.'), None)
parser.add_option('', 'add_files', 'list2', 'add_files',
    _('Optional: The list of additional files to add to the archive.'), [])
parser.add_option('', 'without_commercial', 'boolean', 'without_commercial',
    _('Optional: do not add commercial licence.'), False)
parser.add_option('', 'without_property', 'string', 'without_property',
    _('Optional: Filter the products by their properties.\n\tSyntax: '
      '--without_property <property>:<value>'))
def add_files(tar, name_archive, d_content, logger, f_exclude=None):
    '''Create an archive containing all directories and files that are given in
    the d_content argument.

    :param tar tarfile: The tarfile instance used to make the archive.
    :param name_archive str: The name of the archive to make.
    :param d_content dict: The dictionary that contain all directories and files
                           to add in the archive.
                           d_content[label] =
                                        (path_on_local_machine, path_in_archive)
    :param logger Logger: the logging instance
    :param f_exclude Function: the function that filters out unwanted entries
                               (passed through to tarfile's "exclude" hook).
    :return: 0 if success, 1 if not.
    :rtype: int
    '''
    # get the max length of the messages in order to align the display
    max_len = len(max(d_content.keys(), key=len))

    # overall status: stays 0 unless at least one entry fails to be added
    success = 0
    # loop over each directory or file stored in the d_content dictionary
    for name in sorted(d_content.keys()):
        # display information
        len_points = max_len - len(name)
        # Get the local path and the path in archive
        # of the directory or file to add
        local_path, archive_path = d_content[name]
        in_archive = os.path.join(name_archive, archive_path)
        logger.write(name + " " + len_points * "." + " " + in_archive + " ", 3)
        # Add it in the archive; a failure on one entry is reported but does
        # not abort the loop, so the caller gets a single aggregated status.
        try:
            tar.add(local_path, arcname=in_archive, exclude=f_exclude)
            logger.write(src.printcolors.printcSuccess(_("OK")), 3)
        except Exception as e:
            logger.write(src.printcolors.printcError(_("KO ")), 3)
            logger.write(str(e), 3)
            success = 1
        logger.write("\n", 3)
    return success
def exclude_VCS_and_extensions(filename, ignored_dirs=None,
                               ignored_extensions=None):
    ''' The function that is used to exclude from package the links to the
    VCS repositories (like .git) and files with unwanted extensions.

    :param filename Str: The filename to exclude (or not).
    :param ignored_dirs List: Optional override of the directory-name fragments
                              that trigger exclusion (defaults to the
                              module-level IGNORED_DIRS).
    :param ignored_extensions List: Optional override of the file extensions
                                    that trigger exclusion (defaults to the
                                    module-level IGNORED_EXTENSIONS).
    :return: True if the file has to be excluded, False otherwise.
    :rtype: Boolean
    '''
    # fall back on the module-level configuration when no override is given
    if ignored_dirs is None:
        ignored_dirs = IGNORED_DIRS
    if ignored_extensions is None:
        ignored_extensions = IGNORED_EXTENSIONS
    # exclude any path that contains a VCS bookkeeping directory name
    for dir_name in ignored_dirs:
        if dir_name in filename:
            return True
    # exclude files carrying an ignored extension
    for extension in ignored_extensions:
        if filename.endswith(extension):
            return True
    return False
def produce_relative_launcher(config,
                              with_commercial=True):
    # NOTE(review): this view of the file elides several lines of this function
    # (the intermediate signature parameters logger, file_dir, file_name,
    # binaries_dir_name per the docstring; an "else:" line; the launcher file
    # close and the chmod call). Do not assume the visible code is complete.
    '''Create a specific SALOME launcher for the binary package. This launcher
    uses relative paths.

    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param file_dir str: the directory where to put the launcher
    :param file_name str: The launcher name
    :param binaries_dir_name str: the name of the repository where the binaries
                                  are stored in the archive
    :param with_commercial boolean: False to exclude commercial-licence parts
    :return: the path of the produced launcher
    :rtype: str
    '''
    # get KERNEL installation path
    kernel_root_dir = os.path.join(binaries_dir_name, "KERNEL")

    # set kernel bin dir (considering fhs property)
    kernel_cfg = src.product.get_product_config(config, "KERNEL")
    if src.get_property_in_product_cfg(kernel_cfg, "fhs"):
        bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin")
        # NOTE(review): the "else:" separating these two assignments is elided;
        # the next line is presumably the non-fhs branch — verify.
        bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin","salome")

    # check if the application contains an application module
    # NOTE(review): the second argument of this call (presumably config) is
    # elided from this view.
    l_product_info = src.product.get_products_infos(config.APPLICATION.products.keys(),
    salome_application_name="Not defined"
    for prod_name, prod_info in l_product_info:
        # look for a salome application
        if src.get_property_in_product_cfg(prod_info, "is_salome_application") == "yes":
            salome_application_name=prod_info.name

    # if the application contains an application module, we set ABSOLUTE_APPLI_PATH to it
    # if not we set it to KERNEL_INSTALL_DIR, which is sufficient, except for salome test
    if salome_application_name == "Not defined":
        app_root_dir=kernel_root_dir
        # NOTE(review): the "else:" separating these two assignments is elided.
        app_root_dir=os.path.join(binaries_dir_name, salome_application_name)

    # Get the launcher template and do substitutions
    withProfile = src.fileEnviron.withProfile

    # rewrite the template paths so they are relative to out_dir_Path
    withProfile = withProfile.replace(
        "ABSOLUTE_APPLI_PATH'] = 'KERNEL_INSTALL_DIR'",
        "ABSOLUTE_APPLI_PATH'] = out_dir_Path + '" + config.VARS.sep + app_root_dir + "'")
    withProfile = withProfile.replace(
        " 'BIN_KERNEL_INSTALL_DIR'",
        " out_dir_Path + '" + config.VARS.sep + bin_kernel_install_dir + "'")

    # split the template around the marker so the generated environment can be
    # inserted in between
    before, after = withProfile.split(
        "# here your local standalone environment\n")

    # create an environment file writer
    # NOTE(review): the remaining constructor arguments are elided here.
    writer = src.environment.FileEnvWriter(config,
    filepath = os.path.join(file_dir, file_name)
    # open the file and write into it
    launch_file = open(filepath, "w")
    launch_file.write(before)
    # write the environment section generated for the package
    writer.write_cfgForPy_file(launch_file,
                               for_package = binaries_dir_name,
                               with_commercial=with_commercial)
    launch_file.write(after)
    # NOTE(review): launch_file.close() is not visible in this view — verify.

    # Little hack to put out_dir_Path outside the strings
    src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' )

    # A hack to put a call to a file for distene licence.
    # It does nothing to an application that has no distene product
    hack_for_distene_licence(filepath)

    # change the rights in order to make the file executable for everybody
def hack_for_distene_licence(filepath):
    '''Replace the distene licence env variable by a call to a file.

    :param filepath Str: The path to the launcher to modify.
    '''
    # NOTE(review): several lines of this function are elided from this view
    # (the definition of "fileout" — presumably fileout = filepath — the search
    # loop bookkeeping around "num_line", the no-op early-return branch, and
    # the file close/cleanup calls).
    # keep the original launcher aside and rewrite it in place
    shutil.move(filepath, filepath + "_old")
    filein = filepath + "_old"
    fin = open(filein, "r")
    fout = open(fileout, "w")
    text = fin.readlines()
    # Find the Distene section
    for i,line in enumerate(text):
        if "# Set DISTENE License" in line:
    # No distene product, there is nothing to do
    # drop the two lines that set the licence env variable directly
    del text[num_line +1]
    del text[num_line +1]
    # replacement: load the licence variables from an external site file
    text_to_insert =""" import imp
distene = imp.load_source('distene_licence', '/data/tmpsalome/salome/prerequis/install/LICENSE/dlim8.var.py')
distene.set_distene_variables(context)
    text.insert(num_line + 1, text_to_insert)
def produce_relative_env_files(config,
    # NOTE(review): the remaining signature parameters (logger, file_dir,
    # binaries_dir_name per the docstring) are elided from this view, as are
    # the FileEnvWriter constructor arguments and the chmod call below.
    '''Create some specific environment files for the binary package. These
    files use relative paths.

    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param file_dir str: the directory where to put the files
    :param binaries_dir_name str: the name of the repository where the binaries
                                  are stored in the archive
    :return: the list of path of the produced environment files
    :rtype: List
    '''
    # create an environment file writer
    writer = src.environment.FileEnvWriter(config,
    # produce env_launch.sh with paths rewritten for the package layout
    filepath = writer.write_env_file("env_launch.sh",
                                     for_package = binaries_dir_name)

    # Little hack to put out_dir_Path as environment variable
    src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )

    # change the rights in order to make the file executable for everybody
def produce_install_bin_file(config,
    # NOTE(review): the remaining signature parameters (logger, file_dir, d_sub,
    # file_name per the docstring) are elided from this view, as are the loop
    # headers iterating over d_sub, the initialization of "d", and the chmod
    # and return statements.
    '''Create a bash shell script which do substitutions in BIRARIES dir
    in order to use it for extra compilations.

    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param file_dir str: the directory where to put the files
    :param d_sub, dict: the dictionnary that contains the substitutions to be done
    :param file_name str: the name of the install script file
    :return: the produced file
    :rtype: str
    '''
    filepath = os.path.join(file_dir, file_name)
    # open the file and write into it
    # use codec utf-8 as sat variables are in unicode
    with codecs.open(filepath, "w", 'utf-8') as installbin_file:
        installbin_template_path = os.path.join(config.VARS.internal_dir,
                                                "INSTALL_BIN.template")

        # build the name of the directory that will contain the binaries
        binaries_dir_name = "BINARIES-" + config.VARS.dist
        # build the substitution loop: a shell snippet that sed-replaces each
        # key of d_sub by its target path in every file that mentions it
        loop_cmd = "for f in $(grep -RIl"
        loop_cmd += " -e "+ key
        loop_cmd += ' INSTALL); do\n sed -i "\n'
        loop_cmd += " s?" + key + "?$(pwd)/" + d_sub[key] + "?g\n"
        loop_cmd += ' " $f\ndone'

        # template substitution dictionary
        d["BINARIES_DIR"] = binaries_dir_name
        d["SUBSTITUTION_LOOP"]=loop_cmd

        # substitute the template and write it in file
        content=src.template.substitute(installbin_template_path, d)
        installbin_file.write(content)
        # change the rights in order to make the file executable for everybody
def product_appli_creation_script(config,
    # NOTE(review): the remaining signature parameters (logger, file_dir,
    # binaries_dir_name per the docstring) are elided from this view, as are
    # the initialization of text_to_add, some "continue"/"else:" lines, the
    # module-name pieces of the string concatenations, the ff.close() call and
    # the chmod mode argument / return statement at the end.
    '''Create a script that can produce an application (EDF style) in the binary
    package.

    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param file_dir str: the directory where to put the file
    :param binaries_dir_name str: the name of the repository where the binaries
                                  are stored in the archive
    :return: the path of the produced script file
    :rtype: Str
    '''
    template_name = "create_appli.py.for_bin_packages.template"
    template_path = os.path.join(config.VARS.internal_dir, template_name)
    text_to_fill = open(template_path, "r").read()
    text_to_fill = text_to_fill.replace("TO BE FILLED 1",
                                        '"' + binaries_dir_name + '"')

    # build one <module .../> line per SALOME module of the application
    for product_name in get_SALOME_modules(config):
        product_info = src.product.get_product_config(config, product_name)

        if src.product.product_is_smesh_plugin(product_info):

        if 'install_dir' in product_info and bool(product_info.install_dir):
            if src.product.product_is_cpp(product_info):
                # cpp products can generate several components
                for cpp_name in src.product.get_product_components(product_info):
                    line_to_add = ("<module name=\"" +
                                   "\" gui=\"yes\" path=\"''' + "
                                   "os.path.join(dir_bin_name, \"" +
                                   cpp_name + "\") + '''\"/>")
                line_to_add = ("<module name=\"" +
                               "\" gui=\"yes\" path=\"''' + "
                               "os.path.join(dir_bin_name, \"" +
                               product_name + "\") + '''\"/>")
            text_to_add += line_to_add + "\n"

    filled_text = text_to_fill.replace("TO BE FILLED 2", text_to_add)

    tmp_file_path = os.path.join(file_dir, "create_appli.py")
    ff = open(tmp_file_path, "w")
    ff.write(filled_text)

    # change the rights in order to make the file executable for everybody
    os.chmod(tmp_file_path,
def binary_package(config, logger, options, tmp_working_dir):
    # NOTE(review): many lines of this long function are elided from this view
    # (list initializations such as l_install_dir/l_not_installed/l_source_dir/
    # d_products, several "else:"/"continue" lines, trailing call arguments,
    # and the return statement). Hedged notes mark the visible gaps.
    '''Prepare a dictionary that stores all the needed directories and files to
    add in a binary package.

    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param options OptResult: the options of the launched command
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
                                binary package
    :return: the dictionary that stores all the needed directories and files to
             add in a binary package.
             {label : (path_on_local_machine, path_in_archive)}
    :rtype: dict
    '''
    # Get the list of product installation to add to the archive
    l_products_name = sorted(config.APPLICATION.products.keys())
    l_product_info = src.product.get_products_infos(l_products_name,
    l_sources_not_present = []
    generate_mesa_launcher = False  # a flag to know if we generate a mesa launcher
    for prod_name, prod_info in l_product_info:

        # Add the sources of the products that have the property
        # sources_in_package : "yes"
        if src.get_property_in_product_cfg(prod_info,
                                           "sources_in_package") == "yes":
            if os.path.exists(prod_info.source_dir):
                l_source_dir.append((prod_name, prod_info.source_dir))
                # NOTE(review): "else:" elided before the following line
                l_sources_not_present.append(prod_name)

        # if at least one of the application products has the "is_mesa" property
        if src.get_property_in_product_cfg(prod_info, "is_mesa") == "yes":
            generate_mesa_launcher = True  # we will generate a mesa launcher

        # ignore the native and fixed products for install directories
        if (src.product.product_is_native(prod_info)
                or src.product.product_is_fixed(prod_info)
                or not src.product.product_compiles(prod_info)):
        if src.product.check_installation(prod_info):
            l_install_dir.append((prod_name, prod_info.install_dir))
            # NOTE(review): "else:" elided before the following line
            l_not_installed.append(prod_name)

        # Add also the cpp generated modules (if any)
        if src.product.product_is_cpp(prod_info):
            # cpp module
            for name_cpp in src.product.get_product_components(prod_info):
                install_dir = os.path.join(config.APPLICATION.workdir,
                if os.path.exists(install_dir):
                    l_install_dir.append((name_cpp, install_dir))
                    # NOTE(review): "else:" elided before the following line
                    l_not_installed.append(name_cpp)

    # check the name of the directory that (could) contains the binaries
    # from previous detar
    binaries_from_detar = os.path.join(config.APPLICATION.workdir, "BINARIES-" + config.VARS.dist)
    if os.path.exists(binaries_from_detar):
        # NOTE(review): the opening of the logger.write("""...""") call that
        # emits this warning is elided from this view.
WARNING: existing binaries directory from previous detar installation:
To make new package from this, you have to:
1) install binaries in INSTALL directory with the script "install_bin.sh"
   see README file for more details
2) or recompile everything in INSTALL with "sat compile" command
   this step is long, and requires some linux packages to be installed
""" % binaries_from_detar)

    # Print warning or error if there are some missing products
    if len(l_not_installed) > 0:
        text_missing_prods = ""
        for p_name in l_not_installed:
            text_missing_prods += "-" + p_name + "\n"
        # without --force_creation a missing installation aborts the package
        if not options.force_creation:
            msg = _("ERROR: there are missing products installations:")
            logger.write("%s\n%s" % (src.printcolors.printcError(msg),
            # NOTE(review): "else:" elided before the following lines
            msg = _("WARNING: there are missing products installations:")
            logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),

    # Do the same for sources
    if len(l_sources_not_present) > 0:
        text_missing_prods = ""
        for p_name in l_sources_not_present:
            text_missing_prods += "-" + p_name + "\n"
        if not options.force_creation:
            msg = _("ERROR: there are missing products sources:")
            logger.write("%s\n%s" % (src.printcolors.printcError(msg),
            # NOTE(review): "else:" elided before the following lines
            msg = _("WARNING: there are missing products sources:")
            logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),

    # construct the name of the directory that will contain the binaries
    binaries_dir_name = "BINARIES-" + config.VARS.dist

    # construct the correlation table between the product names, there
    # actual install directories and there install directory in archive
    for prod_name, install_dir in l_install_dir:
        path_in_archive = os.path.join(binaries_dir_name, prod_name)
        d_products[prod_name + " (bin)"] = (install_dir, path_in_archive)

    for prod_name, source_dir in l_source_dir:
        path_in_archive = os.path.join("SOURCES", prod_name)
        d_products[prod_name + " (sources)"] = (source_dir, path_in_archive)

    # for packages of SALOME applications including KERNEL,
    # we produce a salome launcher or a virtual application (depending on salome version)
    if 'KERNEL' in config.APPLICATION.products:
        VersionSalome = src.get_salome_version(config)
        # Case where SALOME has the launcher that uses the SalomeContext API
        if VersionSalome >= 730:
            # create the relative launcher and add it to the files to add
            launcher_name = src.get_launcher_name(config)
            # NOTE(review): intermediate call arguments are elided here
            launcher_package = produce_relative_launcher(config,
                not(options.without_commercial))
            d_products["launcher"] = (launcher_package, launcher_name)

            # if the application contains mesa products, we generate in addition to the
            # classical salome launcher a launcher using mesa and called mesa_salome
            # (the mesa launcher will be used for remote usage through ssh).
            if generate_mesa_launcher:
                #if there is one : store the use_mesa property
                restore_use_mesa_option=None
                if ('properties' in config.APPLICATION and
                        'use_mesa' in config.APPLICATION.properties):
                    restore_use_mesa_option = config.APPLICATION.properties.use_mesa

                # activate mesa property, and generate a mesa launcher
                src.activate_mesa_property(config)  #activate use_mesa property
                launcher_mesa_name="mesa_"+launcher_name
                # NOTE(review): intermediate call arguments are elided here
                launcher_package_mesa = produce_relative_launcher(config,
                    not(options.without_commercial))
                d_products["launcher (mesa)"] = (launcher_package_mesa, launcher_mesa_name)

                # if there was a use_mesa value, we restore it
                # else we set it to the default value "no"
                if restore_use_mesa_option != None:
                    config.APPLICATION.properties.use_mesa=restore_use_mesa_option
                    # NOTE(review): "else:" elided before the following line
                    config.APPLICATION.properties.use_mesa="no"

            # if we mix binaries and sources, we add a copy of the launcher,
            # prefixed with "bin",in order to avoid clashes
            d_products["launcher (copy)"] = (launcher_package, "bin"+launcher_name)
            # NOTE(review): the "else:" branch for SALOME < 7.3 (virtual
            # application via a creation script) appears partly elided below.
            # Provide a script for the creation of an application EDF style
            appli_script = product_appli_creation_script(config,
            d_products["appli script"] = (appli_script, "create_appli.py")

    # Put also the environment file
    env_file = produce_relative_env_files(config,
    d_products["environment file"] = (env_file, "env_launch.sh")
def source_package(sat, config, logger, options, tmp_working_dir):
    '''Prepare a dictionary that stores all the needed directories and files to
    add in a source package.

    :param sat Sat: The Sat instance, used to call other sat commands.
    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param options OptResult: the options of the launched command
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
                                source package
    :return: the dictionary that stores all the needed directories and files to
             add in a source package.
             {label : (path_on_local_machine, path_in_archive)}
    :rtype: dict
    '''
    # NOTE(review): some lines are elided from this view (trailing call
    # arguments, the "else:" of the with_vcs test, the try/finally around the
    # chdir, the restore os.chdir(t), and the return of d_source).

    # Get all the products that are prepared using an archive
    logger.write("Find archive products ... ")
    d_archives, l_pinfo_vcs = get_archives(config, logger)
    logger.write("Done\n")

    if not options.with_vcs and len(l_pinfo_vcs) > 0:
        # Make archives with the products that are not prepared using an archive
        # (git, cvs, svn, etc)
        logger.write("Construct archives for vcs products ... ")
        d_archives_vcs = get_archives_vcs(l_pinfo_vcs,
        logger.write("Done\n")

    # Create a project
    logger.write("Create the project ... ")
    d_project = create_project_for_src_package(config,
    logger.write("Done\n")

    # Add salomeTools
    tmp_sat = add_salomeTools(config, tmp_working_dir)
    d_sat = {"salomeTools" : (tmp_sat, "salomeTools")}

    # Add a sat symbolic link if not win
    if not src.architecture.is_windows():
        tmp_satlink_path = os.path.join(tmp_working_dir, 'sat')
        # In the jobs, os.getcwd() can fail
        # save the current working directory before moving away
        # NOTE(review): the restoring os.chdir(t) is not visible in this view —
        # verify it exists in the full file.
        t = config.LOCAL.workdir
        os.chdir(tmp_working_dir)
        if os.path.lexists(tmp_satlink_path):
            os.remove(tmp_satlink_path)
        os.symlink(os.path.join('salomeTools', 'sat'), 'sat')

        d_sat["sat link"] = (tmp_satlink_path, "sat")

    d_source = src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
def get_archives(config, logger):
    '''Find all the products that are get using an archive and all the products
    that are get using a vcs (git, cvs, svn) repository.

    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :return: the dictionary {name_product :
             (local path of its archive, path in the package of its archive )}
             and the list of specific configuration corresponding to the vcs
             products
    :rtype: (Dict, List)
    '''
    # NOTE(review): some lines are elided from this view (the trailing argument
    # of get_products_infos, the initializations of d_archives and l_pinfo_vcs,
    # a "continue" after the native/fixed test, and the "else:" separating the
    # archive case from the vcs case).
    # Get the list of product informations
    l_products_name = config.APPLICATION.products.keys()
    l_product_info = src.product.get_products_infos(l_products_name,
    for p_name, p_info in l_product_info:
        # ignore the native and fixed products
        if (src.product.product_is_native(p_info)
                or src.product.product_is_fixed(p_info)):
        if p_info.get_source == "archive":
            # product fetched from an archive: record the archive path and its
            # destination under ARCHIVE_DIR in the package
            archive_path = p_info.archive_info.archive_name
            archive_name = os.path.basename(archive_path)
            # NOTE(review): "else:" elided — the next line is the vcs case
            l_pinfo_vcs.append((p_name, p_info))
            d_archives[p_name] = (archive_path,
                                  os.path.join(ARCHIVE_DIR, archive_name))
    return d_archives, l_pinfo_vcs
def add_salomeTools(config, tmp_working_dir):
    '''Prepare a version of salomeTools that has a specific local.pyconf file
    configured for a source package.

    :param config Config: The global configuration.
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
                                source package
    :return: The path to the local salomeTools directory to add in the package
    :rtype: str
    '''
    # Copy sat in the temporary working directory
    sat_tmp_path = src.Path(os.path.join(tmp_working_dir, "salomeTools"))
    sat_running_path = src.Path(config.VARS.salometoolsway)
    sat_running_path.copy(sat_tmp_path)

    # Update the local.pyconf file that contains the path to the project
    local_pyconf_name = "local.pyconf"
    local_pyconf_dir = os.path.join(tmp_working_dir, "salomeTools", "data")
    local_pyconf_file = os.path.join(local_pyconf_dir, local_pyconf_name)
    # Remove the .pyconf file in the root directory of salomeTools if there is
    # any. (For example when launching jobs, a pyconf file describing the jobs
    # can be here and is not useful)
    files_or_dir_SAT = os.listdir(os.path.join(tmp_working_dir, "salomeTools"))
    for file_or_dir in files_or_dir_SAT:
        if file_or_dir.endswith(".pyconf") or file_or_dir.endswith(".txt"):
            # NOTE(review): the remaining arguments of this call and the
            # os.remove that follows are elided from this view.
            file_path = os.path.join(tmp_working_dir,

    # write the package-specific local.pyconf
    ff = open(local_pyconf_file, "w")
    ff.write(LOCAL_TEMPLATE)
    # NOTE(review): ff.close() is not visible in this view — verify.

    return sat_tmp_path.path
def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir):
    '''For sources package that require that all products are get using an
    archive, one has to create some archive for the vcs products.
    So this method calls the clean and source command of sat and then create
    the archives.

    :param l_pinfo_vcs List: The list of specific configuration corresponding to
                             each vcs product
    :param sat Sat: The Sat instance that can be called to clean and source the
                    products
    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
                                source package
    :return: the dictionary that stores all the archives to add in the source
             package. {label : (path_on_local_machine, path_in_archive)}
    :rtype: dict
    '''
    # NOTE(review): a few lines are elided from this view (e.g. the import of
    # "source" and the initialization of d_archives_vcs).
    # clean the source directory of all the vcs products, then use the source
    # command and thus construct an archive that will not contain the patches
    l_prod_names = [pn for pn, __ in l_pinfo_vcs]
    # dead code kept deliberately: cleaning user SOURCES was deemed dangerous,
    # the work is done in tmp_local_working_dir instead
    if False: # clean is dangerous in user/SOURCES, fixed in tmp_local_working_dir
        logger.write(_("\nclean sources\n"))
        args_clean = config.VARS.application
        args_clean += " --sources --products "
        args_clean += ",".join(l_prod_names)
        logger.write("WARNING: get_archives_vcs clean\n '%s'\n" % args_clean, 1)
        sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)

    # fetch the sources of the vcs products into a temporary workdir
    logger.write(_("get sources\n"))
    args_source = config.VARS.application
    args_source += " --products "
    args_source += ",".join(l_prod_names)
    svgDir = sat.cfg.APPLICATION.workdir
    tmp_local_working_dir = os.path.join(sat.cfg.APPLICATION.workdir, "tmp_package") # to avoid too much big files in /tmp
    sat.cfg.APPLICATION.workdir = tmp_local_working_dir
    # DBG.write("SSS sat config.APPLICATION.workdir", sat.cfg.APPLICATION, True)
    # DBG.write("sat config id", id(sat.cfg), True)
    # NOTE: config is not the same id() as for sat.source(), so the command is
    # invoked through source.run with runner.cfg as reference instead of
    # sat.source(args_source, batch=True, verbose=5, logger_add_link = logger)
    source.run(args_source, sat, logger) #use this mode as runner.cfg reference

    # make the new archives
    for pn, pinfo in l_pinfo_vcs:
        path_archive = make_archive(pn, pinfo, tmp_local_working_dir)
        logger.write("make archive vcs '%s'\n" % path_archive)
        d_archives_vcs[pn] = (path_archive,
                              os.path.join(ARCHIVE_DIR, pn + ".tgz"))
    # restore the original workdir in the shared configuration
    sat.cfg.APPLICATION.workdir = svgDir
    # DBG.write("END sat config", sat.cfg.APPLICATION, True)
    return d_archives_vcs
def make_archive(prod_name, prod_info, where):
    '''Create an archive of a product by searching its source directory.

    :param prod_name str: The name of the product.
    :param prod_info Config: The specific configuration corresponding to the
                             product.
    :param where str: The path of the repository where to put the resulting
                      archive.
    :return: The path of the resulting archive.
    :rtype: str
    '''
    path_targz_prod = os.path.join(where, prod_name + ".tgz")
    tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
    try:
        local_path = prod_info.source_dir
        # archive the whole source tree under the product name, filtering out
        # VCS bookkeeping files and ignored extensions
        # NOTE(review): the "exclude" keyword was removed from tarfile in
        # Python 3 — switch to "filter=" if this must run under Python 3.
        tar_prod.add(local_path,
                     arcname=prod_name,
                     exclude=exclude_VCS_and_extensions)
    finally:
        # make sure the gzip stream is flushed and the file closed even if
        # the add fails
        tar_prod.close()
    return path_targz_prod
def create_project_for_src_package(config, tmp_working_dir, with_vcs):
    '''Create a specific project for a source package.

    :param config Config: The global configuration.
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
                                source package
    :param with_vcs boolean: True if the package is with vcs products (not
                             transformed into archive products)
    :return: The dictionary
             {"project" : (produced project, project path in the archive)}
    :rtype: dict
    '''
    # NOTE(review): some lines are elided from this view (the sub-directory
    # name arguments of the os.path.join calls, two entries of the directory
    # list, the ff.close(), trailing arguments of
    # find_product_scripts_and_pyconf, and the return of d_project).

    # Create in the working temporary directory the full project tree
    project_tmp_dir = os.path.join(tmp_working_dir, PROJECT_DIR)
    products_pyconf_tmp_dir = os.path.join(project_tmp_dir,
    compil_scripts_tmp_dir = os.path.join(project_tmp_dir,
    env_scripts_tmp_dir = os.path.join(project_tmp_dir,
    patches_tmp_dir = os.path.join(project_tmp_dir,
    application_tmp_dir = os.path.join(project_tmp_dir,
    for directory in [project_tmp_dir,
                      compil_scripts_tmp_dir,
                      application_tmp_dir]:
        src.ensure_path_exists(directory)

    # Create the pyconf that contains the information of the project
    project_pyconf_name = "project.pyconf"
    project_pyconf_file = os.path.join(project_tmp_dir, project_pyconf_name)
    ff = open(project_pyconf_file, "w")
    ff.write(PROJECT_TEMPLATE)

    # Loop over the products to get there pyconf and all the scripts
    # (compilation, environment, patches)
    # and create the pyconf file to add to the project
    lproducts_name = config.APPLICATION.products.keys()
    l_products = src.product.get_products_infos(lproducts_name, config)
    for p_name, p_info in l_products:
        find_product_scripts_and_pyconf(p_name,
                                        compil_scripts_tmp_dir,
                                        products_pyconf_tmp_dir)

    # copy the application pyconf into the project
    find_application_pyconf(config, application_tmp_dir)

    d_project = {"project" : (project_tmp_dir, PROJECT_DIR )}
def find_product_scripts_and_pyconf(p_name,
                                    # NOTE(review): intermediate parameters
                                    # (p_info, config, with_vcs,
                                    # env_scripts_tmp_dir, patches_tmp_dir per
                                    # the docstring) are elided from this view
                                    compil_scripts_tmp_dir,
                                    products_pyconf_tmp_dir):
    '''Create a specific pyconf file for a given product. Get its environment
    script, its compilation script and patches and put it in the temporary
    working directory. This method is used in the source package in order to
    construct the specific project.

    :param p_name str: The name of the product.
    :param p_info Config: The specific configuration corresponding to the
                          product
    :param config Config: The global configuration.
    :param with_vcs boolean: True if the package is with vcs products (not
                             transformed into archive products)
    :param compil_scripts_tmp_dir str: The path to the temporary compilation
                                       scripts directory of the project.
    :param env_scripts_tmp_dir str: The path to the temporary environment script
                                    directory of the project.
    :param patches_tmp_dir str: The path to the temporary patch scripts
                                directory of the project.
    :param products_pyconf_tmp_dir str: The path to the temporary product
                                        scripts directory of the project.
    '''
    # NOTE(review): several interior lines are elided from this view (the
    # "if not with_vcs" guard around the vcs resolution, key subscripts,
    # trailing call arguments, and the ff.close()).

    # read the pyconf of the product
    product_pyconf_path = src.find_file_in_lpath(p_name + ".pyconf",
                                                 config.PATHS.PRODUCTPATH)
    product_pyconf_cfg = src.pyconf.Config(product_pyconf_path)

    # find the compilation script if any
    if src.product.product_has_script(p_info):
        compil_script_path = src.Path(p_info.compil_script)
        compil_script_path.copy(compil_scripts_tmp_dir)
        # reference the copied script by its base name inside the project
        product_pyconf_cfg[p_info.section].compil_script = os.path.basename(
                                                        p_info.compil_script)
    # find the environment script if any
    if src.product.product_has_env_script(p_info):
        env_script_path = src.Path(p_info.environ.env_script)
        env_script_path.copy(env_scripts_tmp_dir)
        product_pyconf_cfg[p_info.section].environ.env_script = os.path.basename(
                                                    p_info.environ.env_script)
    # find the patches if any
    if src.product.product_has_patches(p_info):
        patches = src.pyconf.Sequence()
        for patch_path in p_info.patches:
            p_path = src.Path(patch_path)
            p_path.copy(patches_tmp_dir)
            patches.append(os.path.basename(patch_path), "")

        product_pyconf_cfg[p_info.section].patches = patches

    # put in the pyconf file the resolved values
    for info in ["git_info", "cvs_info", "svn_info"]:
        for key in p_info[info]:
            product_pyconf_cfg[p_info.section][info][key] = p_info[

    # if the product is not archive, then make it become archive.
    if src.product.product_is_vcs(p_info):
        product_pyconf_cfg[p_info.section].get_source = "archive"
        if not "archive_info" in product_pyconf_cfg[p_info.section]:
            product_pyconf_cfg[p_info.section].addMapping("archive_info",
                src.pyconf.Mapping(product_pyconf_cfg),
        # the archive produced by make_archive is named <product>.tgz
        product_pyconf_cfg[p_info.section
                           ].archive_info.archive_name = p_info.name + ".tgz"

    # write the pyconf file to the temporary project location
    product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir,
    ff = open(product_tmp_pyconf_path, 'w')
    ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
    product_pyconf_cfg.__save__(ff, 1)
def find_application_pyconf(config, application_tmp_dir):
    '''Find the application pyconf file and put it in the specific temporary
    directory containing the specific project of a source package.

    :param config Config: The global configuration.
    :param application_tmp_dir str: The path to the temporary application
                                    scripts directory of the project.
    '''
    # read the pyconf of the application
    application_name = config.VARS.application
    application_pyconf_path = src.find_file_in_lpath(
                                            application_name + ".pyconf",
                                            config.PATHS.APPLICATIONPATH)
    application_pyconf_cfg = src.pyconf.Config(application_pyconf_path)

    # Change the workdir: inside the archive the application must be
    # relocatable, so workdir is resolved at run time relatively to the
    # embedded salomeTools location.
    application_pyconf_cfg.APPLICATION.workdir = src.pyconf.Reference(
                                    application_pyconf_cfg,
                                    src.pyconf.DOLLAR,
                                    'VARS.salometoolsway + $VARS.sep + ".."')

    # Prevent from compilation in base
    application_pyconf_cfg.APPLICATION.no_base = "yes"

    # write the pyconf file to the temporary application location.
    # Use a context manager so the handle is closed even if __save__ raises
    # (the original opened the file and never closed it).
    application_tmp_pyconf_path = os.path.join(application_tmp_dir,
                                               application_name + ".pyconf")
    with open(application_tmp_pyconf_path, 'w') as ff:
        ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
        application_pyconf_cfg.__save__(ff, 1)
def project_package(config, name_project, project_file_path, tmp_working_dir, logger):
    '''Prepare a dictionary that stores all the needed directories and files to
    add in a project package.

    :param config Config: The global configuration.
    :param name_project str: The name of the project to package.
    :param project_file_path str: The path to the local project.
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
                                project package.
    :param logger Logger: The logger instance used for warnings.
    :return: the dictionary that stores all the needed directories and files to
             add in a project package.
             {label : (path_on_local_machine, path_in_archive)}
    :rtype: dict
    '''
    d_project = {}
    # Read the project file and get the directories to add to the package
    try:
        project_pyconf_cfg = config.PROJECTS.projects.__getattr__(name_project)
    except Exception:
        # narrowed from a bare "except:" (which also caught KeyboardInterrupt);
        # keep the best-effort fallback: load the project file directly.
        logger.write("""
WARNING: inexisting config.PROJECTS.projects.%s, try to read now from:\n%s\n""" % (name_project, project_file_path))
        project_pyconf_cfg = src.pyconf.Config(project_file_path)
        project_pyconf_cfg.PWD = os.path.dirname(project_file_path)

    # Map of the project path keys to their directory name inside the archive.
    paths = {"ARCHIVEPATH" : "archives",
             "APPLICATIONPATH" : "applications",
             "PRODUCTPATH" : "products",
             "JOBPATH" : "jobs",
             "MACHINEPATH" : "machines"}
    # Loop over the project paths and add it
    for path in paths:
        if path not in project_pyconf_cfg:
            continue
        # Add the directory to the files to add in the package
        d_project[path] = (project_pyconf_cfg[path], paths[path])
        # Modify the value of the path in the package
        project_pyconf_cfg[path] = src.pyconf.Reference(
                                           project_pyconf_cfg,
                                           src.pyconf.DOLLAR,
                                           'project_path + "/' + paths[path] + '"')

    # Modify some values
    if "project_path" not in project_pyconf_cfg:
        project_pyconf_cfg.addMapping("project_path",
                                      src.pyconf.Mapping(project_pyconf_cfg),
                                      "")
    project_pyconf_cfg.project_path = src.pyconf.Reference(project_pyconf_cfg,
                                                           src.pyconf.DOLLAR,
                                                           'PWD')

    # Write the project pyconf file.
    # Context manager replaces the original unclosed open() handle.
    project_file_name = os.path.basename(project_file_path)
    project_pyconf_tmp_path = os.path.join(tmp_working_dir, project_file_name)
    with open(project_pyconf_tmp_path, 'w') as ff:
        ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
        project_pyconf_cfg.__save__(ff, 1)
    d_project["Project hat file"] = (project_pyconf_tmp_path, project_file_name)

    return d_project
def add_readme(config, options, where):
    '''Generate the README file of the package and return its path.

    The README is assembled from a generated header plus the README_*.template
    files of the internal directory, the sections depending on which package
    kinds were requested (options.binaries / sources / project / sat).

    :param config Config: The global configuration.
    :param options Options: The parsed options of the package command.
    :param where str: The directory in which to create the README file.
    '''
    readme_path = os.path.join(where, "README")
    with codecs.open(readme_path, "w", 'utf-8') as f:
        # templates for building the header
# This package was generated with sat $version
# Distribution : $dist
In the following, $$ROOT represents the directory where you have installed
SALOME (the directory where this file is located).
        readme_compilation_with_binaries="""
compilation based on the binaries used as prerequisites
=======================================================
If you fail to compile the complete application (for example because
you are not root on your system and cannot install missing packages), you
may try a partial compilation based on the binaries.
For that it is necessary to copy the binaries from BINARIES to INSTALL,
and do some substitutions on cmake and .la files (replace the build directories
The procedure to do it is:
1) Remove or rename INSTALL directory if it exists
2) Execute the shell script install_bin.sh:
3) Use SalomeTool (as explained in Sources section) and compile only the
modules you need to (with -p option)
        # Compile the header template and locate every section template
        # under the salomeTools internal directory.
        readme_header_tpl=string.Template(readme_header)
        readme_template_path_bin = os.path.join(config.VARS.internal_dir,
                                                "README_BIN.template")
        readme_template_path_bin_launcher = os.path.join(config.VARS.internal_dir,
                                                "README_LAUNCHER.template")
        readme_template_path_bin_virtapp = os.path.join(config.VARS.internal_dir,
                                                "README_BIN_VIRTUAL_APP.template")
        readme_template_path_src = os.path.join(config.VARS.internal_dir,
                                                "README_SRC.template")
        readme_template_path_pro = os.path.join(config.VARS.internal_dir,
                                                "README_PROJECT.template")
        readme_template_path_sat = os.path.join(config.VARS.internal_dir,
                                                "README_SAT.template")

        # prepare substitution dictionary
        d['user'] = config.VARS.user
        d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
        d['version'] = config.INTERNAL.sat_version
        d['dist'] = config.VARS.dist
        f.write(readme_header_tpl.substitute(d)) # write the general header (common)

        if options.binaries or options.sources:
            d['application'] = config.VARS.application
            f.write("# Application: " + d['application'] + "\n")
            if 'KERNEL' in config.APPLICATION.products:
                VersionSalome = src.get_salome_version(config)
                # Case where SALOME has the launcher that uses the SalomeContext API
                if VersionSalome >= 730:
                    d['launcher'] = config.APPLICATION.profile.launcher_name
                    d['virtual_app'] = 'runAppli' # this info is not used now)

        # write the specific sections
        if options.binaries:
            f.write(src.template.substitute(readme_template_path_bin, d))
            if "virtual_app" in d:
                f.write(src.template.substitute(readme_template_path_bin_virtapp, d))
                f.write(src.template.substitute(readme_template_path_bin_launcher, d))
            f.write(src.template.substitute(readme_template_path_src, d))

        if options.binaries and options.sources:
            f.write(readme_compilation_with_binaries)

            f.write(src.template.substitute(readme_template_path_pro, d))

            f.write(src.template.substitute(readme_template_path_sat, d))
def update_config(config, prop, value):
    '''Remove from config.APPLICATION.products the products that have the property given as input.

    :param config Config: The global config.
    :param prop str: The property to filter
    :param value str: The value of the property to filter
    '''
    src.check_config_has_application(config)
    # Collect the matching names first, then delete: the products mapping
    # must not shrink while it is being iterated.
    names_to_drop = [
        name
        for name in config.APPLICATION.products.keys()
        if src.get_property_in_product_cfg(
               src.product.get_product_config(config, name), prop) == value
    ]
    for name in names_to_drop:
        config.APPLICATION.products.__delitem__(name)
1204 '''method that is called when salomeTools is called with --help option.
1206 :return: The text to display for the package command description.
1210 The package command creates a tar file archive of a product.
1211 There are four kinds of archive, which can be mixed:
1213 1 - The binary archive.
1214 It contains the product installation directories plus a launcher.
1215 2 - The sources archive.
1216 It contains the product archives, a project (the application plus salomeTools).
1217 3 - The project archive.
1218 It contains a project (give the project file path as argument).
1219 4 - The salomeTools archive.
1220 It contains code utility salomeTools.
1223 >> sat package SALOME-master --binaries --sources""")
1225 def run(args, runner, logger):
1226 '''method that is called when salomeTools is called with package parameter.
1230 (options, args) = parser.parse_args(args)
1232 # Check that a type of package is called, and only one
1233 all_option_types = (options.binaries,
1235 options.project not in ["", None],
1238 # Check if no option for package type
1239 if all_option_types.count(True) == 0:
1240 msg = _("Error: Precise a type for the package\nUse one of the "
1241 "following options: --binaries, --sources, --project or"
1243 logger.write(src.printcolors.printcError(msg), 1)
1244 logger.write("\n", 1)
1247 # The repository where to put the package if not Binary or Source
1248 package_default_path = runner.cfg.LOCAL.workdir
1250 # if the package contains binaries or sources:
1251 if options.binaries or options.sources:
1252 # Check that the command has been called with an application
1253 src.check_config_has_application(runner.cfg)
1255 # Display information
1256 logger.write(_("Packaging application %s\n") % src.printcolors.printcLabel(
1257 runner.cfg.VARS.application), 1)
1259 # Get the default directory where to put the packages
1260 package_default_path = os.path.join(runner.cfg.APPLICATION.workdir,
1262 src.ensure_path_exists(package_default_path)
1264 # if the package contains a project:
1266 # check that the project is visible by SAT
1267 projectNameFile = options.project + ".pyconf"
1269 for i in runner.cfg.PROJECTS.project_file_paths:
1270 baseName = os.path.basename(i)
1271 if baseName == projectNameFile:
1275 if foundProject is None:
1276 local_path = os.path.join(runner.cfg.VARS.salometoolsway,
1279 msg = _("""ERROR: the project %(1)s is not visible by salomeTools.
1283 Please add it in file:
1285 {"1": options.project, "2": "\n ".join(runner.cfg.PROJECTS.project_file_paths), "3": local_path})
1286 logger.write(src.printcolors.printcError(msg), 1)
1287 logger.write("\n", 1)
1290 options.project_file_path = foundProject
1291 src.printcolors.print_value(logger, "Project path", options.project_file_path, 2)
1293 # Remove the products that are filtered by the --without_property option
1294 if options.without_property:
1295 [prop, value] = options.without_property.split(":")
1296 update_config(runner.cfg, prop, value)
1298 # get the name of the archive or build it
1300 if os.path.basename(options.name) == options.name:
1301 # only a name (not a path)
1302 archive_name = options.name
1303 dir_name = package_default_path
1305 archive_name = os.path.basename(options.name)
1306 dir_name = os.path.dirname(options.name)
1308 # suppress extension
1309 if archive_name[-len(".tgz"):] == ".tgz":
1310 archive_name = archive_name[:-len(".tgz")]
1311 if archive_name[-len(".tar.gz"):] == ".tar.gz":
1312 archive_name = archive_name[:-len(".tar.gz")]
1316 dir_name = package_default_path
1317 if options.binaries or options.sources:
1318 archive_name = runner.cfg.APPLICATION.name
1320 if options.binaries:
1321 archive_name += "-"+runner.cfg.VARS.dist
1324 archive_name += "-SRC"
1325 if options.with_vcs:
1326 archive_name += "-VCS"
1329 project_name = options.project
1330 archive_name += ("PROJECT-" + project_name)
1333 archive_name += ("salomeTools_" + runner.cfg.INTERNAL.sat_version)
1334 if len(archive_name)==0: # no option worked
1335 msg = _("Error: Cannot name the archive\n"
1336 " check if at least one of the following options was "
1337 "selected : --binaries, --sources, --project or"
1339 logger.write(src.printcolors.printcError(msg), 1)
1340 logger.write("\n", 1)
1343 path_targz = os.path.join(dir_name, archive_name + ".tgz")
1345 src.printcolors.print_value(logger, "Package path", path_targz, 2)
1347 # Create a working directory for all files that are produced during the
1348 # package creation and that will be removed at the end of the command
1349 tmp_working_dir = os.path.join(runner.cfg.VARS.tmp_root, runner.cfg.VARS.datehour)
1350 src.ensure_path_exists(tmp_working_dir)
1351 logger.write("\n", 5)
1352 logger.write(_("The temporary working directory: %s\n" % tmp_working_dir),5)
1354 logger.write("\n", 3)
1356 msg = _("Preparation of files to add to the archive")
1357 logger.write(src.printcolors.printcLabel(msg), 2)
1358 logger.write("\n", 2)
1360 d_files_to_add={} # content of the archive
1362 # a dict to hold paths that will need to be substitute for users recompilations
1363 d_paths_to_substitute={}
1365 if options.binaries:
1366 d_bin_files_to_add = binary_package(runner.cfg,
1370 # for all binaries dir, store the substitution that will be required
1371 # for extra compilations
1372 for key in d_bin_files_to_add:
1373 if key.endswith("(bin)"):
1374 source_dir = d_bin_files_to_add[key][0]
1375 path_in_archive = d_bin_files_to_add[key][1].replace("BINARIES-" + runner.cfg.VARS.dist,"INSTALL")
1376 if os.path.basename(source_dir)==os.path.basename(path_in_archive):
1377 # if basename is the same we will just substitute the dirname
1378 d_paths_to_substitute[os.path.dirname(source_dir)]=\
1379 os.path.dirname(path_in_archive)
1381 d_paths_to_substitute[source_dir]=path_in_archive
1383 d_files_to_add.update(d_bin_files_to_add)
1386 d_files_to_add.update(source_package(runner,
1391 if options.binaries:
1392 # for archives with bin and sources we provide a shell script able to
1393 # install binaries for compilation
1394 file_install_bin=produce_install_bin_file(runner.cfg,logger,
1396 d_paths_to_substitute,
1398 d_files_to_add.update({"install_bin" : (file_install_bin, "install_bin.sh")})
1399 logger.write("substitutions that need to be done later : \n", 5)
1400 logger.write(str(d_paths_to_substitute), 5)
1401 logger.write("\n", 5)
1403 # --salomeTool option is not considered when --sources is selected, as this option
1404 # already brings salomeTool!
1406 d_files_to_add.update({"salomeTools" : (runner.cfg.VARS.salometoolsway, "")})
1409 DBG.write("config for package %s" % project_name, runner.cfg)
1410 d_files_to_add.update(project_package(runner.cfg, project_name, options.project_file_path, tmp_working_dir, logger))
1412 if not(d_files_to_add):
1413 msg = _("Error: Empty dictionnary to build the archive!\n")
1414 logger.write(src.printcolors.printcError(msg), 1)
1415 logger.write("\n", 1)
1418 # Add the README file in the package
1419 local_readme_tmp_path = add_readme(runner.cfg, options, tmp_working_dir)
1420 d_files_to_add["README"] = (local_readme_tmp_path, "README")
1422 # Add the additional files of option add_files
1423 if options.add_files:
1424 for file_path in options.add_files:
1425 if not os.path.exists(file_path):
1426 msg = _("WARNING: the file %s is not accessible.\n" % file_path)
1428 file_name = os.path.basename(file_path)
1429 d_files_to_add[file_name] = (file_path, file_name)
1431 logger.write("\n", 2)
1433 logger.write(src.printcolors.printcLabel(_("Actually do the package")), 2)
1434 logger.write("\n", 2)
1437 # Creating the object tarfile
1438 tar = tarfile.open(path_targz, mode='w:gz')
1440 # get the filtering function if needed
1441 filter_function = exclude_VCS_and_extensions
1443 # Add the files to the tarfile object
1444 res = add_files(tar, archive_name, d_files_to_add, logger, f_exclude=filter_function)
1446 except KeyboardInterrupt:
1447 logger.write(src.printcolors.printcError("\nERROR: forced interruption\n"), 1)
1448 logger.write(_("Removing the temporary working directory '%s'... ") % tmp_working_dir, 1)
1449 # remove the working directory
1450 shutil.rmtree(tmp_working_dir)
1451 logger.write(_("OK"), 1)
1452 logger.write(_("\n"), 1)
1455 # unconditionaly remove the tmp_local_working_dir
1456 tmp_local_working_dir = os.path.join(runner.cfg.APPLICATION.workdir, "tmp_package")
1457 if os.path.isdir(tmp_local_working_dir):
1458 shutil.rmtree(tmp_local_working_dir)
1461 DBG.tofix("make shutil.rmtree(%s) effective" % tmp_working_dir, "", True)
1463 # Print again the path of the package
1464 logger.write("\n", 2)
1465 src.printcolors.print_value(logger, "Package path", path_targz, 2)