3 # Copyright (C) 2010-2012 CEA/DEN
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 # Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# Project-local imports: application helpers and the sat debug writer.
from application import get_SALOME_modules
import src.debug as DBG

# Name of the directory that holds the product archives inside a package.
ARCHIVE_DIR = "ARCHIVES"
# Name of the directory that holds the generated project inside a package.
PROJECT_DIR = "PROJECT"

# Directory names excluded from packages (VCS bookkeeping directories).
IGNORED_DIRS = [".git", ".svn"]
# File extensions excluded from packages (none by default).
IGNORED_EXTENSIONS = []
# Template of the project.pyconf file written into a source package's
# PROJECT directory (pyconf syntax: $PWD etc. are resolved by the pyconf
# reader, not by Python).
# NOTE(review): both template literals below are truncated in this view --
# the closing quotes of PROJECT_TEMPLATE and several lines of LOCAL_TEMPLATE
# are missing; restore them from VCS before use.
PROJECT_TEMPLATE = """#!/usr/bin/env python
# The path to the archive root directory
root_path : $PWD + "/../"
project_path : $PWD + "/"
# Where to search the archives of the products
ARCHIVEPATH : $root_path + "ARCHIVES"
# Where to search the pyconf of the applications
APPLICATIONPATH : $project_path + "applications/"
# Where to search the pyconf of the products
PRODUCTPATH : $project_path + "products/"
# Where to search the pyconf of the jobs of the project
JOBPATH : $project_path + "jobs/"
# Where to search the pyconf of the machines of the project
MACHINEPATH : $project_path + "machines/"
LOCAL_TEMPLATE = ("""#!/usr/bin/env python
archive_dir : 'default'
project_file_paths : [$VARS.salometoolsway + $VARS.sep + \"..\" + $VARS.sep"""
""" + \"""" + PROJECT_DIR + """\" + $VARS.sep + "project.pyconf"]
# Module-level declaration of every command-line option understood by
# "sat package <options>"; the parsed values arrive in the command as an
# OptResult object (options.binaries, options.sources, ...).
# Define all possible option for the package command : sat package <options>
parser = src.options.Options()
parser.add_option('b', 'binaries', 'boolean', 'binaries',
    _('Optional: Produce a binary package.'), False)
parser.add_option('f', 'force_creation', 'boolean', 'force_creation',
    _('Optional: Only binary package: produce the archive even if '
    'there are some missing products.'), False)
parser.add_option('s', 'sources', 'boolean', 'sources',
    _('Optional: Produce a compilable archive of the sources of the '
    'application.'), False)
# NOTE(review): the 'with_vcs' declaration below is truncated in this view
# (its default value and closing parenthesis are missing) -- restore from VCS.
parser.add_option('', 'with_vcs', 'boolean', 'with_vcs',
    _('Optional: Only source package: do not make archive of vcs products.'),
parser.add_option('p', 'project', 'string', 'project',
    _('Optional: Produce an archive that contains a project.'), "")
parser.add_option('t', 'salometools', 'boolean', 'sat',
    _('Optional: Produce an archive that contains salomeTools.'), False)
parser.add_option('n', 'name', 'string', 'name',
    _('Optional: The name or full path of the archive.'), None)
parser.add_option('', 'add_files', 'list2', 'add_files',
    _('Optional: The list of additional files to add to the archive.'), [])
parser.add_option('', 'without_commercial', 'boolean', 'without_commercial',
    _('Optional: do not add commercial licence.'), False)
parser.add_option('', 'without_property', 'string', 'without_property',
    _('Optional: Filter the products by their properties.\n\tSyntax: '
    '--without_property <property>:<value>'))
def add_files(tar, name_archive, d_content, logger, f_exclude=None):
    '''Create an archive containing all directories and files that are given in
       the d_content argument.

    :param tar tarfile: The tarfile instance used to make the archive.
    :param name_archive str: The name of the archive to make.
    :param d_content dict: The dictionary that contain all directories and files
                           to add in the archive.
                           d_content[label] =
                                        (path_on_local_machine, path_in_archive)
    :param logger Logger: the logging instance
    :param f_exclude Function: the function that filters
    :return: 0 if success, 1 if not.
    :rtype: int
    '''
    # NOTE(review): the loop header and try statement were missing in the
    # reviewed view of this file; they are restored here so the function is
    # valid and actually returns the 0/1 status its docstring promises.
    # get the max length of the messages in order to make the display
    max_len = len(max(d_content.keys(), key=len))
    result = 0
    # loop over each directory or file stored in the d_content dictionary
    names = sorted(d_content.keys())
    DBG.write("add tar names", names)
    for name in names:
        # display information
        len_points = max_len - len(name)
        local_path, archive_path = d_content[name]
        in_archive = os.path.join(name_archive, archive_path)
        logger.write(name + " " + len_points * "." + " "+ in_archive + " ", 3)
        # Get the local path and the path in archive
        # of the directory or file to add
        # Add it in the archive
        try:
            tar.add(local_path, arcname=in_archive, exclude=f_exclude)
            logger.write(src.printcolors.printcSuccess(_("OK")), 3)
        except Exception as e:
            logger.write(src.printcolors.printcError(_("KO ")), 3)
            logger.write(str(e), 3)
            # remember the failure but keep adding the remaining entries
            result = 1
        logger.write("\n", 3)
    return result
def exclude_VCS_and_extensions(filename, ignored_dirs=None, ignored_extensions=None):
    ''' The function that is used to exclude from package the link to the
        VCS repositories (like .git) and files with ignored extensions.

    :param filename Str: The filname to exclude (or not).
    :param ignored_dirs List: directory names that trigger exclusion
                              (default: module-level IGNORED_DIRS).
    :param ignored_extensions List: file extensions that trigger exclusion
                                    (default: module-level IGNORED_EXTENSIONS).
    :return: True if the file has to be excluded, False otherwise.
    :rtype: Boolean
    '''
    # Resolve the defaults lazily so the historical call sites (one positional
    # argument) keep using the module-level constants.
    if ignored_dirs is None:
        ignored_dirs = IGNORED_DIRS
    if ignored_extensions is None:
        ignored_extensions = IGNORED_EXTENSIONS
    # exclude anything whose path mentions a VCS bookkeeping directory
    for dir_name in ignored_dirs:
        if dir_name in filename:
            return True
    # exclude files carrying one of the ignored extensions
    for extension in ignored_extensions:
        if filename.endswith(extension):
            return True
    return False
# NOTE(review): this function is truncated in this view -- several signature
# lines (logger, file_dir, file_name, binaries_dir_name), some 'else:' lines,
# argument lines of calls, and the final chmod/return are missing; the
# indentation below is reconstructed. Restore the full body from VCS.
def produce_relative_launcher(config,
                              with_commercial=True):
    '''Create a specific SALOME launcher for the binary package. This launcher

    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param file_dir str: the directory where to put the launcher
    :param file_name str: The launcher name
    :param binaries_dir_name str: the name of the repository where the binaries
    :return: the path of the produced launcher
    '''
    # get KERNEL installation path
    kernel_root_dir = os.path.join(binaries_dir_name, "KERNEL")
    # set kernel bin dir (considering fhs property)
    kernel_cfg = src.product.get_product_config(config, "KERNEL")
    if src.get_property_in_product_cfg(kernel_cfg, "fhs"):
        bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin")
        # NOTE(review): a line (likely "else:") is missing before this one
        bin_kernel_install_dir = os.path.join(kernel_root_dir,"bin","salome")
    # check if the application contains an application module
    l_product_info = src.product.get_products_infos(config.APPLICATION.products.keys(),
    salome_application_name="Not defined"
    for prod_name, prod_info in l_product_info:
        # look for a salome application
        if src.get_property_in_product_cfg(prod_info, "is_salome_application") == "yes":
            salome_application_name=prod_info.name
    # if the application contains an application module, we set ABSOLUTE_APPLI_PATH to it
    # if not we set it to KERNEL_INSTALL_DIR, which is sufficient, except for salome test
    if salome_application_name == "Not defined":
        app_root_dir=kernel_root_dir
        # NOTE(review): a line (likely "else:") is missing before this one
        app_root_dir=os.path.join(binaries_dir_name, salome_application_name)
    # Get the launcher template and do substitutions
    withProfile = src.fileEnviron.withProfile
    # rewrite the template so the paths are relative to out_dir_Path
    withProfile = withProfile.replace(
        "ABSOLUTE_APPLI_PATH'] = 'KERNEL_INSTALL_DIR'",
        "ABSOLUTE_APPLI_PATH'] = out_dir_Path + '" + config.VARS.sep + app_root_dir + "'")
    withProfile = withProfile.replace(
        " 'BIN_KERNEL_INSTALL_DIR'",
        " out_dir_Path + '" + config.VARS.sep + bin_kernel_install_dir + "'")
    # split the template around the marker so the environment can be injected
    before, after = withProfile.split("# here your local standalone environment\n")
    # create an environment file writer
    writer = src.environment.FileEnvWriter(config,
    filepath = os.path.join(file_dir, file_name)
    # open the file and write into it
    launch_file = open(filepath, "w")
    launch_file.write(before)
    writer.write_cfgForPy_file(launch_file,
                               for_package = binaries_dir_name,
                               with_commercial=with_commercial)
    launch_file.write(after)
    # Little hack to put out_dir_Path outside the strings
    src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' )
    # A hack to put a call to a file for distene licence.
    # It does nothing to an application that has no distene product
    hack_for_distene_licence(filepath)
    # change the rights in order to make the file executable for everybody
# NOTE(review): this function is truncated in this view -- the 'fileout'
# assignment, the 'num_line' bookkeeping inside the loop, the early return
# when no Distene section is found, the closing quotes of the inserted
# text, and the final write/close are missing. Restore from VCS.
def hack_for_distene_licence(filepath):
    '''Replace the distene licence env variable by a call to a file.

    :param filepath Str: The path to the launcher to modify.
    '''
    # work on a renamed copy of the launcher and rewrite it in place
    shutil.move(filepath, filepath + "_old")
    filein = filepath + "_old"
    fin = open(filein, "r")
    fout = open(fileout, "w")
    text = fin.readlines()
    # Find the Distene section
    for i,line in enumerate(text):
        if "# Set DISTENE License" in line:
    # No distene product, there is nothing to do
    # drop the two lines that set the licence env variable directly
    del text[num_line +1]
    del text[num_line +1]
    text_to_insert =""" import imp
distene = imp.load_source('distene_licence', '/data/tmpsalome/salome/prerequis/install/LICENSE/dlim8.var.py')
distene.set_distene_variables(context)
    text.insert(num_line + 1, text_to_insert)
# NOTE(review): this function is truncated in this view -- signature lines
# (logger, file_dir, binaries_dir_name), the FileEnvWriter arguments, and
# the final chmod/return are missing. Restore the full body from VCS.
def produce_relative_env_files(config,
    '''Create some specific environment files for the binary package. These
       files use relative paths.

    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param file_dir str: the directory where to put the files
    :param binaries_dir_name str: the name of the repository where the binaries
    :return: the list of path of the produced environment files
    '''
    # create an environment file writer
    writer = src.environment.FileEnvWriter(config,
    filepath = writer.write_env_file("env_launch.sh",
                                     for_package = binaries_dir_name)
    # Little hack to put out_dir_Path as environment variable
    src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )
    # change the rights in order to make the file executable for everybody
# NOTE(review): this function is truncated in this view -- signature lines
# (logger, file_dir, d_sub, file_name), the 'for key in d_sub' loop headers,
# the 'd' dict initialisation and the final chmod/return are missing.
# Restore the full body from VCS.
def produce_install_bin_file(config,
    '''Create a bash shell script which do substitutions in BIRARIES dir
       in order to use it for extra compilations.

    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param file_dir str: the directory where to put the files
    :param d_sub, dict: the dictionnary that contains the substitutions to be done
    :param file_name str: the name of the install script file
    :return: the produced file
    '''
    filepath = os.path.join(file_dir, file_name)
    # open the file and write into it
    # use codec utf-8 as sat variables are in unicode
    with codecs.open(filepath, "w", 'utf-8') as installbin_file:
        installbin_template_path = os.path.join(config.VARS.internal_dir,
                                                "INSTALL_BIN.template")
        # build the name of the directory that will contain the binaries
        binaries_dir_name = "BINARIES-" + config.VARS.dist
        # build the substitution loop (a shell for/sed loop over the keys)
        loop_cmd = "for f in $(grep -RIl"
        loop_cmd += " -e "+ key
        loop_cmd += ' INSTALL); do\n sed -i "\n'
        loop_cmd += " s?" + key + "?$(pwd)/" + d_sub[key] + "?g\n"
        loop_cmd += ' " $f\ndone'
        d["BINARIES_DIR"] = binaries_dir_name
        d["SUBSTITUTION_LOOP"]=loop_cmd
        # substitute the template and write it in file
        content=src.template.substitute(installbin_template_path, d)
        installbin_file.write(content)
        # change the rights in order to make the file executable for everybody
# NOTE(review): this function is truncated in this view -- signature lines,
# the 'text_to_add' initialisation, a 'continue' after the smesh-plugin test,
# the product-name pieces of the generated <module> lines, an 'else:' branch
# and the chmod arguments/return are missing. Restore from VCS.
def product_appli_creation_script(config,
    '''Create a script that can produce an application (EDF style) in the binary
       package.

    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param file_dir str: the directory where to put the file
    :param binaries_dir_name str: the name of the repository where the binaries
    :return: the path of the produced script file
    '''
    template_name = "create_appli.py.for_bin_packages.template"
    template_path = os.path.join(config.VARS.internal_dir, template_name)
    text_to_fill = open(template_path, "r").read()
    text_to_fill = text_to_fill.replace("TO BE FILLED 1",
                                        '"' + binaries_dir_name + '"')
    for product_name in get_SALOME_modules(config):
        product_info = src.product.get_product_config(config, product_name)
        if src.product.product_is_smesh_plugin(product_info):
        if 'install_dir' in product_info and bool(product_info.install_dir):
            if src.product.product_is_cpp(product_info):
                # cpp products have one <module> entry per generated component
                for cpp_name in src.product.get_product_components(product_info):
                    line_to_add = ("<module name=\"" +
                                   "\" gui=\"yes\" path=\"''' + "
                                   "os.path.join(dir_bin_name, \"" +
                                   cpp_name + "\") + '''\"/>")
                    line_to_add = ("<module name=\"" +
                                   "\" gui=\"yes\" path=\"''' + "
                                   "os.path.join(dir_bin_name, \"" +
                                   product_name + "\") + '''\"/>")
            text_to_add += line_to_add + "\n"
    filled_text = text_to_fill.replace("TO BE FILLED 2", text_to_add)
    tmp_file_path = os.path.join(file_dir, "create_appli.py")
    ff = open(tmp_file_path, "w")
    ff.write(filled_text)
    # change the rights in order to make the file executable for everybody
    os.chmod(tmp_file_path,
# NOTE(review): this function is truncated in this view -- accumulator
# initialisations (d_products, l_install_dir, l_source_dir, l_not_installed),
# several 'continue'/'else:' lines, argument lines of calls, the opening of
# the detar-warning logger.write, and the final return are missing; the
# indentation below is reconstructed. Restore the full body from VCS.
def binary_package(config, logger, options, tmp_working_dir):
    '''Prepare a dictionary that stores all the needed directories and files to
       add in a binary package.

    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param options OptResult: the options of the launched command
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
    :return: the dictionary that stores all the needed directories and files to
             add in a binary package.
             {label : (path_on_local_machine, path_in_archive)}
    '''
    # Get the list of product installation to add to the archive
    l_products_name = sorted(config.APPLICATION.products.keys())
    l_product_info = src.product.get_products_infos(l_products_name,
    l_sources_not_present = []
    generate_mesa_launcher = False # a flag to know if we generate a mesa launcher
    for prod_name, prod_info in l_product_info:
        # skip product with property not_in_package set to yes
        if src.get_property_in_product_cfg(prod_info, "not_in_package") == "yes":
        # Add the sources of the products that have the property
        # sources_in_package : "yes"
        if src.get_property_in_product_cfg(prod_info,
                                           "sources_in_package") == "yes":
            if os.path.exists(prod_info.source_dir):
                l_source_dir.append((prod_name, prod_info.source_dir))
                # NOTE(review): a line (likely "else:") is missing before this one
                l_sources_not_present.append(prod_name)
        # if at least one of the application products has the "is_mesa" property
        if src.get_property_in_product_cfg(prod_info, "is_mesa") == "yes":
            generate_mesa_launcher = True # we will generate a mesa launcher
        # ignore the native and fixed products for install directories
        if (src.product.product_is_native(prod_info)
                or src.product.product_is_fixed(prod_info)
                or not src.product.product_compiles(prod_info)):
        if src.product.check_installation(prod_info):
            l_install_dir.append((prod_name, prod_info.install_dir))
            # NOTE(review): a line (likely "else:") is missing before this one
            l_not_installed.append(prod_name)
        # Add also the cpp generated modules (if any)
        if src.product.product_is_cpp(prod_info):
            for name_cpp in src.product.get_product_components(prod_info):
                install_dir = os.path.join(config.APPLICATION.workdir,
                if os.path.exists(install_dir):
                    l_install_dir.append((name_cpp, install_dir))
                    # NOTE(review): a line (likely "else:") is missing here
                    l_not_installed.append(name_cpp)
    # check the name of the directory that (could) contains the binaries
    # from previous detar
    binaries_from_detar = os.path.join(config.APPLICATION.workdir, "BINARIES-" + config.VARS.dist)
    if os.path.exists(binaries_from_detar):
WARNING: existing binaries directory from previous detar installation:
To make new package from this, you have to:
1) install binaries in INSTALL directory with the script "install_bin.sh"
see README file for more details
2) or recompile everything in INSTALL with "sat compile" command
this step is long, and requires some linux packages to be installed
""" % binaries_from_detar)
    # Print warning or error if there are some missing products
    if len(l_not_installed) > 0:
        text_missing_prods = ""
        for p_name in l_not_installed:
            text_missing_prods += "-" + p_name + "\n"
        if not options.force_creation:
            msg = _("ERROR: there are missing products installations:")
            logger.write("%s\n%s" % (src.printcolors.printcError(msg),
            # NOTE(review): the else-branch header is missing before this one
            msg = _("WARNING: there are missing products installations:")
            logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
    # Do the same for sources
    if len(l_sources_not_present) > 0:
        text_missing_prods = ""
        for p_name in l_sources_not_present:
            text_missing_prods += "-" + p_name + "\n"
        if not options.force_creation:
            msg = _("ERROR: there are missing products sources:")
            logger.write("%s\n%s" % (src.printcolors.printcError(msg),
            # NOTE(review): the else-branch header is missing before this one
            msg = _("WARNING: there are missing products sources:")
            logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
    # construct the name of the directory that will contain the binaries
    binaries_dir_name = "BINARIES-" + config.VARS.dist
    # construct the correlation table between the product names, there
    # actual install directories and there install directory in archive
    for prod_name, install_dir in l_install_dir:
        path_in_archive = os.path.join(binaries_dir_name, prod_name)
        d_products[prod_name + " (bin)"] = (install_dir, path_in_archive)
    for prod_name, source_dir in l_source_dir:
        path_in_archive = os.path.join("SOURCES", prod_name)
        d_products[prod_name + " (sources)"] = (source_dir, path_in_archive)
    # for packages of SALOME applications including KERNEL,
    # we produce a salome launcher or a virtual application (depending on salome version)
    if 'KERNEL' in config.APPLICATION.products:
        VersionSalome = src.get_salome_version(config)
        # Case where SALOME has the launcher that uses the SalomeContext API
        if VersionSalome >= 730:
            # create the relative launcher and add it to the files to add
            launcher_name = src.get_launcher_name(config)
            launcher_package = produce_relative_launcher(config,
                                                         not(options.without_commercial))
            d_products["launcher"] = (launcher_package, launcher_name)
            # if the application contains mesa products, we generate in addition to the
            # classical salome launcher a launcher using mesa and called mesa_salome
            # (the mesa launcher will be used for remote usage through ssh).
            if generate_mesa_launcher:
                #if there is one : store the use_mesa property
                restore_use_mesa_option=None
                if ('properties' in config.APPLICATION and
                        'use_mesa' in config.APPLICATION.properties):
                    restore_use_mesa_option = config.APPLICATION.properties.use_mesa
                # activate mesa property, and generate a mesa launcher
                src.activate_mesa_property(config) #activate use_mesa property
                launcher_mesa_name="mesa_"+launcher_name
                launcher_package_mesa = produce_relative_launcher(config,
                                                                  not(options.without_commercial))
                d_products["launcher (mesa)"] = (launcher_package_mesa, launcher_mesa_name)
                # if there was a use_mesa value, we restore it
                # else we set it to the default value "no"
                if restore_use_mesa_option != None:
                    config.APPLICATION.properties.use_mesa=restore_use_mesa_option
                    # NOTE(review): a line (likely "else:") is missing here
                    config.APPLICATION.properties.use_mesa="no"
            # if we mix binaries and sources, we add a copy of the launcher,
            # prefixed with "bin",in order to avoid clashes
            d_products["launcher (copy)"] = (launcher_package, "bin"+launcher_name)
        # Provide a script for the creation of an application EDF style
        appli_script = product_appli_creation_script(config,
        d_products["appli script"] = (appli_script, "create_appli.py")
    # Put also the environment file
    env_file = produce_relative_env_files(config,
    d_products["environment file"] = (env_file, "env_launch.sh")
# NOTE(review): this function is truncated in this view -- argument lines of
# the get_archives_vcs / create_project_for_src_package calls, the symlink
# try/except and chdir-restore, and the final return are missing. Restore
# the full body from VCS.
def source_package(sat, config, logger, options, tmp_working_dir):
    '''Prepare a dictionary that stores all the needed directories and files to
       add in a source package.

    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param options OptResult: the options of the launched command
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
    :return: the dictionary that stores all the needed directories and files to
             add in a source package.
             {label : (path_on_local_machine, path_in_archive)}
    '''
    # Get all the products that are prepared using an archive
    logger.write("Find archive products ... ")
    d_archives, l_pinfo_vcs = get_archives(config, logger)
    logger.write("Done\n")
    if not options.with_vcs and len(l_pinfo_vcs) > 0:
        # Make archives with the products that are not prepared using an archive
        # (git, cvs, svn, etc)
        logger.write("Construct archives for vcs products ... ")
        d_archives_vcs = get_archives_vcs(l_pinfo_vcs,
        logger.write("Done\n")
    # Create a project that carries the product pyconfs and scripts
    logger.write("Create the project ... ")
    d_project = create_project_for_src_package(config,
    logger.write("Done\n")
    # Add salomeTools itself to the package
    tmp_sat = add_salomeTools(config, tmp_working_dir)
    d_sat = {"salomeTools" : (tmp_sat, "salomeTools")}
    # Add a sat symbolic link if not win
    if not src.architecture.is_windows():
        tmp_satlink_path = os.path.join(tmp_working_dir, 'sat')
        # In the jobs, os.getcwd() can fail
        t = config.LOCAL.workdir
        os.chdir(tmp_working_dir)
        if os.path.lexists(tmp_satlink_path):
            os.remove(tmp_satlink_path)
        os.symlink(os.path.join('salomeTools', 'sat'), 'sat')
        d_sat["sat link"] = (tmp_satlink_path, "sat")
    d_source = src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
# NOTE(review): this function is truncated in this view -- the d_archives /
# l_pinfo_vcs initialisations, two 'continue' lines after the skip tests,
# and the 'else:' pairing archive vs vcs products are missing. Restore the
# full body from VCS.
def get_archives(config, logger):
    '''Find all the products that are get using an archive and all the products
       that are get using a vcs (git, cvs, svn) repository.

    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :return: the dictionary {name_product :
             (local path of its archive, path in the package of its archive )}
             and the list of specific configuration corresponding to the vcs
    '''
    # Get the list of product informations
    l_products_name = config.APPLICATION.products.keys()
    l_product_info = src.product.get_products_infos(l_products_name,
    for p_name, p_info in l_product_info:
        # skip product with property not_in_package set to yes
        if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
        # ignore the native and fixed products
        if (src.product.product_is_native(p_info)
                or src.product.product_is_fixed(p_info)):
        if p_info.get_source == "archive":
            archive_path = p_info.archive_info.archive_name
            archive_name = os.path.basename(archive_path)
            # NOTE(review): a line (likely "else:") is missing before this one
            l_pinfo_vcs.append((p_name, p_info))
            d_archives[p_name] = (archive_path,
                                  os.path.join(ARCHIVE_DIR, archive_name))
    return d_archives, l_pinfo_vcs
# NOTE(review): this function is truncated in this view -- the tail of the
# file_path join, the os.remove of matched files, and the ff.close() after
# writing the template are missing. Restore the full body from VCS.
def add_salomeTools(config, tmp_working_dir):
    '''Prepare a version of salomeTools that has a specific local.pyconf file
       configured for a source package.

    :param config Config: The global configuration.
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
    :return: The path to the local salomeTools directory to add in the package
    '''
    # Copy sat in the temporary working directory
    sat_tmp_path = src.Path(os.path.join(tmp_working_dir, "salomeTools"))
    sat_running_path = src.Path(config.VARS.salometoolsway)
    sat_running_path.copy(sat_tmp_path)
    # Update the local.pyconf file that contains the path to the project
    local_pyconf_name = "local.pyconf"
    local_pyconf_dir = os.path.join(tmp_working_dir, "salomeTools", "data")
    local_pyconf_file = os.path.join(local_pyconf_dir, local_pyconf_name)
    # Remove the .pyconf file in the root directory of salomeTools if there is
    # any. (For example when launching jobs, a pyconf file describing the jobs
    # can be here and is not useful)
    files_or_dir_SAT = os.listdir(os.path.join(tmp_working_dir, "salomeTools"))
    for file_or_dir in files_or_dir_SAT:
        if file_or_dir.endswith(".pyconf") or file_or_dir.endswith(".txt"):
            file_path = os.path.join(tmp_working_dir,
    # write the package-specific local.pyconf
    ff = open(local_pyconf_file, "w")
    ff.write(LOCAL_TEMPLATE)
    return sat_tmp_path.path
# NOTE(review): this function is truncated in this view -- the 'if True:'
# (or equivalent) guard around the source step, the d_archives_vcs
# initialisation and some setup lines are missing; the indentation below is
# reconstructed. Restore the full body from VCS.
def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir):
    '''For sources package that require that all products are get using an
       archive, one has to create some archive for the vcs products.
       So this method calls the clean and source command of sat and then create

    :param l_pinfo_vcs List: The list of specific configuration corresponding to
    :param sat Sat: The Sat instance that can be called to clean and source the
    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
    :return: the dictionary that stores all the archives to add in the source
             package. {label : (path_on_local_machine, path_in_archive)}
    '''
    # clean the source directory of all the vcs products, then use the source
    # command and thus construct an archive that will not contain the patches
    l_prod_names = [pn for pn, __ in l_pinfo_vcs]
    if False: # clean is dangerous in user/SOURCES, fixed in tmp_local_working_dir
        logger.write(_("\nclean sources\n"))
        args_clean = config.VARS.application
        args_clean += " --sources --products "
        args_clean += ",".join(l_prod_names)
        logger.write("WARNING: get_archives_vcs clean\n '%s'\n" % args_clean, 1)
        sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)
    # get fresh sources into a temporary workdir, then archive them
    logger.write(_("get sources\n"))
    args_source = config.VARS.application
    args_source += " --products "
    args_source += ",".join(l_prod_names)
    svgDir = sat.cfg.APPLICATION.workdir
    tmp_local_working_dir = os.path.join(sat.cfg.APPLICATION.workdir, "tmp_package") # to avoid too much big files in /tmp
    sat.cfg.APPLICATION.workdir = tmp_local_working_dir
    # DBG.write("SSS sat config.APPLICATION.workdir", sat.cfg.APPLICATION, True)
    # DBG.write("sat config id", id(sat.cfg), True)
    # shit as config is not same id() as for sat.source()
    # sat.source(args_source, batch=True, verbose=5, logger_add_link = logger)
    source.run(args_source, sat, logger) #use this mode as runner.cfg reference
    # make the new archives
    for pn, pinfo in l_pinfo_vcs:
        path_archive = make_archive(pn, pinfo, tmp_local_working_dir)
        logger.write("make archive vcs '%s'\n" % path_archive)
        d_archives_vcs[pn] = (path_archive,
                              os.path.join(ARCHIVE_DIR, pn + ".tgz"))
    # restore the caller's workdir before returning
    sat.cfg.APPLICATION.workdir = svgDir
    # DBG.write("END sat config", sat.cfg.APPLICATION, True)
    return d_archives_vcs
def make_archive(prod_name, prod_info, where):
    '''Create an archive of a product by searching its source directory.

    :param prod_name str: The name of the product.
    :param prod_info Config: The specific configuration corresponding to the
                             product.
    :param where str: The path of the repository where to put the resulting
                      archive.
    :return: The path of the resulting archive
    :rtype: str
    '''
    path_targz_prod = os.path.join(where, prod_name + ".tgz")
    tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
    local_path = prod_info.source_dir
    # Store the sources under the product name inside the archive and skip
    # VCS bookkeeping entries.
    # NOTE(review): the arcname line was truncated in the reviewed view of
    # this file and is restored here from context -- confirm against VCS.
    # NOTE(review): the 'exclude' keyword of TarFile.add was removed in
    # Python 3.9; keep it only while this code targets older interpreters.
    try:
        tar_prod.add(local_path,
                     arcname=prod_name,
                     exclude=exclude_VCS_and_extensions)
    finally:
        # close the tarfile so the gzip stream is flushed even on error
        tar_prod.close()
    return path_targz_prod
# NOTE(review): this function is truncated in this view -- the second
# argument of the os.path.join calls (subdirectory names), two entries of
# the directory list, a 'continue' line, several arguments of the
# find_product_scripts_and_pyconf call, the ff.close() and the final return
# are missing. Restore the full body from VCS.
def create_project_for_src_package(config, tmp_working_dir, with_vcs):
    '''Create a specific project for a source package.

    :param config Config: The global configuration.
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
    :param with_vcs boolean: True if the package is with vcs products (not
                             transformed into archive products)
    :return: The dictionary
             {"project" : (produced project, project path in the archive)}
    '''
    # Create in the working temporary directory the full project tree
    project_tmp_dir = os.path.join(tmp_working_dir, PROJECT_DIR)
    products_pyconf_tmp_dir = os.path.join(project_tmp_dir,
    compil_scripts_tmp_dir = os.path.join(project_tmp_dir,
    env_scripts_tmp_dir = os.path.join(project_tmp_dir,
    patches_tmp_dir = os.path.join(project_tmp_dir,
    application_tmp_dir = os.path.join(project_tmp_dir,
    for directory in [project_tmp_dir,
                      compil_scripts_tmp_dir,
                      application_tmp_dir]:
        src.ensure_path_exists(directory)
    # Create the pyconf that contains the information of the project
    project_pyconf_name = "project.pyconf"
    project_pyconf_file = os.path.join(project_tmp_dir, project_pyconf_name)
    ff = open(project_pyconf_file, "w")
    ff.write(PROJECT_TEMPLATE)
    # Loop over the products to get there pyconf and all the scripts
    # (compilation, environment, patches)
    # and create the pyconf file to add to the project
    lproducts_name = config.APPLICATION.products.keys()
    l_products = src.product.get_products_infos(lproducts_name, config)
    for p_name, p_info in l_products:
        # skip product with property not_in_package set to yes
        if src.get_property_in_product_cfg(p_info, "not_in_package") == "yes":
        find_product_scripts_and_pyconf(p_name,
                                        compil_scripts_tmp_dir,
                                        products_pyconf_tmp_dir)
    # copy the application pyconf into the project as well
    find_application_pyconf(config, application_tmp_dir)
    d_project = {"project" : (project_tmp_dir, PROJECT_DIR )}
# NOTE(review): this function is truncated in this view -- several signature
# lines (p_info, config, with_vcs, env_scripts_tmp_dir, patches_tmp_dir),
# the guard around the vcs-info copy, the tail of two statements, and the
# ff.close() are missing. Restore the full body from VCS.
def find_product_scripts_and_pyconf(p_name,
                                    compil_scripts_tmp_dir,
                                    products_pyconf_tmp_dir):
    '''Create a specific pyconf file for a given product. Get its environment
       script, its compilation script and patches and put it in the temporary
       working directory. This method is used in the source package in order to
       construct the specific project.

    :param p_name str: The name of the product.
    :param p_info Config: The specific configuration corresponding to the
    :param config Config: The global configuration.
    :param with_vcs boolean: True if the package is with vcs products (not
                             transformed into archive products)
    :param compil_scripts_tmp_dir str: The path to the temporary compilation
                                       scripts directory of the project.
    :param env_scripts_tmp_dir str: The path to the temporary environment script
                                    directory of the project.
    :param patches_tmp_dir str: The path to the temporary patch scripts
                                directory of the project.
    :param products_pyconf_tmp_dir str: The path to the temporary product
                                        scripts directory of the project.
    '''
    # read the pyconf of the product
    product_pyconf_path = src.find_file_in_lpath(p_name + ".pyconf",
                                                 config.PATHS.PRODUCTPATH)
    product_pyconf_cfg = src.pyconf.Config(product_pyconf_path)
    # find the compilation script if any
    if src.product.product_has_script(p_info):
        compil_script_path = src.Path(p_info.compil_script)
        compil_script_path.copy(compil_scripts_tmp_dir)
        product_pyconf_cfg[p_info.section].compil_script = os.path.basename(
            p_info.compil_script)
    # find the environment script if any
    if src.product.product_has_env_script(p_info):
        env_script_path = src.Path(p_info.environ.env_script)
        env_script_path.copy(env_scripts_tmp_dir)
        product_pyconf_cfg[p_info.section].environ.env_script = os.path.basename(
            p_info.environ.env_script)
    # find the patches if any
    if src.product.product_has_patches(p_info):
        patches = src.pyconf.Sequence()
        for patch_path in p_info.patches:
            p_path = src.Path(patch_path)
            p_path.copy(patches_tmp_dir)
            patches.append(os.path.basename(patch_path), "")
        product_pyconf_cfg[p_info.section].patches = patches
    # put in the pyconf file the resolved values
    for info in ["git_info", "cvs_info", "svn_info"]:
        for key in p_info[info]:
            product_pyconf_cfg[p_info.section][info][key] = p_info[
    # if the product is not archive, then make it become archive.
    if src.product.product_is_vcs(p_info):
        product_pyconf_cfg[p_info.section].get_source = "archive"
        if not "archive_info" in product_pyconf_cfg[p_info.section]:
            product_pyconf_cfg[p_info.section].addMapping("archive_info",
                                                          src.pyconf.Mapping(product_pyconf_cfg),
        product_pyconf_cfg[p_info.section
                           ].archive_info.archive_name = p_info.name + ".tgz"
    # write the pyconf file to the temporary project location
    product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir,
    ff = open(product_tmp_pyconf_path, 'w')
    ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
    product_pyconf_cfg.__save__(ff, 1)
def find_application_pyconf(config, application_tmp_dir):
    '''Find the application pyconf file and put it in the specific temporary
    directory containing the specific project of a source package.

    :param config Config: The global configuration.
    :param application_tmp_dir str: The path to the temporary application
                                    scripts directory of the project.
    '''
    # read the pyconf of the application
    application_name = config.VARS.application
    application_pyconf_path = src.find_file_in_lpath(
                                            application_name + ".pyconf",
                                            config.PATHS.APPLICATIONPATH)
    application_pyconf_cfg = src.pyconf.Config(application_pyconf_path)

    # Change the workdir: in the archive the application sits next to
    # salomeTools, so resolve it relatively at extraction time.
    application_pyconf_cfg.APPLICATION.workdir = src.pyconf.Reference(
                                 application_pyconf_cfg,
                                 src.pyconf.DOLLAR,
                                 'VARS.salometoolsway + $VARS.sep + ".."')

    # Prevent from compilation in base
    application_pyconf_cfg.APPLICATION.no_base = "yes"

    # write the pyconf file to the temporary application location.
    # 'with' guarantees the handle is closed even if __save__ raises.
    application_tmp_pyconf_path = os.path.join(application_tmp_dir,
                                               application_name + ".pyconf")
    with open(application_tmp_pyconf_path, 'w') as ff:
        ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
        application_pyconf_cfg.__save__(ff, 1)
def project_package(config, name_project, project_file_path, tmp_working_dir, logger):
    '''Prepare a dictionary that stores all the needed directories and files to
    add in a project package.

    :param config Config: The global configuration.
    :param name_project str: The name of the project as known by config.
    :param project_file_path str: The path to the local project.
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
                                project package.
    :param logger Logger: The logger instance used to display messages.
    :return: the dictionary that stores all the needed directories and files to
             add in a project package.
             {label : (path_on_local_machine, path_in_archive)}
    :rtype: dict
    '''
    d_project = {}
    # Read the project file and get the directories to add to the package
    try:
        project_pyconf_cfg = config.PROJECTS.projects.__getattr__(name_project)
    except Exception:
        # narrowed from a bare 'except:' so Ctrl-C / SystemExit still propagate;
        # the project is unknown to the global config: read it from its file
        logger.write("""
WARNING: inexisting config.PROJECTS.projects.%s, try to read now from:\n%s\n""" % (name_project, project_file_path))
        project_pyconf_cfg = src.pyconf.Config(project_file_path)
        project_pyconf_cfg.PWD = os.path.dirname(project_file_path)

    paths = {"ARCHIVEPATH" : "archives",
             "APPLICATIONPATH" : "applications",
             "PRODUCTPATH" : "products",
             "JOBPATH" : "jobs",
             "MACHINEPATH" : "machines"}
    # Loop over the project paths and add it
    for path in paths:
        if path not in project_pyconf_cfg:
            continue
        # Add the directory to the files to add in the package
        d_project[path] = (project_pyconf_cfg[path], paths[path])
        # Modify the value of the path in the package
        project_pyconf_cfg[path] = src.pyconf.Reference(
                                        project_pyconf_cfg,
                                        src.pyconf.DOLLAR,
                                        'project_path + "/' + paths[path] + '"')

    # Modify some values
    if "project_path" not in project_pyconf_cfg:
        project_pyconf_cfg.addMapping("project_path",
                                      src.pyconf.Mapping(project_pyconf_cfg),
                                      "")
    project_pyconf_cfg.project_path = src.pyconf.Reference(project_pyconf_cfg,
                                                           src.pyconf.DOLLAR,
                                                           'PWD')

    # Write the project pyconf file; 'with' guarantees closure on error
    project_file_name = os.path.basename(project_file_path)
    project_pyconf_tmp_path = os.path.join(tmp_working_dir, project_file_name)
    with open(project_pyconf_tmp_path, 'w') as ff:
        ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
        project_pyconf_cfg.__save__(ff, 1)
    d_project["Project hat file"] = (project_pyconf_tmp_path, project_file_name)

    return d_project
def add_readme(config, options, where):
    '''Create the README file of the package and return its path.

    The README is built from a common generated header plus the sections
    (binaries, launcher/virtual application, sources, project, sat) that
    match the selected package options.

    :param config Config: The global configuration.
    :param options OptResult: The options of the launched command.
    :param where str: The directory in which to create the README file.
    :return: The path to the generated README file.
    :rtype: str
    '''
    readme_path = os.path.join(where, "README")
    with codecs.open(readme_path, "w", 'utf-8') as f:

        # templates for building the header
        readme_header = """
# This package was generated with sat $version
# Date: $date
# User: $user
# Distribution : $dist

In the following, $$ROOT represents the directory where you have installed
SALOME (the directory where this file is located).
"""
        readme_compilation_with_binaries = """

compilation based on the binaries used as prerequisites
=======================================================

If you fail to compile the complete application (for example because
you are not root on your system and cannot install missing packages), you
may try a partial compilation based on the binaries.
For that it is necessary to copy the binaries from BINARIES to INSTALL,
and do some substitutions on cmake and .la files (replace the build directories
with local paths).
The procedure to do it is:
 1) Remove or rename INSTALL directory if it exists
 2) Execute the shell script install_bin.sh:
 > cd $ROOT
 > ./install_bin.sh
 3) Use SalomeTool (as explained in Sources section) and compile only the
    modules you need to (with -p option)
"""
        readme_header_tpl = string.Template(readme_header)
        readme_template_path_bin = os.path.join(config.VARS.internal_dir,
                                                "README_BIN.template")
        readme_template_path_bin_launcher = os.path.join(config.VARS.internal_dir,
                                                "README_LAUNCHER.template")
        readme_template_path_bin_virtapp = os.path.join(config.VARS.internal_dir,
                                                "README_BIN_VIRTUAL_APP.template")
        readme_template_path_src = os.path.join(config.VARS.internal_dir,
                                                "README_SRC.template")
        readme_template_path_pro = os.path.join(config.VARS.internal_dir,
                                                "README_PROJECT.template")
        readme_template_path_sat = os.path.join(config.VARS.internal_dir,
                                                "README_SAT.template")

        # prepare substitution dictionary (literal instead of piecewise fill)
        d = {
            'user':    config.VARS.user,
            'date':    datetime.datetime.now().strftime("%Y-%m-%d %H:%M"),
            'version': config.INTERNAL.sat_version,
            'dist':    config.VARS.dist,
        }
        f.write(readme_header_tpl.substitute(d)) # write the general header (common)

        if options.binaries or options.sources:
            d['application'] = config.VARS.application
            f.write("# Application: " + d['application'] + "\n")
            if 'KERNEL' in config.APPLICATION.products:
                VersionSalome = src.get_salome_version(config)
                # Case where SALOME has the launcher that uses the SalomeContext API
                if VersionSalome >= 730:
                    d['launcher'] = config.APPLICATION.profile.launcher_name
                else:
                    d['virtual_app'] = 'runAppli' # this info is not used now)

        # write the specific sections
        if options.binaries:
            f.write(src.template.substitute(readme_template_path_bin, d))
            if "virtual_app" in d:
                f.write(src.template.substitute(readme_template_path_bin_virtapp, d))
            if "launcher" in d:
                f.write(src.template.substitute(readme_template_path_bin_launcher, d))

        if options.sources:
            f.write(src.template.substitute(readme_template_path_src, d))

        if options.binaries and options.sources:
            f.write(readme_compilation_with_binaries)

        if options.project:
            f.write(src.template.substitute(readme_template_path_pro, d))

        if options.sat:
            f.write(src.template.substitute(readme_template_path_sat, d))

    return readme_path
def update_config(config, prop, value):
    '''Remove from config.APPLICATION.products the products that have the property given as input.

    :param config Config: The global config.
    :param prop str: The property to filter
    :param value str: The value of the property to filter
    '''
    src.check_config_has_application(config)
    # collect the matching names first, then delete: never mutate the
    # products mapping while iterating over it
    names_to_drop = [name
                     for name in config.APPLICATION.products.keys()
                     if src.get_property_in_product_cfg(
                            src.product.get_product_config(config, name),
                            prop) == value]
    for name in names_to_drop:
        config.APPLICATION.products.__delitem__(name)
def description():
    '''method that is called when salomeTools is called with --help option.

    :return: The text to display for the package command description.
    :rtype: str
    '''
    return _("""
The package command creates a tar file archive of a product.
There are four kinds of archive, which can be mixed:

 1 - The binary archive.
     It contains the product installation directories plus a launcher.
 2 - The sources archive.
     It contains the product archives, a project (the application plus salomeTools).
 3 - The project archive.
     It contains a project (give the project file path as argument).
 4 - The salomeTools archive.
     It contains code utility salomeTools.

Examples:
 >> sat package SALOME-master --binaries --sources""")
1236 def run(args, runner, logger):
1237 '''method that is called when salomeTools is called with package parameter.
1241 (options, args) = parser.parse_args(args)
1243 # Check that a type of package is called, and only one
1244 all_option_types = (options.binaries,
1246 options.project not in ["", None],
1249 # Check if no option for package type
1250 if all_option_types.count(True) == 0:
1251 msg = _("Error: Precise a type for the package\nUse one of the "
1252 "following options: --binaries, --sources, --project or"
1254 logger.write(src.printcolors.printcError(msg), 1)
1255 logger.write("\n", 1)
1258 # The repository where to put the package if not Binary or Source
1259 package_default_path = runner.cfg.LOCAL.workdir
1261 # if the package contains binaries or sources:
1262 if options.binaries or options.sources:
1263 # Check that the command has been called with an application
1264 src.check_config_has_application(runner.cfg)
1266 # Display information
1267 logger.write(_("Packaging application %s\n") % src.printcolors.printcLabel(
1268 runner.cfg.VARS.application), 1)
1270 # Get the default directory where to put the packages
1271 package_default_path = os.path.join(runner.cfg.APPLICATION.workdir,
1273 src.ensure_path_exists(package_default_path)
1275 # if the package contains a project:
1277 # check that the project is visible by SAT
1278 projectNameFile = options.project + ".pyconf"
1280 for i in runner.cfg.PROJECTS.project_file_paths:
1281 baseName = os.path.basename(i)
1282 if baseName == projectNameFile:
1286 if foundProject is None:
1287 local_path = os.path.join(runner.cfg.VARS.salometoolsway,
1290 msg = _("""ERROR: the project %(1)s is not visible by salomeTools.
1294 Please add it in file:
1296 {"1": options.project, "2": "\n ".join(runner.cfg.PROJECTS.project_file_paths), "3": local_path})
1297 logger.write(src.printcolors.printcError(msg), 1)
1298 logger.write("\n", 1)
1301 options.project_file_path = foundProject
1302 src.printcolors.print_value(logger, "Project path", options.project_file_path, 2)
1304 # Remove the products that are filtered by the --without_property option
1305 if options.without_property:
1306 [prop, value] = options.without_property.split(":")
1307 update_config(runner.cfg, prop, value)
1309 # get the name of the archive or build it
1311 if os.path.basename(options.name) == options.name:
1312 # only a name (not a path)
1313 archive_name = options.name
1314 dir_name = package_default_path
1316 archive_name = os.path.basename(options.name)
1317 dir_name = os.path.dirname(options.name)
1319 # suppress extension
1320 if archive_name[-len(".tgz"):] == ".tgz":
1321 archive_name = archive_name[:-len(".tgz")]
1322 if archive_name[-len(".tar.gz"):] == ".tar.gz":
1323 archive_name = archive_name[:-len(".tar.gz")]
1327 dir_name = package_default_path
1328 if options.binaries or options.sources:
1329 archive_name = runner.cfg.APPLICATION.name
1331 if options.binaries:
1332 archive_name += "-"+runner.cfg.VARS.dist
1335 archive_name += "-SRC"
1336 if options.with_vcs:
1337 archive_name += "-VCS"
1340 project_name = options.project
1341 archive_name += ("PROJECT-" + project_name)
1344 archive_name += ("salomeTools_" + runner.cfg.INTERNAL.sat_version)
1345 if len(archive_name)==0: # no option worked
1346 msg = _("Error: Cannot name the archive\n"
1347 " check if at least one of the following options was "
1348 "selected : --binaries, --sources, --project or"
1350 logger.write(src.printcolors.printcError(msg), 1)
1351 logger.write("\n", 1)
1354 path_targz = os.path.join(dir_name, archive_name + ".tgz")
1356 src.printcolors.print_value(logger, "Package path", path_targz, 2)
1358 # Create a working directory for all files that are produced during the
1359 # package creation and that will be removed at the end of the command
1360 tmp_working_dir = os.path.join(runner.cfg.VARS.tmp_root, runner.cfg.VARS.datehour)
1361 src.ensure_path_exists(tmp_working_dir)
1362 logger.write("\n", 5)
1363 logger.write(_("The temporary working directory: %s\n" % tmp_working_dir),5)
1365 logger.write("\n", 3)
1367 msg = _("Preparation of files to add to the archive")
1368 logger.write(src.printcolors.printcLabel(msg), 2)
1369 logger.write("\n", 2)
1371 d_files_to_add={} # content of the archive
1373 # a dict to hold paths that will need to be substitute for users recompilations
1374 d_paths_to_substitute={}
1376 if options.binaries:
1377 d_bin_files_to_add = binary_package(runner.cfg,
1381 # for all binaries dir, store the substitution that will be required
1382 # for extra compilations
1383 for key in d_bin_files_to_add:
1384 if key.endswith("(bin)"):
1385 source_dir = d_bin_files_to_add[key][0]
1386 path_in_archive = d_bin_files_to_add[key][1].replace("BINARIES-" + runner.cfg.VARS.dist,"INSTALL")
1387 if os.path.basename(source_dir)==os.path.basename(path_in_archive):
1388 # if basename is the same we will just substitute the dirname
1389 d_paths_to_substitute[os.path.dirname(source_dir)]=\
1390 os.path.dirname(path_in_archive)
1392 d_paths_to_substitute[source_dir]=path_in_archive
1394 d_files_to_add.update(d_bin_files_to_add)
1397 d_files_to_add.update(source_package(runner,
1402 if options.binaries:
1403 # for archives with bin and sources we provide a shell script able to
1404 # install binaries for compilation
1405 file_install_bin=produce_install_bin_file(runner.cfg,logger,
1407 d_paths_to_substitute,
1409 d_files_to_add.update({"install_bin" : (file_install_bin, "install_bin.sh")})
1410 logger.write("substitutions that need to be done later : \n", 5)
1411 logger.write(str(d_paths_to_substitute), 5)
1412 logger.write("\n", 5)
1414 # --salomeTool option is not considered when --sources is selected, as this option
1415 # already brings salomeTool!
1417 d_files_to_add.update({"salomeTools" : (runner.cfg.VARS.salometoolsway, "")})
1420 DBG.write("config for package %s" % project_name, runner.cfg)
1421 d_files_to_add.update(project_package(runner.cfg, project_name, options.project_file_path, tmp_working_dir, logger))
1423 if not(d_files_to_add):
1424 msg = _("Error: Empty dictionnary to build the archive!\n")
1425 logger.write(src.printcolors.printcError(msg), 1)
1426 logger.write("\n", 1)
1429 # Add the README file in the package
1430 local_readme_tmp_path = add_readme(runner.cfg, options, tmp_working_dir)
1431 d_files_to_add["README"] = (local_readme_tmp_path, "README")
1433 # Add the additional files of option add_files
1434 if options.add_files:
1435 for file_path in options.add_files:
1436 if not os.path.exists(file_path):
1437 msg = _("WARNING: the file %s is not accessible.\n" % file_path)
1439 file_name = os.path.basename(file_path)
1440 d_files_to_add[file_name] = (file_path, file_name)
1442 logger.write("\n", 2)
1444 logger.write(src.printcolors.printcLabel(_("Actually do the package")), 2)
1445 logger.write("\n", 2)
1449 # Creating the object tarfile
1450 tar = tarfile.open(path_targz, mode='w:gz')
1452 # get the filtering function if needed
1453 filter_function = exclude_VCS_and_extensions
1455 # Add the files to the tarfile object
1456 res = add_files(tar, archive_name, d_files_to_add, logger, f_exclude=filter_function)
1458 except KeyboardInterrupt:
1459 logger.write(src.printcolors.printcError("\nERROR: forced interruption\n"), 1)
1460 logger.write(_("Removing the temporary working directory '%s'... ") % tmp_working_dir, 1)
1461 # remove the working directory
1462 shutil.rmtree(tmp_working_dir)
1463 logger.write(_("OK"), 1)
1464 logger.write(_("\n"), 1)
1467 # unconditionaly remove the tmp_local_working_dir
1468 tmp_local_working_dir = os.path.join(runner.cfg.APPLICATION.workdir, "tmp_package")
1469 if os.path.isdir(tmp_local_working_dir):
1470 shutil.rmtree(tmp_local_working_dir)
1472 # have to decide some time
1473 DBG.tofix("make shutil.rmtree('%s') effective" % tmp_working_dir, "", DBG.isDeveloper())
1475 # Print again the path of the package
1476 logger.write("\n", 2)
1477 src.printcolors.print_value(logger, "Package path", path_targz, 2)