3 # Copyright (C) 2010-2012 CEA/DEN
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 # Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 from application import get_SALOME_modules
# Directory name, inside a source package archive, that holds the product
# archives (see get_archives / get_archives_vcs).
ARCHIVE_DIR = "ARCHIVES"
# Directory name, inside a source package archive, that holds the generated
# project (see create_project_for_src_package / source_package).
PROJECT_DIR = "PROJECT"
# pyconf template written as the "project.pyconf" hat file of the project that
# is embedded in a source package (see create_project_for_src_package).
# NOTE(review): several lines of this literal (including its closing triple
# quote) are not visible in this chunk — confirm against the full file.
PROJECT_TEMPLATE = """#!/usr/bin/env python
# The path to the archive root directory
root_path : $PWD + "/../"
project_path : $PWD + "/"
# Where to search the archives of the products
ARCHIVEPATH : $root_path + "ARCHIVES"
# Where to search the pyconf of the applications
APPLICATIONPATH : $project_path + "applications/"
# Where to search the pyconf of the products
PRODUCTPATH : $project_path + "products/"
# Where to search the pyconf of the jobs of the project
JOBPATH : $project_path + "jobs/"
# Where to search the pyconf of the machines of the project
MACHINEPATH : $project_path + "machines/"
# pyconf template written as the "site.pyconf" of the salomeTools copy embedded
# in a source package (see add_salomeTools): it redirects log/tmp directories
# and registers the packaged project (PROJECT_DIR) in project_file_paths.
# NOTE(review): several lines of this literal (including its closing
# parenthesis/quote) are not visible in this chunk.
SITE_TEMPLATE = ("""#!/usr/bin/env python
log_dir : $USER.workdir + "/LOGS"
tmp_dir_with_application : '/tmp' + $VARS.sep + $VARS.user + """
"""$VARS.sep + $APPLICATION.name + $VARS.sep + 'test'
tmp_dir : '/tmp' + $VARS.sep + $VARS.user + $VARS.sep + 'test'
project_file_paths : [$VARS.salometoolsway + $VARS.sep + \"..\" + $VARS.sep"""
""" + \"""" + PROJECT_DIR + """\" + $VARS.sep + "project.pyconf"]
# Define all possible option for the package command :  sat package <options>
# Each add_option call registers: short flag, long flag, value type,
# destination attribute on the options result, help text, default value.
parser = src.options.Options()
parser.add_option('b', 'binaries', 'boolean', 'binaries',
    _('Optional: Produce a binary package.'), False)
parser.add_option('f', 'force_creation', 'boolean', 'force_creation',
    _('Optional: Only binary package: produce the archive even if '
      'there are some missing products.'), False)
parser.add_option('s', 'sources', 'boolean', 'sources',
    _('Optional: Produce a compilable archive of the sources of the '
      'application.'), False)
# NOTE(review): the closing default-value argument of this call is not visible
# in this chunk (non-contiguous original numbering) — presumably `False)`.
parser.add_option('', 'with_vcs', 'boolean', 'with_vcs',
    _('Optional: Only source package: do not make archive of vcs products.'),
parser.add_option('p', 'project', 'string', 'project',
    _('Optional: Produce an archive that contains a project.'), "")
parser.add_option('t', 'salometools', 'boolean', 'sat',
    _('Optional: Produce an archive that contains salomeTools.'), False)
parser.add_option('n', 'name', 'string', 'name',
    _('Optional: The name or full path of the archive.'), None)
parser.add_option('', 'add_files', 'list2', 'add_files',
    _('Optional: The list of additional files to add to the archive.'), [])
parser.add_option('', 'without_commercial', 'boolean', 'without_commercial',
    _('Optional: do not add commercial licence.'), False)
def add_files(tar, name_archive, d_content, logger):
    '''Create an archive containing all directories and files that are given in
    the d_content argument.

    :param tar tarfile: The tarfile instance used to make the archive.
    :param name_archive str: The name of the archive to make (used as the root
                             directory inside the archive).
    :param d_content dict: The dictionary that contain all directories and files
                           to add in the archive.
                           {label : (path_on_local_machine, path_in_archive)}
    :param logger Logger: the logging instance
    :return: 0 if success, 1 if not.
    :rtype: int
    '''
    # NOTE(review): some lines of this function are missing from this view
    # (non-contiguous original numbering) — in particular the "try:" that the
    # "except" below matches, and presumably a success flag / final return.
    # get the max length of the messages in order to make the display
    max_len = len(max(d_content.keys(), key=len))
    # loop over each directory or file stored in the d_content dictionary
    for name in d_content.keys():
        # display information
        len_points = max_len - len(name)
        logger.write(name + " " + len_points * "." + " ", 3)
        # Get the local path and the path in archive
        # of the directory or file to add
        local_path, archive_path = d_content[name]
        in_archive = os.path.join(name_archive, archive_path)
        # Add it in the archive
        tar.add(local_path, arcname=in_archive)
        logger.write(src.printcolors.printcSuccess(_("OK")), 3)
    except Exception as e:
        logger.write(src.printcolors.printcError(_("KO ")), 3)
        logger.write(str(e), 3)
        logger.write("\n", 3)
def produce_relative_launcher(config,
                              with_commercial=True):
    '''Create a specific SALOME launcher for the binary package. This launcher
    uses relative paths (out_dir_Path) so the archive is relocatable.

    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param file_dir str: the directory where to put the launcher
    :param file_name str: The launcher name
    :param binaries_dir_name str: the name of the repository where the binaries
                                  are, in the archive.
    :return: the path of the produced launcher
    :rtype: str
    '''
    # NOTE(review): several lines are missing from this view — intermediate
    # parameters (logger, file_dir, file_name, binaries_dir_name), the
    # FileEnvWriter arguments, the file close, the chmod call and the final
    # return. Confirm against the full file.
    # Get the launcher template
    profile_install_dir = os.path.join(binaries_dir_name,
                                       config.APPLICATION.profile.product)
    withProfile = src.fileEnviron.withProfile
    # Patch the template so ABSOLUTE_APPLI_PATH is computed relative to the
    # extraction directory (out_dir_Path) instead of a hard-coded install dir.
    withProfile = withProfile.replace(
        "ABSOLUTE_APPLI_PATH'] = 'PROFILE_INSTALL_DIR'",
        "ABSOLUTE_APPLI_PATH'] = out_dir_Path + '" + config.VARS.sep + profile_install_dir + "'")
    withProfile = withProfile.replace(
        "os.path.join( 'PROFILE_INSTALL_DIR'",
        "os.path.join( out_dir_Path, '" + profile_install_dir + "'")
    # Split the template so the application environment can be injected at the
    # marker line.
    before, after = withProfile.split(
        "# here your local standalone environment\n")
    # create an environment file writer
    writer = src.environment.FileEnvWriter(config,
    filepath = os.path.join(file_dir, file_name)
    # open the file and write into it
    launch_file = open(filepath, "w")
    launch_file.write(before)
    # Write the application environment (python syntax) into the launcher
    writer.write_cfgForPy_file(launch_file,
                               for_package = binaries_dir_name,
                               with_commercial=with_commercial)
    launch_file.write(after)
    # Little hack to put out_dir_Path outside the strings
    src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' )
    # change the rights in order to make the file executable for everybody
def produce_relative_env_files(config,
    '''Create some specific environment files for the binary package. These
    files use relative paths (out_dir_Path) so the archive is relocatable.

    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param file_dir str: the directory where to put the files
    :param binaries_dir_name str: the name of the repository where the binaries
                                  are, in the archive.
    :return: the list of path of the produced environment files
    :rtype: List
    '''
    # NOTE(review): several lines are missing from this view — remaining
    # parameters, FileEnvWriter arguments, the chmod call and the final
    # return. Confirm against the full file.
    # create an environment file writer
    writer = src.environment.FileEnvWriter(config,
    filepath = writer.write_env_file("env_launch.sh",
                                     for_package = binaries_dir_name)
    # Little hack to put out_dir_Path as environment variable
    src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )
    # change the rights in order to make the file executable for everybody
def product_appli_creation_script(config,
    '''Create a script that can produce an application (EDF style) in the binary
    package.

    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param file_dir str: the directory where to put the file
    :param binaries_dir_name str: the name of the repository where the binaries
                                  are, in the archive.
    :return: the path of the produced script file
    :rtype: Str
    '''
    # NOTE(review): several lines are missing from this view — remaining
    # parameters, some loop/branch lines (e.g. "continue"/"else:"), ff.close()
    # and the final return. Confirm against the full file.
    template_name = "create_appli.py.for_bin_packages.template"
    template_path = os.path.join(config.VARS.internal_dir, template_name)
    text_to_fill = open(template_path, "r").read()
    # First placeholder: the binaries directory name inside the archive
    text_to_fill = text_to_fill.replace("TO BE FILLED 1",
                                        '"' + binaries_dir_name + '"')
    # Build the <module .../> XML lines for every SALOME module of the
    # application (cpp products contribute one line per component).
    for product_name in get_SALOME_modules(config):
        product_info = src.product.get_product_config(config, product_name)
        if src.product.product_is_smesh_plugin(product_info):
        if 'install_dir' in product_info and bool(product_info.install_dir):
            if src.product.product_is_cpp(product_info):
                for cpp_name in src.product.get_product_components(product_info):
                    line_to_add = ("<module name=\"" +
                                   "\" gui=\"yes\" path=\"''' + "
                                   "os.path.join(dir_bin_name, \"" +
                                   cpp_name + "\") + '''\"/>")
                line_to_add = ("<module name=\"" +
                               "\" gui=\"yes\" path=\"''' + "
                               "os.path.join(dir_bin_name, \"" +
                               product_name + "\") + '''\"/>")
            text_to_add += line_to_add + "\n"
    # Second placeholder: the accumulated module lines
    filled_text = text_to_fill.replace("TO BE FILLED 2", text_to_add)
    tmp_file_path = os.path.join(file_dir, "create_appli.py")
    ff = open(tmp_file_path, "w")
    ff.write(filled_text)
    # change the rights in order to make the file executable for everybody
    os.chmod(tmp_file_path,
def binary_package(config, logger, options, tmp_working_dir):
    '''Prepare a dictionary that stores all the needed directories and files to
    add in a binary package.

    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param options OptResult: the options of the launched command
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
                                binary package
    :return: the dictionary that stores all the needed directories and files to
             add in a binary package.
             {label : (path_on_local_machine, path_in_archive)}
    :rtype: dict
    '''
    # NOTE(review): several lines are missing from this view — initialization
    # of l_install_dir / l_not_installed / l_source_dir / d_products, some
    # "continue"/"else:" lines, the early-return on error, the arguments of
    # the produce_* calls, and the final return. Confirm against full file.
    # Get the list of product installation to add to the archive
    l_products_name = config.APPLICATION.products.keys()
    l_product_info = src.product.get_products_infos(l_products_name,
    l_sources_not_present = []
    for prod_name, prod_info in l_product_info:
        # Add the sources of the products that have the property
        # sources_in_package : "yes"
        if src.get_property_in_product_cfg(prod_info,
                                           "sources_in_package") == "yes":
            if os.path.exists(prod_info.source_dir):
                l_source_dir.append((prod_name, prod_info.source_dir))
            l_sources_not_present.append(prod_name)
        # ignore the native and fixed products for install directories
        if (src.product.product_is_native(prod_info)
                or src.product.product_is_fixed(prod_info)
                or not src.product.product_compiles(prod_info)):
        if src.product.check_installation(prod_info):
            l_install_dir.append((prod_name, prod_info.install_dir))
            l_not_installed.append(prod_name)
        # Add also the cpp generated modules (if any)
        if src.product.product_is_cpp(prod_info):
            for name_cpp in src.product.get_product_components(prod_info):
                install_dir = os.path.join(config.APPLICATION.workdir,
                if os.path.exists(install_dir):
                    l_install_dir.append((name_cpp, install_dir))
                    l_not_installed.append(name_cpp)
    # Print warning or error if there are some missing products
    if len(l_not_installed) > 0:
        text_missing_prods = ""
        for p_name in l_not_installed:
            text_missing_prods += "-" + p_name + "\n"
        if not options.force_creation:
            msg = _("ERROR: there are missing products installations:")
            logger.write("%s\n%s" % (src.printcolors.printcError(msg),
            msg = _("WARNING: there are missing products installations:")
            logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
    # Do the same for sources
    if len(l_sources_not_present) > 0:
        text_missing_prods = ""
        for p_name in l_sources_not_present:
            text_missing_prods += "-" + p_name + "\n"
        if not options.force_creation:
            msg = _("ERROR: there are missing products sources:")
            logger.write("%s\n%s" % (src.printcolors.printcError(msg),
            msg = _("WARNING: there are missing products sources:")
            logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
    # construct the name of the directory that will contain the binaries
    binaries_dir_name = "BINARIES-" + config.VARS.dist
    # construct the correlation table between the product names, there
    # actual install directories and there install directory in archive
    for prod_name, install_dir in l_install_dir:
        path_in_archive = os.path.join(binaries_dir_name, prod_name)
        d_products[prod_name] = (install_dir, path_in_archive)
    for prod_name, source_dir in l_source_dir:
        path_in_archive = os.path.join("SOURCES", prod_name)
        d_products[prod_name + " (sources)"] = (source_dir, path_in_archive)
    # create the relative launcher and add it to the files to add
    if ("profile" in config.APPLICATION and
            "product" in config.APPLICATION.profile):
        launcher_name = config.APPLICATION.profile.launcher_name
        launcher_package = produce_relative_launcher(config,
                                         not(options.without_commercial))
        d_products["launcher"] = (launcher_package, launcher_name)
        # No profile, it means that there has to be some environment files
        env_file = produce_relative_env_files(config,
        d_products["environment file"] = (env_file, "env_launch.sh")
    # And provide a script for the creation of an application EDF style
    appli_script = product_appli_creation_script(config,
    d_products["appli script"] = (appli_script, "create_appli.py")
def source_package(sat, config, logger, options, tmp_working_dir):
    '''Prepare a dictionary that stores all the needed directories and files to
    add in a source package.

    :param sat Sat: The Sat instance, used to call the clean/source commands.
    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param options OptResult: the options of the launched command
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
                                source package
    :return: the dictionary that stores all the needed directories and files to
             add in a source package.
             {label : (path_on_local_machine, path_in_archive)}
    :rtype: dict
    '''
    # NOTE(review): several lines are missing from this view — the arguments
    # of get_archives_vcs / create_project_for_src_package, the try/except
    # around the symlink creation, and the restore of the working directory.
    # Get all the products that are prepared using an archive
    logger.write("Find archive products ... ")
    d_archives, l_pinfo_vcs = get_archives(config, logger)
    logger.write("Done\n")
    if not options.with_vcs and len(l_pinfo_vcs) > 0:
        # Make archives with the products that are not prepared using an archive
        # (git, cvs, svn, etc)
        logger.write("Construct archives for vcs products ... ")
        d_archives_vcs = get_archives_vcs(l_pinfo_vcs,
        logger.write("Done\n")
    # Create a project corresponding to the application for the package
    logger.write("Create the project ... ")
    d_project = create_project_for_src_package(config,
    logger.write("Done\n")
    # Add a patched copy of salomeTools to the package
    tmp_sat = add_salomeTools(config, tmp_working_dir)
    d_sat = {"salomeTools" : (tmp_sat, "salomeTools")}
    # Add a sat symbolic link if not win
    if not src.architecture.is_windows():
        tmp_satlink_path = os.path.join(tmp_working_dir, 'sat')
        # In the jobs, os.getcwd() can fail
        t = config.USER.workdir
        os.chdir(tmp_working_dir)
        if os.path.lexists(tmp_satlink_path):
            os.remove(tmp_satlink_path)
        os.symlink(os.path.join('salomeTools', 'sat'), 'sat')
        d_sat["sat link"] = (tmp_satlink_path, "sat")
    return src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
def get_archives(config, logger):
    '''Find all the products that are get using an archive and all the products
    that are get using a vcs (git, cvs, svn) repository.

    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :return: the dictionary {name_product :
             (local path of its archive, path in the package of its archive )}
             and the list of specific configuration corresponding to the vcs
             products
    :rtype: (Dict, List)
    '''
    # NOTE(review): several lines are missing from this view — initialization
    # of d_archives / l_pinfo_vcs, a "continue" for native/fixed products and
    # the "else:" branch that routes non-archive products into l_pinfo_vcs.
    # Get the list of product informations
    l_products_name = config.APPLICATION.products.keys()
    l_product_info = src.product.get_products_infos(l_products_name,
    for p_name, p_info in l_product_info:
        # ignore the native and fixed products
        if (src.product.product_is_native(p_info)
                or src.product.product_is_fixed(p_info)):
        if p_info.get_source == "archive":
            archive_path = p_info.archive_info.archive_name
            archive_name = os.path.basename(archive_path)
            l_pinfo_vcs.append((p_name, p_info))
        d_archives[p_name] = (archive_path,
                              os.path.join(ARCHIVE_DIR, archive_name))
    return d_archives, l_pinfo_vcs
def add_salomeTools(config, tmp_working_dir):
    '''Prepare a version of salomeTools that has a specific site.pyconf file
    configured for a source package.

    :param config Config: The global configuration.
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
                                source package
    :return: The path to the local salomeTools directory to add in the package
    :rtype: str
    '''
    # NOTE(review): the ff.close() after writing the site.pyconf is not
    # visible in this chunk (non-contiguous original numbering).
    # Copy sat in the temporary working directory
    sat_tmp_path = src.Path(os.path.join(tmp_working_dir, "salomeTools"))
    sat_running_path = src.Path(config.VARS.salometoolsway)
    sat_running_path.copy(sat_tmp_path)
    # Update the site.pyconf file that contains the path to the project
    site_pyconf_name = "site.pyconf"
    site_pyconf_dir = os.path.join(tmp_working_dir, "salomeTools", "data")
    site_pyconf_file = os.path.join(site_pyconf_dir, site_pyconf_name)
    ff = open(site_pyconf_file, "w")
    ff.write(SITE_TEMPLATE)
    return sat_tmp_path.path
def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir):
    '''For sources package that require that all products are get using an
    archive, one has to create some archive for the vcs products.
    So this method calls the clean and source command of sat and then create
    the archives.

    :param l_pinfo_vcs List: The list of specific configuration corresponding to
                             each vcs product
    :param sat Sat: The Sat instance that can be called to clean and source the
                    products
    :param config Config: The global configuration.
    :param logger Logger: the logging instance
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
                                source package
    :return: the dictionary that stores all the archives to add in the source
             package. {label : (path_on_local_machine, path_in_archive)}
    :rtype: dict
    '''
    # NOTE(review): a few lines are missing from this view — e.g. the
    # initialization of d_archives_vcs before the final loop.
    # clean the source directory of all the vcs products, then use the source
    # command and thus construct an archive that will not contain the patches
    l_prod_names = [pn for pn, __ in l_pinfo_vcs]
    # clean sources via the sat "clean" command
    logger.write(_("clean sources\n"))
    args_clean = config.VARS.application
    args_clean += " --sources --products "
    args_clean += ",".join(l_prod_names)
    sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)
    # re-fetch pristine sources via the sat "source" command
    logger.write(_("get sources"))
    args_source = config.VARS.application
    args_source += " --products "
    args_source += ",".join(l_prod_names)
    sat.source(args_source, batch=True, verbose=0, logger_add_link = logger)
    # make the new archives
    for pn, pinfo in l_pinfo_vcs:
        path_archive = make_archive(pn, pinfo, tmp_working_dir)
        d_archives_vcs[pn] = (path_archive,
                              os.path.join(ARCHIVE_DIR, pn + ".tgz"))
    return d_archives_vcs
def make_archive(prod_name, prod_info, where):
    '''Create an archive of a product by searching its source directory.

    :param prod_name str: The name of the product.
    :param prod_info Config: The specific configuration corresponding to the
                             product
    :param where str: The path of the repository where to put the resulting
                      archive
    :return: The path of the resulting archive
    :rtype: str
    '''
    # NOTE(review): the tar_prod.close() line is not visible in this chunk —
    # confirm against the full file that the tarfile is closed before return.
    path_targz_prod = os.path.join(where, prod_name + ".tgz")
    tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
    local_path = prod_info.source_dir
    tar_prod.add(local_path, arcname=prod_name)
    return path_targz_prod
def create_project_for_src_package(config, tmp_working_dir, with_vcs):
    '''Create a specific project for a source package.

    :param config Config: The global configuration.
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
                                source package
    :param with_vcs boolean: True if the package is with vcs products (not
                             transformed into archive products)
    :return: The dictionary
             {"project" : (produced project, project path in the archive)}
    :rtype: Dict
    '''
    # NOTE(review): several lines are missing from this view — the
    # sub-directory names of the os.path.join calls, ff.close(), a "continue"
    # for native/fixed products, some find_product_scripts_and_pyconf
    # arguments and the final return of d_project.
    # Create in the working temporary directory the full project tree
    project_tmp_dir = os.path.join(tmp_working_dir, PROJECT_DIR)
    products_pyconf_tmp_dir = os.path.join(project_tmp_dir,
    compil_scripts_tmp_dir = os.path.join(project_tmp_dir,
    env_scripts_tmp_dir = os.path.join(project_tmp_dir,
    patches_tmp_dir = os.path.join(project_tmp_dir,
    application_tmp_dir = os.path.join(project_tmp_dir,
    for directory in [project_tmp_dir,
                      compil_scripts_tmp_dir,
                      application_tmp_dir]:
        src.ensure_path_exists(directory)
    # Create the pyconf that contains the information of the project
    project_pyconf_name = "project.pyconf"
    project_pyconf_file = os.path.join(project_tmp_dir, project_pyconf_name)
    ff = open(project_pyconf_file, "w")
    ff.write(PROJECT_TEMPLATE)
    # Loop over the products to get there pyconf and all the scripts
    # (compilation, environment, patches)
    # and create the pyconf file to add to the project
    lproducts_name = config.APPLICATION.products.keys()
    l_products = src.product.get_products_infos(lproducts_name, config)
    for p_name, p_info in l_products:
        # ignore native and fixed products
        if (src.product.product_is_native(p_info) or
                src.product.product_is_fixed(p_info)):
        find_product_scripts_and_pyconf(p_name,
                                        compil_scripts_tmp_dir,
                                        products_pyconf_tmp_dir)
    # Copy the application pyconf into the project as well
    find_application_pyconf(config, application_tmp_dir)
    d_project = {"project" : (project_tmp_dir, PROJECT_DIR )}
def find_product_scripts_and_pyconf(p_name,
                                    compil_scripts_tmp_dir,
                                    products_pyconf_tmp_dir):
    '''Create a specific pyconf file for a given product. Get its environment
    script, its compilation script and patches and put it in the temporary
    working directory. This method is used in the source package in order to
    construct the specific project.

    :param p_name str: The name of the product.
    :param p_info Config: The specific configuration corresponding to the
                          product
    :param config Config: The global configuration.
    :param with_vcs boolean: True if the package is with vcs products (not
                             transformed into archive products)
    :param compil_scripts_tmp_dir str: The path to the temporary compilation
                                       scripts directory of the project.
    :param env_scripts_tmp_dir str: The path to the temporary environment script
                                    directory of the project.
    :param patches_tmp_dir str: The path to the temporary patch scripts
                                directory of the project.
    :param products_pyconf_tmp_dir str: The path to the temporary product
                                        scripts directory of the project.
    '''
    # NOTE(review): several lines are missing from this view — intermediate
    # parameters (p_info, config, with_vcs, env_scripts_tmp_dir,
    # patches_tmp_dir), the "if not with_vcs:" guard around the vcs-to-archive
    # conversion, and ff.close(). Confirm against the full file.
    # read the pyconf of the product
    product_pyconf_path = src.find_file_in_lpath(p_name + ".pyconf",
                                                 config.PATHS.PRODUCTPATH)
    product_pyconf_cfg = src.pyconf.Config(product_pyconf_path)
    # find the compilation script if any
    if src.product.product_has_script(p_info):
        compil_script_path = src.Path(p_info.compil_script)
        compil_script_path.copy(compil_scripts_tmp_dir)
        product_pyconf_cfg[p_info.section].compil_script = os.path.basename(
                                                        p_info.compil_script)
    # find the environment script if any
    if src.product.product_has_env_script(p_info):
        env_script_path = src.Path(p_info.environ.env_script)
        env_script_path.copy(env_scripts_tmp_dir)
        product_pyconf_cfg[p_info.section].environ.env_script = os.path.basename(
                                                    p_info.environ.env_script)
    # find the patches if any
    if src.product.product_has_patches(p_info):
        patches = src.pyconf.Sequence()
        for patch_path in p_info.patches:
            p_path = src.Path(patch_path)
            p_path.copy(patches_tmp_dir)
            patches.append(os.path.basename(patch_path), "")
        product_pyconf_cfg[p_info.section].patches = patches
    # put in the pyconf file the resolved values
    for info in ["git_info", "cvs_info", "svn_info"]:
        for key in p_info[info]:
            product_pyconf_cfg[p_info.section][info][key] = p_info[
    # if the product is not archive, then make it become archive.
    if src.product.product_is_vcs(p_info):
        product_pyconf_cfg[p_info.section].get_source = "archive"
        if not "archive_info" in product_pyconf_cfg[p_info.section]:
            product_pyconf_cfg[p_info.section].addMapping("archive_info",
                                    src.pyconf.Mapping(product_pyconf_cfg),
        product_pyconf_cfg[p_info.section
                           ].archive_info.archive_name = p_info.name + ".tgz"
    # write the pyconf file to the temporary project location
    product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir,
    ff = open(product_tmp_pyconf_path, 'w')
    ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
    product_pyconf_cfg.__save__(ff, 1)
def find_application_pyconf(config, application_tmp_dir):
    '''Find the application pyconf file and put it in the specific temporary
    directory containing the specific project of a source package.

    :param config Config: The global configuration.
    :param application_tmp_dir str: The path to the temporary application
                                    scripts directory of the project.
    '''
    # NOTE(review): a few lines are missing from this view — e.g. a Reference
    # argument and ff.close(). Confirm against the full file.
    # read the pyconf of the application
    application_name = config.VARS.application
    application_pyconf_path = src.find_file_in_lpath(
                                        application_name + ".pyconf",
                                        config.PATHS.APPLICATIONPATH)
    application_pyconf_cfg = src.pyconf.Config(application_pyconf_path)
    # Make the workdir point next to the packaged salomeTools copy
    application_pyconf_cfg.APPLICATION.workdir = src.pyconf.Reference(
                                    application_pyconf_cfg,
                                    'VARS.salometoolsway + $VARS.sep + ".."')
    # Prevent from compilation in base
    application_pyconf_cfg.APPLICATION.no_base = "yes"
    # write the pyconf file to the temporary application location
    application_tmp_pyconf_path = os.path.join(application_tmp_dir,
                                               application_name + ".pyconf")
    ff = open(application_tmp_pyconf_path, 'w')
    ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
    application_pyconf_cfg.__save__(ff, 1)
def project_package(project_file_path, tmp_working_dir):
    '''Prepare a dictionary that stores all the needed directories and files to
    add in a project package.

    :param project_file_path str: The path to the local project.
    :param tmp_working_dir str: The temporary local directory containing some
                                specific directories or files needed in the
                                project package
    :return: the dictionary that stores all the needed directories and files to
             add in a project package.
             {label : (path_on_local_machine, path_in_archive)}
    :rtype: dict
    '''
    # NOTE(review): several lines are missing from this view — initialization
    # of d_project, the "for path in paths:" loop header, a "continue", some
    # Reference arguments, ff.close() and the final return.
    # Read the project file and get the directories to add to the package
    project_pyconf_cfg = src.pyconf.Config(project_file_path)
    # map: pyconf key -> directory name inside the package
    paths = {"ARCHIVEPATH" : "archives",
             "APPLICATIONPATH" : "applications",
             "PRODUCTPATH" : "products",
             "MACHINEPATH" : "machines"}
    # Loop over the project paths and add it
        if path not in project_pyconf_cfg:
        # Add the directory to the files to add in the package
        d_project[path] = (project_pyconf_cfg[path], paths[path])
        # Modify the value of the path in the package
        project_pyconf_cfg[path] = src.pyconf.Reference(
                                    'project_path + "/' + paths[path] + '"')
    # Ensure the "project_path" key exists in the packaged pyconf
    if "project_path" not in project_pyconf_cfg:
        project_pyconf_cfg.addMapping("project_path",
                                      src.pyconf.Mapping(project_pyconf_cfg),
    project_pyconf_cfg.project_path = src.pyconf.Reference(project_pyconf_cfg,
    # Write the project pyconf file
    project_file_name = os.path.basename(project_file_path)
    project_pyconf_tmp_path = os.path.join(tmp_working_dir, project_file_name)
    ff = open(project_pyconf_tmp_path, 'w')
    ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
    project_pyconf_cfg.__save__(ff, 1)
    d_project["Project hat file"] = (project_pyconf_tmp_path, project_file_name)
def add_readme(config, package_type, where):
    '''Create the README file of the package from the template matching the
    package type and a substitution dictionary.

    :param config Config: The global configuration.
    :param package_type int: One of BINARY, SOURCE, PROJECT, SAT (module-level
                             constants, not visible in this chunk).
    :param where str: The directory where to write the README file.
    :return: the path of the written README file (presumably — the return
             statement is not visible in this chunk).
    '''
    # NOTE(review): a few lines are missing from this view — the
    # initialization of d, "else:" branches, f.close() and the final return.
    readme_path = os.path.join(where, "README")
    f = open(readme_path, 'w')
    # prepare substitution dictionary
    if package_type == BINARY:
        d['application'] = config.VARS.application
        d['user'] = config.VARS.user
        d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
        d['version'] = config.INTERNAL.sat_version
        d['dist'] = config.VARS.dist
        if 'profile' in config.APPLICATION:
            d['launcher'] = config.APPLICATION.profile.launcher_name
            readme_template_path = os.path.join(config.VARS.internal_dir,
                                                "README_BIN.template")
            d['env_file'] = 'env_launch.sh'
            readme_template_path = os.path.join(config.VARS.internal_dir,
                                                "README_BIN_NO_PROFILE.template")
    if package_type == SOURCE:
        d['application'] = config.VARS.application
        d['user'] = config.VARS.user
        d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
        d['version'] = config.INTERNAL.sat_version
        if 'profile' in config.APPLICATION:
            d['profile'] = config.APPLICATION.profile.product
            d['launcher'] = config.APPLICATION.profile.launcher_name
        readme_template_path = os.path.join(config.VARS.internal_dir,
                                            "README_SRC.template")
    if package_type == PROJECT:
        d['user'] = config.VARS.user
        d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
        d['version'] = config.INTERNAL.sat_version
        readme_template_path = os.path.join(config.VARS.internal_dir,
                                            "README_PROJECT.template")
    if package_type == SAT:
        d['user'] = config.VARS.user
        d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
        d['version'] = config.INTERNAL.sat_version
        readme_template_path = os.path.join(config.VARS.internal_dir,
                                            "README_SAT.template")
    # substitute the dictionary into the chosen template and write it out
    f.write(src.template.substitute(readme_template_path, d))
    # NOTE(review): the enclosing "def description():" line is not visible in
    # this chunk — this is the body of the command's help-text function.
    '''method that is called when salomeTools is called with --help option.

    :return: The text to display for the package command description.
    :rtype: str
    '''
    return _("The package command creates an archive.\nThere are 4 kinds of "
             "archive:\n  1- The binary archive. It contains all the product "
             "installation directories and a launcher,\n  2- The sources archive."
             " It contains the products archives, a project corresponding to "
             "the application and salomeTools,\n  3- The project archive. It "
             "contains a project (give the project file path as argument),\n  4-"
             " The salomeTools archive. It contains salomeTools.\n\nexample:"
             "\nsat package SALOME-master --sources")
def run(args, runner, logger):
    '''method that is called when salomeTools is called with package parameter.

    :param args List: the command-line arguments after "package".
    :param runner: the object carrying the global configuration (runner.cfg).
    :param logger Logger: the logging instance.
    '''
    # NOTE(review): many lines are missing from this view (non-contiguous
    # original numbering): early returns after error messages, the
    # option-to-package_type if/elif ladder, "if options.name:" / "else:"
    # branches, several call arguments, the try: matched by the
    # KeyboardInterrupt handler, and the final return of the command result.
    (options, args) = parser.parse_args(args)
    # Check that a type of package is called, and only one
    all_option_types = (options.binaries,
                        options.project not in ["", None],
    # Check if no option for package type
    if all_option_types.count(True) == 0:
        msg = _("Error: Precise a type for the package\nUse one of the "
                "following options: --binaries, --sources, --project or"
        logger.write(src.printcolors.printcError(msg), 1)
        logger.write("\n", 1)
    # Check for only one option for package type
    if all_option_types.count(True) > 1:
        msg = _("Error: You can use only one type for the package\nUse only one"
                " of the following options: --binaries, --sources, --project or"
        logger.write(src.printcolors.printcError(msg), 1)
        logger.write("\n", 1)
    # Get the package type
        package_type = BINARY
        package_type = SOURCE
        package_type = PROJECT
    # The repository where to put the package if not Binary or Source
    package_default_path = runner.cfg.USER.workdir
    if package_type in [BINARY, SOURCE]:
        # Check that the command has been called with an application
        src.check_config_has_application(runner.cfg)
        # Display information
        logger.write(_("Packaging application %s\n") % src.printcolors.printcLabel(
                                        runner.cfg.VARS.application), 1)
        # Get the default directory where to put the packages
        package_default_path = os.path.join(runner.cfg.APPLICATION.workdir,
        src.ensure_path_exists(package_default_path)
    elif package_type == PROJECT:
        # check that the project is visible by SAT
        if options.project not in runner.cfg.PROJECTS.project_file_paths:
            site_path = os.path.join(runner.cfg.VARS.salometoolsway,
            msg = _("ERROR: the project %(proj)s is not visible by salomeTools."
                    "\nPlease add it in the %(site)s file." % {
                    "proj" : options.project, "site" : site_path})
            logger.write(src.printcolors.printcError(msg), 1)
            logger.write("\n", 1)
    src.printcolors.print_value(logger, "Package type", package_type, 2)
    # get the name of the archive or construct it
        if os.path.basename(options.name) == options.name:
            # only a name (not a path)
            archive_name = options.name
            dir_name = package_default_path
            archive_name = os.path.basename(options.name)
            dir_name = os.path.dirname(options.name)
        # suppress extension
        if archive_name[-len(".tgz"):] == ".tgz":
            archive_name = archive_name[:-len(".tgz")]
        if archive_name[-len(".tar.gz"):] == ".tar.gz":
            archive_name = archive_name[:-len(".tar.gz")]
        dir_name = package_default_path
        if package_type == BINARY:
            archive_name = (runner.cfg.APPLICATION.name +
                            runner.cfg.VARS.dist)
        if package_type == SOURCE:
            archive_name = (runner.cfg.APPLICATION.name +
            if options.with_vcs:
                archive_name = (runner.cfg.APPLICATION.name +
        if package_type == PROJECT:
            project_name, __ = os.path.splitext(
                                        os.path.basename(options.project))
            archive_name = ("PROJECT" +
        if package_type == SAT:
            archive_name = ("salomeTools" +
                            runner.cfg.INTERNAL.sat_version)
    path_targz = os.path.join(dir_name, archive_name + ".tgz")
    # Print the path of the package
    src.printcolors.print_value(logger, "Package path", path_targz, 2)
    # Create a working directory for all files that are produced during the
    # package creation and that will be removed at the end of the command
    tmp_working_dir = os.path.join(runner.cfg.VARS.tmp_root,
                                   runner.cfg.VARS.datehour)
    src.ensure_path_exists(tmp_working_dir)
    logger.write("\n", 5)
    logger.write(_("The temporary working directory: %s\n" % tmp_working_dir),5)
    logger.write("\n", 3)
    msg = _("Preparation of files to add to the archive")
    logger.write(src.printcolors.printcLabel(msg), 2)
    logger.write("\n", 2)
    # Build the {label : (local_path, path_in_archive)} dictionary according
    # to the selected package type
    if package_type == BINARY:
        d_files_to_add = binary_package(runner.cfg,
        if not(d_files_to_add):
    if package_type == SOURCE:
        d_files_to_add = source_package(runner,
    if package_type == PROJECT:
        d_files_to_add = project_package(options.project, tmp_working_dir)
    if package_type == SAT:
        d_files_to_add = {"salomeTools" : (runner.cfg.VARS.salometoolsway, "")}
    # Add the README file in the package
    local_readme_tmp_path = add_readme(runner.cfg,
    d_files_to_add["README"] = (local_readme_tmp_path, "README")
    # Add the additional files of option add_files
    if options.add_files:
        for file_path in options.add_files:
            if not os.path.exists(file_path):
                msg = _("WARNING: the file %s is not accessible.\n" % file_path)
            file_name = os.path.basename(file_path)
            d_files_to_add[file_name] = (file_path, file_name)
    logger.write("\n", 2)
    logger.write(src.printcolors.printcLabel(_("Actually do the package")), 2)
    logger.write("\n", 2)
    # Creating the object tarfile
    tar = tarfile.open(path_targz, mode='w:gz')
    # Add the files to the tarfile object
    res = add_files(tar, archive_name, d_files_to_add, logger)
    except KeyboardInterrupt:
        logger.write(src.printcolors.printcError("\nERROR: forced interruption\n"), 1)
        logger.write(_("Removing the temporary working directory ... "), 1)
        # remove the working directory
        shutil.rmtree(tmp_working_dir)
        logger.write(_("OK"), 1)
        logger.write(_("\n"), 1)
    # remove the working directory
    shutil.rmtree(tmp_working_dir)
    # Print again the path of the package
    logger.write("\n", 2)
    src.printcolors.print_value(logger, "Package path", path_targz, 2)