3 # Copyright (C) 2010-2012 CEA/DEN
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 # Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
27 from application import get_SALOME_modules
# Module-level constants: names of the directories created inside produced
# archives, plus pyconf text templates written into source packages.
# NOTE(review): this listing is truncated — both template strings below are
# missing lines (including their closing quotes); their contents are runtime
# data and are left byte-identical.
34 ARCHIVE_DIR = "ARCHIVES"
35 PROJECT_DIR = "PROJECT"
37 PROJECT_TEMPLATE = """#!/usr/bin/env python
40 # The path to the archive root directory
41 root_path : $PWD + "/../"
43 project_path : $PWD + "/"
45 # Where to search the archives of the products
46 ARCHIVEPATH : $root_path + "ARCHIVES"
47 # Where to search the pyconf of the applications
48 APPLICATIONPATH : $project_path + "applications/"
49 # Where to search the pyconf of the products
50 PRODUCTPATH : $project_path + "products/"
51 # Where to search the pyconf of the jobs of the project
52 JOBPATH : $project_path + "jobs/"
53 # Where to search the pyconf of the machines of the project
54 MACHINEPATH : $project_path + "machines/"
57 SITE_TEMPLATE = ("""#!/usr/bin/env python
64 log_dir : $USER.workdir + "/LOGS"
67 tmp_dir_with_application : '/tmp' + $VARS.sep + $VARS.user + """
68 """$VARS.sep + $APPLICATION.name + $VARS.sep + 'test'
69 tmp_dir : '/tmp' + $VARS.sep + $VARS.user + $VARS.sep + 'test'
76 project_file_paths : [$VARS.salometoolsway + $VARS.sep + \"..\" + $VARS.sep"""
77 """ + \"""" + PROJECT_DIR + """\" + $VARS.sep + "project.pyconf"]
# Command-line option declarations for `sat package`: exactly one package
# type (--binaries / --sources / --project / --salometools) is expected;
# the remaining options tune naming, content and robustness of the archive.
# NOTE(review): truncated listing — the closing `False)` argument of the
# 'with_vcs' add_option call is on a missing line.
81 # Define all possible option for the package command : sat package <options>
82 parser = src.options.Options()
83 parser.add_option('b', 'binaries', 'boolean', 'binaries',
84 _('Optional: Produce a binary package.'), False)
85 parser.add_option('f', 'force_creation', 'boolean', 'force_creation',
86 _('Optional: Only binary package: produce the archive even if '
87 'there are some missing products.'), False)
88 parser.add_option('s', 'sources', 'boolean', 'sources',
89 _('Optional: Produce a compilable archive of the sources of the '
90 'application.'), False)
91 parser.add_option('', 'with_vcs', 'boolean', 'with_vcs',
92 _('Optional: Only source package: do not make archive of vcs products.'),
94 parser.add_option('p', 'project', 'string', 'project',
95 _('Optional: Produce an archive that contains a project.'), "")
96 parser.add_option('t', 'salometools', 'boolean', 'sat',
97 _('Optional: Produce an archive that contains salomeTools.'), False)
98 parser.add_option('n', 'name', 'string', 'name',
99 _('Optional: The name or full path of the archive.'), None)
100 parser.add_option('', 'add_files', 'list2', 'add_files',
101 _('Optional: The list of additional files to add to the archive.'), [])
102 parser.add_option('', 'without_commercial', 'boolean', 'without_commercial',
103 _('Optional: do not add commercial licence.'), False)
# Fill an already-open tarfile with every (local_path, archive_path) pair of
# d_content, logging one aligned OK/KO line per entry at verbosity 3.
# NOTE(review): truncated listing — the `try:` matching the `except` below,
# the docstring closer and the final return are on missing lines. Presumably
# returns 0 on success, per the docstring — confirm against the full file.
105 def add_files(tar, name_archive, d_content, logger):
106 '''Create an archive containing all directories and files that are given in
107 the d_content argument.
109 :param tar tarfile: The tarfile instance used to make the archive.
110 :param name_archive str: The name of the archive to make.
111 :param d_content dict: The dictionary that contain all directories and files
112 to add in the archive.
114 (path_on_local_machine, path_in_archive)
115 :param logger Logger: the logging instance
116 :return: 0 if success, 1 if not.
119 # get the max length of the messages in order to make the display
120 max_len = len(max(d_content.keys(), key=len))
123 # loop over each directory or file stored in the d_content dictionary
124 for name in d_content.keys():
125 # display information
126 len_points = max_len - len(name)
127 logger.write(name + " " + len_points * "." + " ", 3)
128 # Get the local path and the path in archive
129 # of the directory or file to add
130 local_path, archive_path = d_content[name]
131 in_archive = os.path.join(name_archive, archive_path)
132 # Add it in the archive
134 tar.add(local_path, arcname=in_archive)
135 logger.write(src.printcolors.printcSuccess(_("OK")), 3)
136 except Exception as e:
137 logger.write(src.printcolors.printcError(_("KO ")), 3)
138 logger.write(str(e), 3)
140 logger.write("\n", 3)
# Build a relocatable SALOME launcher for a binary package by rewriting the
# withProfile template so absolute install paths become out_dir_Path-relative.
# NOTE(review): truncated listing — several signature parameters (logger,
# file_dir, file_name, binaries_dir_name, per the docstring), the
# FileEnvWriter arguments, launch_file.close() and the return statement are
# on missing lines.
143 def produce_relative_launcher(config,
148 with_commercial=True):
149 '''Create a specific SALOME launcher for the binary package. This launcher
152 :param config Config: The global configuration.
153 :param logger Logger: the logging instance
154 :param file_dir str: the directory where to put the launcher
155 :param file_name str: The launcher name
156 :param binaries_dir_name str: the name of the repository where the binaries
158 :return: the path of the produced launcher
162 # Get the launcher template
163 profile_install_dir = os.path.join(binaries_dir_name,
164 config.APPLICATION.profile.product)
165 withProfile = src.fileEnviron.withProfile
166 withProfile = withProfile.replace(
167 "ABSOLUTE_APPLI_PATH'] = 'PROFILE_INSTALL_DIR'",
168 "ABSOLUTE_APPLI_PATH'] = out_dir_Path + '" + config.VARS.sep + profile_install_dir + "'")
169 withProfile = withProfile.replace(
170 "os.path.join( 'PROFILE_INSTALL_DIR'",
171 "os.path.join( out_dir_Path, '" + profile_install_dir + "'")
173 before, after = withProfile.split(
174 "# here your local standalone environment\n")
176 # create an environment file writer
177 writer = src.environment.FileEnvWriter(config,
182 filepath = os.path.join(file_dir, file_name)
183 # open the file and write into it
184 launch_file = open(filepath, "w")
185 launch_file.write(before)
187 writer.write_cfgForPy_file(launch_file,
188 for_package = binaries_dir_name,
189 with_commercial=with_commercial)
190 launch_file.write(after)
193 # Little hack to put out_dir_Path outside the strings
194 src.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' )
196 # change the rights in order to make the file executable for everybody
# Produce a relative-path environment file (env_launch.sh) for a binary
# package; paths are rewritten to use the ${out_dir_Path} shell variable.
# NOTE(review): truncated listing — signature parameters, FileEnvWriter
# arguments, the chmod call and the return statement are on missing lines.
208 def produce_relative_env_files(config,
212 '''Create some specific environment files for the binary package. These
213 files use relative paths.
215 :param config Config: The global configuration.
216 :param logger Logger: the logging instance
217 :param file_dir str: the directory where to put the files
218 :param binaries_dir_name str: the name of the repository where the binaries
220 :return: the list of path of the produced environment files
223 # create an environment file writer
224 writer = src.environment.FileEnvWriter(config,
230 filepath = writer.write_env_file("env_launch.sh",
233 for_package = binaries_dir_name)
235 # Little hack to put out_dir_Path as environment variable
236 src.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' )
238 # change the rights in order to make the file executable for everybody
# Generate create_appli.py for a binary package from a template, filling in
# the binaries directory name and one <module .../> XML line per SALOME
# module (cpp components get one line per component).
# NOTE(review): truncated listing — signature parameters, text_to_add
# initialization, several continue/else lines, ff.close() and the return
# are on missing lines.
250 def product_appli_creation_script(config,
254 '''Create a script that can produce an application (EDF style) in the binary
257 :param config Config: The global configuration.
258 :param logger Logger: the logging instance
259 :param file_dir str: the directory where to put the file
260 :param binaries_dir_name str: the name of the repository where the binaries
262 :return: the path of the produced script file
265 template_name = "create_appli.py.for_bin_packages.template"
266 template_path = os.path.join(config.VARS.internal_dir, template_name)
267 text_to_fill = open(template_path, "r").read()
268 text_to_fill = text_to_fill.replace("TO BE FILLED 1",
269 '"' + binaries_dir_name + '"')
272 for product_name in get_SALOME_modules(config):
273 product_info = src.product.get_product_config(config, product_name)
275 if src.product.product_is_smesh_plugin(product_info):
278 if 'install_dir' in product_info and bool(product_info.install_dir):
279 if src.product.product_is_cpp(product_info):
281 for cpp_name in src.product.get_product_components(product_info):
282 line_to_add = ("<module name=\"" +
284 "\" gui=\"yes\" path=\"''' + "
285 "os.path.join(dir_bin_name, \"" +
286 cpp_name + "\") + '''\"/>")
289 line_to_add = ("<module name=\"" +
291 "\" gui=\"yes\" path=\"''' + "
292 "os.path.join(dir_bin_name, \"" +
293 product_name + "\") + '''\"/>")
294 text_to_add += line_to_add + "\n"
296 filled_text = text_to_fill.replace("TO BE FILLED 2", text_to_add)
298 tmp_file_path = os.path.join(file_dir, "create_appli.py")
299 ff = open(tmp_file_path, "w")
300 ff.write(filled_text)
303 # change the rights in order to make the file executable for everybody
304 os.chmod(tmp_file_path,
# Collect everything that goes into a binary package: installed product
# directories (under BINARIES-<dist>), optional product sources, and either
# a relative launcher (when the application declares a profile product) or
# env_launch.sh plus a create_appli.py script.
# NOTE(review): truncated listing — list initializations, continue/else
# branches, error-path returns and the final return of the d_products dict
# are on missing lines.
315 def binary_package(config, logger, options, tmp_working_dir):
316 '''Prepare a dictionary that stores all the needed directories and files to
317 add in a binary package.
319 :param config Config: The global configuration.
320 :param logger Logger: the logging instance
321 :param options OptResult: the options of the launched command
322 :param tmp_working_dir str: The temporary local directory containing some
323 specific directories or files needed in the
325 :return: the dictionary that stores all the needed directories and files to
326 add in a binary package.
327 {label : (path_on_local_machine, path_in_archive)}
331 # Get the list of product installation to add to the archive
332 l_products_name = config.APPLICATION.products.keys()
333 l_product_info = src.product.get_products_infos(l_products_name,
338 l_sources_not_present = []
339 for prod_name, prod_info in l_product_info:
340 # ignore the native and fixed products
341 if (src.product.product_is_native(prod_info)
342 or src.product.product_is_fixed(prod_info)
343 or not src.product.product_compiles(prod_info)):
345 if src.product.check_installation(prod_info):
346 l_install_dir.append((prod_name, prod_info.install_dir))
348 l_not_installed.append(prod_name)
350 # Add also the cpp generated modules (if any)
351 if src.product.product_is_cpp(prod_info):
353 for name_cpp in src.product.get_product_components(prod_info):
354 install_dir = os.path.join(config.APPLICATION.workdir,
356 if os.path.exists(install_dir):
357 l_install_dir.append((name_cpp, install_dir))
359 l_not_installed.append(name_cpp)
361 # Add the sources of the products that have the property
362 # sources_in_package : "yes"
363 if src.get_property_in_product_cfg(prod_info,
364 "sources_in_package") == "yes":
365 if os.path.exists(prod_info.source_dir):
366 l_source_dir.append((prod_name, prod_info.source_dir))
368 l_sources_not_present.append(prod_name)
370 # Print warning or error if there are some missing products
371 if len(l_not_installed) > 0:
372 text_missing_prods = ""
373 for p_name in l_not_installed:
374 text_missing_prods += "-" + p_name + "\n"
375 if not options.force_creation:
376 msg = _("ERROR: there are missing products installations:")
377 logger.write("%s\n%s" % (src.printcolors.printcError(msg),
382 msg = _("WARNING: there are missing products installations:")
383 logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
387 # Do the same for sources
388 if len(l_sources_not_present) > 0:
389 text_missing_prods = ""
390 for p_name in l_sources_not_present:
391 text_missing_prods += "-" + p_name + "\n"
392 if not options.force_creation:
393 msg = _("ERROR: there are missing products sources:")
394 logger.write("%s\n%s" % (src.printcolors.printcError(msg),
399 msg = _("WARNING: there are missing products sources:")
400 logger.write("%s\n%s" % (src.printcolors.printcWarning(msg),
404 # construct the name of the directory that will contain the binaries
405 binaries_dir_name = "BINARIES-" + config.VARS.dist
407 # construct the correlation table between the product names, there
408 # actual install directories and there install directory in archive
410 for prod_name, install_dir in l_install_dir:
411 path_in_archive = os.path.join(binaries_dir_name, prod_name)
412 d_products[prod_name] = (install_dir, path_in_archive)
414 for prod_name, source_dir in l_source_dir:
415 path_in_archive = os.path.join("SOURCES", prod_name)
416 d_products[prod_name + " (sources)"] = (source_dir, path_in_archive)
418 # create the relative launcher and add it to the files to add
419 if ("profile" in config.APPLICATION and
420 "product" in config.APPLICATION.profile):
421 launcher_name = config.APPLICATION.profile.launcher_name
422 launcher_package = produce_relative_launcher(config,
427 not(options.without_commercial))
429 d_products["launcher"] = (launcher_package, launcher_name)
431 # No profile, it means that there has to be some environment files
432 env_file = produce_relative_env_files(config,
437 d_products["environment file"] = (env_file, "env_launch.sh")
439 # And provide a script for the creation of an application EDF style
440 appli_script = product_appli_creation_script(config,
445 d_products["appli script"] = (appli_script, "create_appli.py")
# Collect everything that goes into a source package: product archives, vcs
# products re-archived via sat clean/source, a generated project, a copy of
# salomeTools, and (non-Windows) a 'sat' symlink.
# NOTE(review): truncated listing — the with_vcs condition on L264 reads
# `if not options.with_vcs and ...` yet the branch *builds* vcs archives;
# in the upstream file this line is `if len(l_pinfo_vcs) > 0` guarded by
# with_vcs handling on missing lines — confirm against the full file.
449 def source_package(sat, config, logger, options, tmp_working_dir):
450 '''Prepare a dictionary that stores all the needed directories and files to
451 add in a source package.
453 :param config Config: The global configuration.
454 :param logger Logger: the logging instance
455 :param options OptResult: the options of the launched command
456 :param tmp_working_dir str: The temporary local directory containing some
457 specific directories or files needed in the
459 :return: the dictionary that stores all the needed directories and files to
460 add in a source package.
461 {label : (path_on_local_machine, path_in_archive)}
465 # Get all the products that are prepared using an archive
466 logger.write("Find archive products ... ")
467 d_archives, l_pinfo_vcs = get_archives(config, logger)
468 logger.write("Done\n")
470 if not options.with_vcs and len(l_pinfo_vcs) > 0:
471 # Make archives with the products that are not prepared using an archive
472 # (git, cvs, svn, etc)
473 logger.write("Construct archives for vcs products ... ")
474 d_archives_vcs = get_archives_vcs(l_pinfo_vcs,
479 logger.write("Done\n")
482 logger.write("Create the project ... ")
483 d_project = create_project_for_src_package(config,
486 logger.write("Done\n")
489 tmp_sat = add_salomeTools(config, tmp_working_dir)
490 d_sat = {"salomeTools" : (tmp_sat, "salomeTools")}
492 # Add a sat symbolic link if not win
493 if not src.architecture.is_windows():
494 tmp_satlink_path = os.path.join(tmp_working_dir, 'sat')
498 # In the jobs, os.getcwd() can fail
499 t = config.USER.workdir
500 os.chdir(tmp_working_dir)
501 if os.path.lexists(tmp_satlink_path):
502 os.remove(tmp_satlink_path)
503 os.symlink(os.path.join('salomeTools', 'sat'), 'sat')
506 d_sat["sat link"] = (tmp_satlink_path, "sat")
508 return src.merge_dicts(d_archives, d_archives_vcs, d_project, d_sat)
# Split the application's products into archive-based ones (mapped to their
# destination path under ARCHIVES/ in the package) and vcs-based ones.
# NOTE(review): truncated listing — dict/list initializations, the continue
# after the native/fixed test and the else introducing the vcs branch are
# on missing lines.
510 def get_archives(config, logger):
511 '''Find all the products that are get using an archive and all the products
512 that are get using a vcs (git, cvs, svn) repository.
514 :param config Config: The global configuration.
515 :param logger Logger: the logging instance
516 :return: the dictionary {name_product :
517 (local path of its archive, path in the package of its archive )}
518 and the list of specific configuration corresponding to the vcs
522 # Get the list of product informations
523 l_products_name = config.APPLICATION.products.keys()
524 l_product_info = src.product.get_products_infos(l_products_name,
528 for p_name, p_info in l_product_info:
529 # ignore the native and fixed products
530 if (src.product.product_is_native(p_info)
531 or src.product.product_is_fixed(p_info)):
533 if p_info.get_source == "archive":
534 archive_path = p_info.archive_info.archive_name
535 archive_name = os.path.basename(archive_path)
537 l_pinfo_vcs.append((p_name, p_info))
539 d_archives[p_name] = (archive_path,
540 os.path.join(ARCHIVE_DIR, archive_name))
541 return d_archives, l_pinfo_vcs
# Copy the running salomeTools into the temporary working directory and
# overwrite its data/site.pyconf with SITE_TEMPLATE so the packaged sat
# points at the packaged project.
# NOTE(review): truncated listing — ff.close() after the template write is
# presumably on a missing line; confirm against the full file.
543 def add_salomeTools(config, tmp_working_dir):
544 '''Prepare a version of salomeTools that has a specific site.pyconf file
545 configured for a source package.
547 :param config Config: The global configuration.
548 :param tmp_working_dir str: The temporary local directory containing some
549 specific directories or files needed in the
551 :return: The path to the local salomeTools directory to add in the package
554 # Copy sat in the temporary working directory
555 sat_tmp_path = src.Path(os.path.join(tmp_working_dir, "salomeTools"))
556 sat_running_path = src.Path(config.VARS.salometoolsway)
557 sat_running_path.copy(sat_tmp_path)
559 # Update the site.pyconf file that contains the path to the project
560 site_pyconf_name = "site.pyconf"
561 site_pyconf_dir = os.path.join(tmp_working_dir, "salomeTools", "data")
562 site_pyconf_file = os.path.join(site_pyconf_dir, site_pyconf_name)
563 ff = open(site_pyconf_file, "w")
564 ff.write(SITE_TEMPLATE)
567 return sat_tmp_path.path
# For vcs-managed products: run `sat clean --sources` then `sat source` so
# the source trees are pristine (patches not applied), then tar each one
# into <name>.tgz for inclusion under ARCHIVES/ in the package.
# NOTE(review): truncated listing — the d_archives_vcs initialization line
# is missing.
569 def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir):
570 '''For sources package that require that all products are get using an
571 archive, one has to create some archive for the vcs products.
572 So this method calls the clean and source command of sat and then create
575 :param l_pinfo_vcs List: The list of specific configuration corresponding to
577 :param sat Sat: The Sat instance that can be called to clean and source the
579 :param config Config: The global configuration.
580 :param logger Logger: the logging instance
581 :param tmp_working_dir str: The temporary local directory containing some
582 specific directories or files needed in the
584 :return: the dictionary that stores all the archives to add in the source
585 package. {label : (path_on_local_machine, path_in_archive)}
588 # clean the source directory of all the vcs products, then use the source
589 # command and thus construct an archive that will not contain the patches
590 l_prod_names = [pn for pn, __ in l_pinfo_vcs]
592 logger.write(_("clean sources\n"))
593 args_clean = config.VARS.application
594 args_clean += " --sources --products "
595 args_clean += ",".join(l_prod_names)
596 sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)
598 logger.write(_("get sources"))
599 args_source = config.VARS.application
600 args_source += " --products "
601 args_source += ",".join(l_prod_names)
602 sat.source(args_source, batch=True, verbose=0, logger_add_link = logger)
604 # make the new archives
606 for pn, pinfo in l_pinfo_vcs:
607 path_archive = make_archive(pn, pinfo, tmp_working_dir)
608 d_archives_vcs[pn] = (path_archive,
609 os.path.join(ARCHIVE_DIR, pn + ".tgz"))
610 return d_archives_vcs
# Tar-gzip a product's source directory into <where>/<prod_name>.tgz with
# the product name as the archive root.
# NOTE(review): truncated listing — tar_prod.close() is presumably on the
# missing line before the return; confirm against the full file.
612 def make_archive(prod_name, prod_info, where):
613 '''Create an archive of a product by searching its source directory.
615 :param prod_name str: The name of the product.
616 :param prod_info Config: The specific configuration corresponding to the
618 :param where str: The path of the repository where to put the resulting
620 :return: The path of the resulting archive
623 path_targz_prod = os.path.join(where, prod_name + ".tgz")
624 tar_prod = tarfile.open(path_targz_prod, mode='w:gz')
625 local_path = prod_info.source_dir
626 tar_prod.add(local_path, arcname=prod_name)
628 return path_targz_prod
# Build the PROJECT/ tree of a source package: write project.pyconf from
# PROJECT_TEMPLATE, then per-product pyconfs + scripts, then the application
# pyconf.
# NOTE(review): truncated listing — subdirectory name arguments, ff.close(),
# the continue after the native/fixed test, several
# find_product_scripts_and_pyconf arguments and the final return are on
# missing lines.
630 def create_project_for_src_package(config, tmp_working_dir, with_vcs):
631 '''Create a specific project for a source package.
633 :param config Config: The global configuration.
634 :param tmp_working_dir str: The temporary local directory containing some
635 specific directories or files needed in the
637 :param with_vcs boolean: True if the package is with vcs products (not
638 transformed into archive products)
639 :return: The dictionary
640 {"project" : (produced project, project path in the archive)}
644 # Create in the working temporary directory the full project tree
645 project_tmp_dir = os.path.join(tmp_working_dir, PROJECT_DIR)
646 products_pyconf_tmp_dir = os.path.join(project_tmp_dir,
648 compil_scripts_tmp_dir = os.path.join(project_tmp_dir,
651 env_scripts_tmp_dir = os.path.join(project_tmp_dir,
654 patches_tmp_dir = os.path.join(project_tmp_dir,
657 application_tmp_dir = os.path.join(project_tmp_dir,
659 for directory in [project_tmp_dir,
660 compil_scripts_tmp_dir,
663 application_tmp_dir]:
664 src.ensure_path_exists(directory)
666 # Create the pyconf that contains the information of the project
667 project_pyconf_name = "project.pyconf"
668 project_pyconf_file = os.path.join(project_tmp_dir, project_pyconf_name)
669 ff = open(project_pyconf_file, "w")
670 ff.write(PROJECT_TEMPLATE)
673 # Loop over the products to get there pyconf and all the scripts
674 # (compilation, environment, patches)
675 # and create the pyconf file to add to the project
676 lproducts_name = config.APPLICATION.products.keys()
677 l_products = src.product.get_products_infos(lproducts_name, config)
678 for p_name, p_info in l_products:
679 # ignore native and fixed products
680 if (src.product.product_is_native(p_info) or
681 src.product.product_is_fixed(p_info)):
683 find_product_scripts_and_pyconf(p_name,
687 compil_scripts_tmp_dir,
690 products_pyconf_tmp_dir)
692 find_application_pyconf(config, application_tmp_dir)
694 d_project = {"project" : (project_tmp_dir, PROJECT_DIR )}
# Copy one product's compile/env scripts and patches into the temporary
# project, rewrite its pyconf to reference them by basename, resolve vcs
# info values, convert vcs products to archive mode, and write the result.
# NOTE(review): truncated listing — several signature parameters (p_info,
# config, with_vcs, env_scripts_tmp_dir, patches_tmp_dir per the docstring),
# the `if not with_vcs:` guard around the archive conversion, the try/except
# around the info loop, and ff.close() are on missing lines.
697 def find_product_scripts_and_pyconf(p_name,
701 compil_scripts_tmp_dir,
704 products_pyconf_tmp_dir):
705 '''Create a specific pyconf file for a given product. Get its environment
706 script, its compilation script and patches and put it in the temporary
707 working directory. This method is used in the source package in order to
708 construct the specific project.
710 :param p_name str: The name of the product.
711 :param p_info Config: The specific configuration corresponding to the
713 :param config Config: The global configuration.
714 :param with_vcs boolean: True if the package is with vcs products (not
715 transformed into archive products)
716 :param compil_scripts_tmp_dir str: The path to the temporary compilation
717 scripts directory of the project.
718 :param env_scripts_tmp_dir str: The path to the temporary environment script
719 directory of the project.
720 :param patches_tmp_dir str: The path to the temporary patch scripts
721 directory of the project.
722 :param products_pyconf_tmp_dir str: The path to the temporary product
723 scripts directory of the project.
726 # read the pyconf of the product
727 product_pyconf_path = src.find_file_in_lpath(p_name + ".pyconf",
728 config.PATHS.PRODUCTPATH)
729 product_pyconf_cfg = src.pyconf.Config(product_pyconf_path)
731 # find the compilation script if any
732 if src.product.product_has_script(p_info):
733 compil_script_path = src.Path(p_info.compil_script)
734 compil_script_path.copy(compil_scripts_tmp_dir)
735 product_pyconf_cfg[p_info.section].compil_script = os.path.basename(
736 p_info.compil_script)
737 # find the environment script if any
738 if src.product.product_has_env_script(p_info):
739 env_script_path = src.Path(p_info.environ.env_script)
740 env_script_path.copy(env_scripts_tmp_dir)
741 product_pyconf_cfg[p_info.section].environ.env_script = os.path.basename(
742 p_info.environ.env_script)
743 # find the patches if any
744 if src.product.product_has_patches(p_info):
745 patches = src.pyconf.Sequence()
746 for patch_path in p_info.patches:
747 p_path = src.Path(patch_path)
748 p_path.copy(patches_tmp_dir)
749 patches.append(os.path.basename(patch_path), "")
751 product_pyconf_cfg[p_info.section].patches = patches
754 # put in the pyconf file the resolved values
755 for info in ["git_info", "cvs_info", "svn_info"]:
757 for key in p_info[info]:
758 product_pyconf_cfg[p_info.section][info][key] = p_info[
761 # if the product is not archive, then make it become archive.
762 if src.product.product_is_vcs(p_info):
763 product_pyconf_cfg[p_info.section].get_source = "archive"
764 if not "archive_info" in product_pyconf_cfg[p_info.section]:
765 product_pyconf_cfg[p_info.section].addMapping("archive_info",
766 src.pyconf.Mapping(product_pyconf_cfg),
768 product_pyconf_cfg[p_info.section
769 ].archive_info.archive_name = p_info.name + ".tgz"
771 # write the pyconf file to the temporary project location
772 product_tmp_pyconf_path = os.path.join(products_pyconf_tmp_dir,
774 ff = open(product_tmp_pyconf_path, 'w')
775 ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
776 product_pyconf_cfg.__save__(ff, 1)
# Copy the application pyconf into the project's applications directory,
# redirecting its workdir to a salomeTools-relative reference and disabling
# base compilation.
# NOTE(review): truncated listing — a Reference argument and ff.close() are
# on missing lines.
779 def find_application_pyconf(config, application_tmp_dir):
780 '''Find the application pyconf file and put it in the specific temporary
781 directory containing the specific project of a source package.
783 :param config Config: The global configuration.
784 :param application_tmp_dir str: The path to the temporary application
785 scripts directory of the project.
787 # read the pyconf of the application
788 application_name = config.VARS.application
789 application_pyconf_path = src.find_file_in_lpath(
790 application_name + ".pyconf",
791 config.PATHS.APPLICATIONPATH)
792 application_pyconf_cfg = src.pyconf.Config(application_pyconf_path)
795 application_pyconf_cfg.APPLICATION.workdir = src.pyconf.Reference(
796 application_pyconf_cfg,
798 'VARS.salometoolsway + $VARS.sep + ".."')
800 # Prevent from compilation in base
801 application_pyconf_cfg.APPLICATION.no_base = "yes"
803 # write the pyconf file to the temporary application location
804 application_tmp_pyconf_path = os.path.join(application_tmp_dir,
805 application_name + ".pyconf")
806 ff = open(application_tmp_pyconf_path, 'w')
807 ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
808 application_pyconf_cfg.__save__(ff, 1)
# Package a standalone project: map each path declared in the project
# pyconf to a directory in the archive, rewrite those paths (and add
# project_path if absent) as references, and write the modified "hat" file.
# NOTE(review): truncated listing — the d_project initialization, the `for
# path in paths:` loop header, continue, Reference arguments, ff.close()
# and the final return are on missing lines.
811 def project_package(project_file_path, tmp_working_dir):
812 '''Prepare a dictionary that stores all the needed directories and files to
813 add in a project package.
815 :param project_file_path str: The path to the local project.
816 :param tmp_working_dir str: The temporary local directory containing some
817 specific directories or files needed in the
819 :return: the dictionary that stores all the needed directories and files to
820 add in a project package.
821 {label : (path_on_local_machine, path_in_archive)}
825 # Read the project file and get the directories to add to the package
826 project_pyconf_cfg = src.pyconf.Config(project_file_path)
827 paths = {"ARCHIVEPATH" : "archives",
828 "APPLICATIONPATH" : "applications",
829 "PRODUCTPATH" : "products",
831 "MACHINEPATH" : "machines"}
832 # Loop over the project paths and add it
834 if path not in project_pyconf_cfg:
836 # Add the directory to the files to add in the package
837 d_project[path] = (project_pyconf_cfg[path], paths[path])
838 # Modify the value of the path in the package
839 project_pyconf_cfg[path] = src.pyconf.Reference(
842 'project_path + "/' + paths[path] + '"')
845 if "project_path" not in project_pyconf_cfg:
846 project_pyconf_cfg.addMapping("project_path",
847 src.pyconf.Mapping(project_pyconf_cfg),
849 project_pyconf_cfg.project_path = src.pyconf.Reference(project_pyconf_cfg,
853 # Write the project pyconf file
854 project_file_name = os.path.basename(project_file_path)
855 project_pyconf_tmp_path = os.path.join(tmp_working_dir, project_file_name)
856 ff = open(project_pyconf_tmp_path, 'w')
857 ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n")
858 project_pyconf_cfg.__save__(ff, 1)
860 d_project["Project hat file"] = (project_pyconf_tmp_path, project_file_name)
# Write a README into `where` by substituting a per-package-type template
# (BINARY/SOURCE/PROJECT/SAT) with application/user/date/version values.
# NOTE(review): truncated listing — the `d = {}` initialization, the else
# branches of the profile tests, f.close() and `return readme_path` are on
# missing lines.
864 def add_readme(config, package_type, where):
865 readme_path = os.path.join(where, "README")
866 f = open(readme_path, 'w')
867 # prepare substitution dictionary
869 if package_type == BINARY:
870 d['application'] = config.VARS.application
871 d['user'] = config.VARS.user
872 d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
873 d['version'] = config.INTERNAL.sat_version
874 d['dist'] = config.VARS.dist
875 if 'profile' in config.APPLICATION:
876 d['launcher'] = config.APPLICATION.profile.launcher_name
877 readme_template_path = os.path.join(config.VARS.internal_dir,
878 "README_BIN.template")
880 d['env_file'] = 'env_launch.sh'
881 readme_template_path = os.path.join(config.VARS.internal_dir,
882 "README_BIN_NO_PROFILE.template")
884 if package_type == SOURCE:
885 d['application'] = config.VARS.application
886 d['user'] = config.VARS.user
887 d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
888 d['version'] = config.INTERNAL.sat_version
889 if 'profile' in config.APPLICATION:
890 d['profile'] = config.APPLICATION.profile.product
891 d['launcher'] = config.APPLICATION.profile.launcher_name
892 readme_template_path = os.path.join(config.VARS.internal_dir,
893 "README_SRC.template")
895 if package_type == PROJECT:
896 d['user'] = config.VARS.user
897 d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
898 d['version'] = config.INTERNAL.sat_version
899 readme_template_path = os.path.join(config.VARS.internal_dir,
900 "README_PROJECT.template")
902 if package_type == SAT:
903 d['user'] = config.VARS.user
904 d['date'] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")
905 d['version'] = config.INTERNAL.sat_version
906 readme_template_path = os.path.join(config.VARS.internal_dir,
907 "README_SAT.template")
909 f.write(src.template.substitute(readme_template_path, d))
# NOTE(review): fragment — the enclosing `def description():` header is on a
# missing line of this truncated listing. Returns the --help text for the
# package command.
915 '''method that is called when salomeTools is called with --help option.
917 :return: The text to display for the package command description.
920 return _("The package command creates an archive.\nThere are 4 kinds of "
921 "archive:\n 1- The binary archive. It contains all the product "
922 "installation directories and a launcher,\n 2- The sources archive."
923 " It contains the products archives, a project corresponding to "
924 "the application and salomeTools,\n 3- The project archive. It "
925 "contains a project (give the project file path as argument),\n 4-"
926 " The salomeTools archive. It contains salomeTools.\n\nexample:"
927 "\nsat package SALOME-master --sources")
# Entry point of `sat package`: validate that exactly one package type was
# requested, compute the archive name/path, gather the files via the
# type-specific helper, tar them up, write the README, and clean up the
# temporary working directory.
# NOTE(review): truncated listing — option tuple members, several return
# statements, else branches, the package-type selection conditions, name
# components, the tar.close()/finalization and the final return are on
# missing lines. Code left byte-identical.
929 def run(args, runner, logger):
930 '''method that is called when salomeTools is called with package parameter.
934 (options, args) = parser.parse_args(args)
936 # Check that a type of package is called, and only one
937 all_option_types = (options.binaries,
939 options.project not in ["", None],
942 # Check if no option for package type
943 if all_option_types.count(True) == 0:
944 msg = _("Error: Precise a type for the package\nUse one of the "
945 "following options: --binaries, --sources, --project or"
947 logger.write(src.printcolors.printcError(msg), 1)
948 logger.write("\n", 1)
951 # Check for only one option for package type
952 if all_option_types.count(True) > 1:
953 msg = _("Error: You can use only one type for the package\nUse only one"
954 " of the following options: --binaries, --sources, --project or"
956 logger.write(src.printcolors.printcError(msg), 1)
957 logger.write("\n", 1)
960 # Get the package type
962 package_type = BINARY
964 package_type = SOURCE
966 package_type = PROJECT
970 # The repository where to put the package if not Binary or Source
971 package_default_path = runner.cfg.USER.workdir
973 if package_type in [BINARY, SOURCE]:
974 # Check that the command has been called with an application
975 src.check_config_has_application(runner.cfg)
977 # Display information
978 logger.write(_("Packaging application %s\n") % src.printcolors.printcLabel(
979 runner.cfg.VARS.application), 1)
981 # Get the default directory where to put the packages
982 package_default_path = os.path.join(runner.cfg.APPLICATION.workdir,
984 src.ensure_path_exists(package_default_path)
986 elif package_type == PROJECT:
987 # check that the project is visible by SAT
988 if options.project not in runner.cfg.PROJECTS.project_file_paths:
989 site_path = os.path.join(runner.cfg.VARS.salometoolsway,
992 msg = _("ERROR: the project %(proj)s is not visible by salomeTools."
993 "\nPlease add it in the %(site)s file." % {
994 "proj" : options.project, "site" : site_path})
995 logger.write(src.printcolors.printcError(msg), 1)
996 logger.write("\n", 1)
1000 src.printcolors.print_value(logger, "Package type", package_type, 2)
1002 # get the name of the archive or construct it
1004 if os.path.basename(options.name) == options.name:
1005 # only a name (not a path)
1006 archive_name = options.name
1007 dir_name = package_default_path
1009 archive_name = os.path.basename(options.name)
1010 dir_name = os.path.dirname(options.name)
1012 # suppress extension
1013 if archive_name[-len(".tgz"):] == ".tgz":
1014 archive_name = archive_name[:-len(".tgz")]
1015 if archive_name[-len(".tar.gz"):] == ".tar.gz":
1016 archive_name = archive_name[:-len(".tar.gz")]
1019 dir_name = package_default_path
1020 if package_type == BINARY:
1021 archive_name = (runner.cfg.APPLICATION.name +
1023 runner.cfg.VARS.dist)
1025 if package_type == SOURCE:
1026 archive_name = (runner.cfg.APPLICATION.name +
1029 if options.with_vcs:
1030 archive_name = (runner.cfg.APPLICATION.name +
1036 if package_type == PROJECT:
1037 project_name, __ = os.path.splitext(
1038 os.path.basename(options.project))
1039 archive_name = ("PROJECT" +
1043 if package_type == SAT:
1044 archive_name = ("salomeTools" +
1046 runner.cfg.INTERNAL.sat_version)
1048 path_targz = os.path.join(dir_name, archive_name + ".tgz")
1050 # Print the path of the package
1051 src.printcolors.print_value(logger, "Package path", path_targz, 2)
1053 # Create a working directory for all files that are produced during the
1054 # package creation and that will be removed at the end of the command
1055 tmp_working_dir = os.path.join(runner.cfg.VARS.tmp_root,
1056 runner.cfg.VARS.datehour)
1057 src.ensure_path_exists(tmp_working_dir)
1058 logger.write("\n", 5)
1059 logger.write(_("The temporary working directory: %s\n" % tmp_working_dir),5)
1061 logger.write("\n", 3)
1063 msg = _("Preparation of files to add to the archive")
1064 logger.write(src.printcolors.printcLabel(msg), 2)
1065 logger.write("\n", 2)
1067 if package_type == BINARY:
1068 d_files_to_add = binary_package(runner.cfg,
1072 if not(d_files_to_add):
1075 if package_type == SOURCE:
1076 d_files_to_add = source_package(runner,
1082 if package_type == PROJECT:
1083 d_files_to_add = project_package(options.project, tmp_working_dir)
1085 if package_type == SAT:
1086 d_files_to_add = {"salomeTools" : (runner.cfg.VARS.salometoolsway, "")}
1088 # Add the README file in the package
1089 local_readme_tmp_path = add_readme(runner.cfg,
1092 d_files_to_add["README"] = (local_readme_tmp_path, "README")
1094 # Add the additional files of option add_files
1095 if options.add_files:
1096 for file_path in options.add_files:
1097 if not os.path.exists(file_path):
1098 msg = _("WARNING: the file %s is not accessible.\n" % file_path)
1100 file_name = os.path.basename(file_path)
1101 d_files_to_add[file_name] = (file_path, file_name)
1103 logger.write("\n", 2)
1105 logger.write(src.printcolors.printcLabel(_("Actually do the package")), 2)
1106 logger.write("\n", 2)
1109 # Creating the object tarfile
1110 tar = tarfile.open(path_targz, mode='w:gz')
1112 # Add the files to the tarfile object
1113 res = add_files(tar, archive_name, d_files_to_add, logger)
1115 except KeyboardInterrupt:
1116 logger.write(src.printcolors.printcError("\nERROR: forced interruption\n"), 1)
1117 logger.write(_("Removing the temporary working directory ... "), 1)
1118 # remove the working directory
1119 shutil.rmtree(tmp_working_dir)
1120 logger.write(_("OK"), 1)
1121 logger.write(_("\n"), 1)
1124 # remove the working directory
1125 shutil.rmtree(tmp_working_dir)
1127 # Print again the path of the package
1128 logger.write("\n", 2)
1129 src.printcolors.print_value(logger, "Package path", path_targz, 2)