for name in sorted(d_content.keys()):
# display information
len_points = max_len - len(name)
- logger.write(name + " " + len_points * "." + " ", 3)
- # Get the local path and the path in archive
- # of the directory or file to add
local_path, archive_path = d_content[name]
in_archive = os.path.join(name_archive, archive_path)
+ logger.write(name + " " + len_points * "." + " "+ in_archive + " ", 3)
+ # Get the local path and the path in archive
+ # of the directory or file to add
# Add it in the archive
try:
tar.add(local_path, arcname=in_archive, exclude=f_exclude)
# clean the source directory of all the vcs products, then use the source
# command and thus construct an archive that will not contain the patches
l_prod_names = [pn for pn, __ in l_pinfo_vcs]
- # clean
- logger.write(_("clean sources\n"))
- args_clean = config.VARS.application
- args_clean += " --sources --products "
- args_clean += ",".join(l_prod_names)
- sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)
- # source
- logger.write(_("get sources"))
- args_source = config.VARS.application
- args_source += " --products "
- args_source += ",".join(l_prod_names)
- sat.source(args_source, batch=True, verbose=0, logger_add_link = logger)
-
- # make the new archives
- d_archives_vcs = {}
- for pn, pinfo in l_pinfo_vcs:
- path_archive = make_archive(pn, pinfo, tmp_working_dir)
- d_archives_vcs[pn] = (path_archive,
- os.path.join(ARCHIVE_DIR, pn + ".tgz"))
+ if False:
+ # clean
+ logger.write(_("\nclean sources\n"))
+ args_clean = config.VARS.application
+ args_clean += " --sources --products "
+ args_clean += ",".join(l_prod_names)
+ logger.write("WARNING: get_archives_vcs clean\n '%s'\n" % args_clean, 1)
+ sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)
+ if True:
+ # source
+ logger.write(_("get sources\n"))
+ args_source = config.VARS.application
+ args_source += " --products "
+ args_source += ",".join(l_prod_names)
+ svgDir = sat.cfg.APPLICATION.workdir
+ sat.cfg.APPLICATION.workdir = tmp_working_dir
+ # DBG.write("SSS sat config.APPLICATION.workdir", sat.cfg.APPLICATION, True)
+ DBG.write("sat config id", id(sat.cfg), True)
+      # NOTE: this config does not have the same id() as the one seen by sat.source()
+ # sat.source(args_source, batch=True, verbose=5, logger_add_link = logger)
+ import source
+ source.run(args_source, sat, logger) #use this mode as runner.cfg reference
+
+ # make the new archives
+ d_archives_vcs = {}
+ for pn, pinfo in l_pinfo_vcs:
+ path_archive = make_archive(pn, pinfo, tmp_working_dir)
+ logger.write("make archive vcs '%s'\n" % path_archive)
+ d_archives_vcs[pn] = (path_archive,
+ os.path.join(ARCHIVE_DIR, pn + ".tgz"))
+ sat.cfg.APPLICATION.workdir = svgDir
+ # DBG.write("END sat config", sat.cfg.APPLICATION, True)
return d_archives_vcs
def make_archive(prod_name, prod_info, where):
if options.sat:
d_files_to_add.update({"salomeTools" : (runner.cfg.VARS.salometoolsway, "")})
- DBG.write("config for package %s" % project_name, runner.cfg)
-
+
if options.project:
+ DBG.write("config for package %s" % project_name, runner.cfg)
d_files_to_add.update(project_package(runner.cfg, project_name, options.project_file_path, tmp_working_dir, logger))
if not(d_files_to_add):
tar.close()
except KeyboardInterrupt:
logger.write(src.printcolors.printcError("\nERROR: forced interruption\n"), 1)
- logger.write(_("Removing the temporary working directory ... "), 1)
+ logger.write(_("Removing the temporary working directory '%s'... ") % tmp_working_dir, 1)
# remove the working directory
shutil.rmtree(tmp_working_dir)
logger.write(_("OK"), 1)
logger.write(_("\n"), 1)
return 1
- # remove the working directory
- shutil.rmtree(tmp_working_dir)
+ # remove the working directory
+ DBG.tofix("make shutil.rmtree(tmp_working_dir) effective", "", True)
+ # shutil.rmtree(tmp_working_dir)
# Print again the path of the package
logger.write("\n", 2)
import src
import prepare
+import src.debug as DBG
# Define all possible option for patch command : sat patch <options>
parser = src.options.Options()
max_product_name_len = max(map(lambda l: len(l), products[0])) + 4
# The loop on all the products from which to get the sources
+ # DBG.write("source.get_all_product_sources config id", id(config), True)
for product_name, product_info in products:
+ print "get_all_product_sources", product_name #, product_info
# get product name, product informations and the directory where to put
# the sources
if (not (src.product.product_is_fixed(product_info) or
# the product is not in development mode
is_dev = src.product.product_is_dev(product_info)
if source_dir.exists():
- logger.write('%s ' % src.printcolors.printc(src.OK_STATUS),
- 3,
- False)
- msg = _("INFORMATION : Not doing anything because the source"
- " directory already exists.\n")
+ logger.write('%s ' % src.printcolors.printc(src.OK_STATUS), 3, False)
+ msg = _("INFO : Not doing anything because the source directory already exists:\n %s\n") % source_dir
logger.write(msg, 3)
good_result = good_result + 1
# Do not get the sources and go to next product
def run(args, runner, logger):
'''method that is called when salomeTools is called with source parameter.
'''
+ DBG.write("source.run()", args, True)
# Parse the options
(options, args) = parser.parse_args(args)
This file assume DEBUG functionalities use
- print debug messages in sys.stderr for salomeTools
- show pretty print debug representation from instances of SAT classes
- (pretty print src.pyconf.Config)
+ (pretty print src.pyconf.Config), and python dict/list etc. (as 'aVariable')
-WARNING: supposedly show messages in SAT development phase, not production
+WARNING: these messages are intended for the SAT development phase, not production
usage:
>> import debug as DBG
>> DBG.write("aTitle", aVariable) # not shown in production
->> DBG.write("aTitle", aVariable, True) # unconditionaly shown
+>> DBG.write("aTitle", aVariable, True) # unconditionaly shown (as show=True)
+
+to enable messages, as in the development phase:
+>> DBG.push_debug(True)
+
+to disable messages, as in production:
+>> DBG.push_debug(False)
+
+to temporarily enable messages (development phase) inside a single method:
+>> def aMethodToDebug(...):
+>> DBG.push_debug(True) # force showing (pushes a new debug status)
+>> etc. method code with some DBG.write()
+>> DBG.pop_debug(False) #restore previous status (show or not show)
+>> return
+
+to flag a message for a future fix, so a temporary problem is not forgotten:
+DBG.tofix("aTitle", aVariable, True/False) # True/False: shown in production, or not
"""
import os
def write(title, var="", force=None, fmt="\n#### DEBUG: %s:\n%s\n"):
"""write sys.stderr a message if _debug[-1]==True or optionaly force=True"""
if _debug[-1] or force:
- if 'src.pyconf.Config' in str(type(var)):
+ if 'src.pyconf.' in str(type(var)):
sys.stderr.write(fmt % (title, indent(getStrConfigDbg(var))))
elif type(var) is not str:
sys.stderr.write(fmt % (title, indent(PP.pformat(var))))
def tofix(title, var="", force=None):
"""
write sys.stderr a message if _debug[-1]==True or optionaly force=True
- use this only if no logger accessible for classic logger.warning(message)
+ use this only if no logger accessible for classic
+ logger.warning(message) or logger.debug(message)
"""
fmt = "\n#### TOFIX: %s:\n%s\n"
write(title, var, force, fmt)
prod_pyconf_path = src.find_file_in_lpath(product_name + ".pyconf",
config.PATHS.PRODUCTPATH)
if not prod_pyconf_path:
- msg = _("No definition found for the product %s\n"
- "Please create a %s.pyconf file." % (product_name, product_name))
+ msg = _("""\
+No definition found for the product %(1)s.
+Please create a %(2)s.pyconf file somewhere in:
+%(3)s""") % {
+ "1": product_name,
+ "2": product_name,
+ "3": config.PATHS.PRODUCTPATH }
else:
- msg = _("No definition corresponding to the version %(version)s was"
- " found in the file %(prod_file_path)s.\nPlease add a "
- "section in it." % {"version" : vv,
- "prod_file_path" : prod_pyconf_path} )
+ msg = _("""\
+No definition corresponding to the version %(1)s was found in the file:
+ %(2)s.
+Please add a section in it.""") % {"1" : vv, "2" : prod_pyconf_path}
raise src.SatException(msg)
# Set the debug, dev and version keys
arch_path = src.find_file_in_lpath(arch_name,
config.PATHS.ARCHIVEPATH)
if not arch_path:
- msg = _("Archive %(arch_name)s for %(prod_name)s not found:"
- "\n" % {"arch_name" : arch_name,
- "prod_name" : prod_info.name})
+ msg = _("Archive %(1)s for %(2)s not found.\n") % \
+ {"1" : arch_name, "2" : prod_info.name}
raise src.SatException(msg)
prod_info.archive_info.archive_name = arch_path
else:
arch_name,
config.PATHS.ARCHIVEPATH)
if not arch_path:
- msg = _("Archive %(arch_name)s for %(prod_name)s not found:"
- "\n" % {"arch_name" : arch_name,
- "prod_name" : prod_info.name})
+ msg = _("Archive %(1)s for %(2)s not found:\n") % \
+ {"1" : arch_name, "2" : prod_info.name}
raise src.SatException(msg)
prod_info.archive_info.archive_name = arch_path
if product_has_script(prod_info):
# Check the compil_script key existence
if "compil_script" not in prod_info:
- msg = _("No compilation script found for the product %s\n"
- "Please provide a \"compil_script\" key in its definition."
- % (product_name))
+ msg = _("""\
+No compilation script found for the product %s.
+Please provide a 'compil_script' key in its definition.""") % product_name
raise src.SatException(msg)
# Get the path of the script
config.PATHS.PRODUCTPATH,
"compil_scripts")
if not script_path:
- raise src.SatException(_("Compilation script not found: %s") %
- script_name)
+ raise src.SatException(
+ _("Compilation script not found: %s") % script_name)
prod_info.compil_script = script_path
if src.architecture.is_windows():
prod_info.compil_script = prod_info.compil_script[:-len(".sh")] + ".bat"
# Check that the script is executable
if not os.access(prod_info.compil_script, os.X_OK):
- raise src.SatException(
- _("Compilation script cannot be executed: %s") %
- prod_info.compil_script)
+ #raise src.SatException(
+ # _("Compilation script cannot be executed: %s") %
+ # prod_info.compil_script)
+ print("WARNING: Compilation script cannot be executed:\n %s" % prod_info.compil_script)
# Get the full paths of all the patches
if product_has_patches(prod_info):