logger.warning(msg)
# generate the launch file
- retcode = generate_launch_file(config,
- appli_dir,
- catalog,
- logger,
- SALOME_modules)
+ retcode = generate_launch_file(config, appli_dir, catalog, logger, SALOME_modules)
if retcode == 0:
cmd = UTS.label("%s/salome" % appli_dir)
for product_name in config.APPLICATION.products.keys():
logger.info("%s\n" % product_name)
- return RCO.ReturnCode("OK")
+ return RCO.ReturnCode("OK", "config command done")
# Check that the command has been called with an application
src.check_config_has_application(config)
- logger.write(_('Generation of SALOME modules for application %s\n') % \
- UTS.label(config.VARS.application), 1)
+ logger.info( _('Generation of SALOME modules for application %s\n') % \
+ UTS.label(config.VARS.application) )
status = src.KO_STATUS
# verify that YACSGEN is available
- yacsgen_dir = check_yacsgen(config, options.yacsgen, logger)
-
- if isinstance(yacsgen_dir, tuple):
- # The check failed
- __, error = yacsgen_dir
- msg = _("check yacsgen: %s\n") % error
- logger.error(msg)
- return 1
+ returnCode = check_yacsgen(config, options.yacsgen, logger)
+ if not returnCode.isOk():
+ logger.error(returnCode.getWhy())
+ return returnCode
+ else:
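+ # on success, the ReturnCode from check_yacsgen is assumed to carry the yacsgen directory as its value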
+ yacsgen_dir = returnCode.getValue()
+
# Make the generator module visible by python
sys.path.insert(0, yacsgen_dir)
-
+
logger.info(" insert directory PATH %s = %s\n" % \
- ("YACSGEN", UTS.blue(yacsgen_dir)
+ ("YACSGEN", UTS.blue(yacsgen_dir)) )
products = config.APPLICATION.products
if options.products:
nbgen = 0
context = build_context(config, logger)
for product in products:
+ lprod = UTS.label(product)
- header = _("Generating %s") % UTS.label(product)
+ header = _("Generating %s") % lprod
header += " %s " % ("." * (20 - len(product)))
- logger.write(header, 3)
- logger.flush()
+ logger.info(header)
if product not in config.PRODUCTS:
- logger.write(_("Unknown product\n"), 3, False)
+ logger.error(_("Unknown product %s") % lprod)
continue
pi = src.product.get_product_config(config, product)
if not src.product.product_is_generated(pi):
- logger.write(_("not a generated product\n"), 3, False)
+ logger.info(_("not a generated product %s") % lprod)
continue
nbgen += 1
result = _("ERROR: %s") % result
details.append([product, result])
- if len(details) == 0:
- status = src.OK_STATUS
- else: #if config.USER.output_level != 3:
- logger.write("\n", 2, False)
- logger.write(_("The following modules were not generated correctly:\n"), 2)
+ if len(details) != 0:
+ msg = _("The following modules were not generated correctly:\n")
for d in details:
- logger.write(" %s: %s\n" % (d[0], d[1]), 2, False)
- logger.write("\n", 2, False)
+ msg += " %s: %s\n" % (d[0], d[1])
+ logger.error(msg)
+ return RCO.ReturnCode("KO", msg)
+ else:
+ return RCO.ReturnCode("OK", "generate command done")
- if status == src.OK_STATUS:
- return 0
- return len(details)
-
def generate_component_list(config, product_info, context, logger):
res = "?"
# delete previous generated directory if it already exists
if os.path.exists(compo_info.source_dir):
- logger.write(" delete %s\n" % compo_info.source_dir, 4)
+ logger.debug(" delete %s" % compo_info.source_dir)
shutil.rmtree(compo_info.source_dir)
# generate generates in the current directory => change for generate dir
val = os.getenv(prod_env)
if os.getenv(prod_env) is None:
if p not in config.APPLICATION.products:
- warn = _("product %(product)s is not defined. Include it in the"
- " application or define $%(env)s.") % \
- { "product": p, "env": prod_env}
- logger.write(UTS.red(warn), 1)
- logger.write("\n", 3, False)
+ msg = _("product %s is not defined. Include it in the application or define $%s.") % \
+ (p, prod_env)
+ logger.error(UTS.red(msg))
val = ""
val = ctxenv.environ.environ[prod_env]
dicdir[p] = val
:param config Config: The global configuration.
:param directory str: The directory given by option --yacsgen
:param logger Logger: The logger instance
- :return: The path to yacsgen directory
- :rtype: str
+ :return: RCO.ReturnCode whose value is the path to the yacsgen directory if OK
"""
# first check for YACSGEN (command option, then product, then environment)
yacsgen_dir = None
yacs_src = _("Using YACSGEN from environment")
if yacsgen_dir is None:
- return (False, _("The generate command requires YACSGEN."))
+ return RCO.ReturnCode("KO", _("The generate command requires YACSGEN."))
- logger.write(" %s\n" % yacs_src, 2, True)
- logger.write(" %s\n" % yacsgen_dir, 5, True)
+ logger.info(" %s in %s" % (yacs_src, yacsgen_dir))
if not os.path.exists(yacsgen_dir):
- message = _("YACSGEN directory not found: '%s'") % yacsgen_dir
- return (False, _(message))
+ msg = _("YACSGEN directory not found: '%s'") % yacsgen_dir
+ return RCO.ReturnCode("KO", msg)
# load module_generator
c = check_module_generator(yacsgen_dir)
if c is not None:
- return c
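+ # assumption: ReturnCode's third argument is the value later retrieved via getValue()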
+ return RCO.ReturnCode("OK", "check_module_generator on %s" % yacsgen_dir, c)
pv = os.getenv("PYTHON_VERSION")
if pv is None:
python_info = src.product.get_product_config(config, "Python")
pv = '.'.join(python_info.version.split('.')[:2])
assert pv is not None, "$PYTHON_VERSION not defined"
- yacsgen_dir = os.path.join(yacsgen_dir, "lib", "python%s" % pv,
- "site-packages")
+ yacsgen_dir = os.path.join(yacsgen_dir, "lib", "python%s" % pv, "site-packages")
c = check_module_generator(yacsgen_dir)
if c is not None:
- return c
+ return RCO.ReturnCode("OK", "check_module_generator on %s" % yacsgen_dir, c)
- return (False,
- _("The python module module_generator was not found in YACSGEN"))
+ return RCO.ReturnCode("KO", _("The python module module_generator was not found in YACSGEN"))
options = self.getOptions()
# Print some informations
- logger.write(_('Local Settings of SAT %s\n\n') % \
- UTS.label(config.VARS.salometoolsway), 1)
+ logger.info(_('Local Settings of SAT %s') % UTS.label(config.VARS.salometoolsway))
res = 0
except Exception as e:
err = str(e)
msg = _("Unable to update the local.pyconf file: %s\n") % err
- logger.write(msg, 1)
- return 1
+ logger.error(msg)
+ return RCO.ReturnCode("KO", msg)
- return 0
+ return RCO.ReturnCode("OK")
def display_local_values(config, logger):
""" Display the base path
found = True
break
if not found:
- msg = _("Impossible to find the job %s in %s\n" % \
- (options.job, file_jobs_cfg)
+ msg = _("Impossible to find the job %s in %s\n") % (options.job, file_jobs_cfg)
logger.error(msg)
- return 1
+ return RCO.ReturnCode("KO", msg)
"." * (len_max_command - len(command)) + " ")
error = ""
- stack = ""
# Execute the command
code = sat_command(end_cmd,
options = options,
else:
if sat_command_name != "test":
res = 1
- logger.write('<KO>: %s\n' % error)
-
- if len(stack) > 0:
- logger.write('stack: %s\n' % stack, 3)
+ logger.info('<KO>: %s\n' % error)
# Print the final state
if res == 0:
- final_status = "<OK>"
+ final_status = "OK"
else:
- final_status = "<KO>"
-
- logger.info(_("\nCommands: %s (%d/%d)\n") % \
- (final_status, nb_pass, len(commands)))
-
- return res
+ final_status = "KO"
+
+ msg = "Commands: <%s> (%d/%d)" % (final_status, nb_pass, len(commands))
+ logger.info(msg)
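+ # final_status is the plain "OK"/"KO" string that ReturnCode is assumed to accept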
+ return RCO.ReturnCode(final_status, msg)
import src.debug as DBG
import src.returnCode as RCO
import src.utilsSat as UTS
-from src.salomeTools import _BaseCommand
import src.pyconf as PYCONF
+import src.xmlManager as XMLMGR
+from src.salomeTools import _BaseCommand
STYLESHEET_GLOBAL = "jobs_global_report.xsl"
STYLESHEET_BOARD = "jobs_board_report.xsl"
if not f.endswith('.pyconf'):
continue
cfilename = f[:-7]
- logger.write("%s\n" % cfilename)
- return 0
+ logger.info("%s\n" % cfilename)
+ return RCO.ReturnCode("OK", "jobs command done")
# Make sure the jobs_config option has been called
if not options.jobs_cfg:
- message = _("The option --jobs_config is required\n")
+ msg = _("The option --jobs_config is required\n")
- logger.error(message)
+ logger.error(msg)
- return 1
+ return RCO.ReturnCode("KO", msg)
# Find the file in the directories, unless it is a full path
# merge all in a config
The file configuration %s was not found.
Use the --list option to get the possible files.\n""") % config_file
logger.error(msg)
- return 1
+ return RCO.ReturnCode("KO", msg)
l_conf_files_path.append(file_jobs_cfg)
# Read the config that is in the file
one_config_jobs = src.read_config_from_a_file(file_jobs_cfg)
logger.txtFileName))
# Initialization
- today_jobs = Jobs(runner,
- logger,
- path_pyconf,
- config_jobs)
+ today_jobs = Jobs(runner, logger, path_pyconf, config_jobs)
# SSH connection to all machines
today_jobs.ssh_connection_all_machines()
if options.test_connection:
- return 0
+ return RCO.ReturnCode("OK", "jobs ssh_connection done")
gui = None
if options.publish:
- logger.write(UTS.info(
- _("Initialize the xml boards : ")), 5)
- logger.flush()
-
+ logger.debug(_("Initialize the xml boards : "))
+
# Copy the stylesheets in the log directory
log_dir = log_dir
xsl_dir = os.path.join(config.VARS.srcDir, 'xsl')
logger.debug("<OK>\n\n")
# Display the list of the xml files
- logger.write(UTS.info(("Here is the list of published"
- " files :\n")), 4)
- logger.write("%s\n" % gui.xml_global_file.logFile, 4)
+ logger.info(("List of published files:\n%s\n") % gui.xml_global_file.logFile)
+ msg = ""
for board in gui.d_xml_board_files.keys():
file_path = gui.d_xml_board_files[board].logFile
file_name = os.path.basename(file_path)
- logger.write("%s\n" % file_path, 4)
+ msg += "%s\n" % file_path
logger.add_link(file_name, "board", 0, board)
- logger.write("\n", 4)
+ logger.info(msg)
today_jobs.gui = gui
today_jobs.run_jobs()
except KeyboardInterrupt:
interruped = True
- logger.critical(UTS.red(_("KeyboardInterrupt forced interruption\n"))
+ logger.critical(UTS.red(_("KeyboardInterrupt forced interruption")))
except Exception as e:
# verbose debug message with traceback
- msg = _("Exception raised, the jobs loop has been interrupted:\n\n%s\n")
- import traceback
- logger.critical( msg % UTS.yellow(traceback.format_exc()))
-
+ msg = _("Exception raised, the jobs loop has been interrupted:\n\n%s")
+ import traceback # kept local: traceback may not be imported at module level here
+ logger.critical(msg % UTS.yellow(traceback.format_exc()))
finally:
- res = 0
+ # the finally block kills any remaining subprocesses and collects their logs
+ res = RCO.ReturnCode("OK", "jobs command finally done")
if interruped:
- res = 1
msg = _("Killing the running jobs and trying to get the corresponding logs\n")
- logger.write(UTS.red(msg))
+ logger.warning(UTS.red(msg))
+ res = RCO.ReturnCode("KO", msg)
# find the potential not finished jobs and kill them
for jb in today_jobs.ljobs:
if not jb.has_finished():
- res = 1
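+ # assumption: ReturnCode.__add__ aggregates statuses, so adding any KO makes res KO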
+ res += RCO.ReturnCode("KO", "job %s has not finished" % jb.name)
try:
jb.kill_remote_process()
except Exception as e:
- msg = _("Failed to kill job %(1)s: %(2)s\n") % {"1": jb.name, "2": e}
- logger.write(UTS.red(msg))
+ msg = _("Failed to kill job %s: %s\n") % (jb.name, e)
+ logger.warning(UTS.red(msg))
+ res += RCO.ReturnCode("KO", msg)
if jb.res_job != "0":
- res = 1
+ res += RCO.ReturnCode("KO", "job %s fail" % jb.name)
if interruped:
if today_jobs.gui:
today_jobs.gui.last_update(_("Forced interruption"))
return message
def successfully_connected(self, logger):
- '''Verify if the connection to the remote machine has succeed
+ """\
+ Verify that the connection to the remote machine has succeeded
:param logger src.logger.Logger: The logger instance
:return: True if the connection has succeed, False if not
:rtype: bool
- '''
+ """
if self._connection_successful == None:
message = _("""\
-WARNING : trying to ask if the connection to
- (name: %(1)s host: %(2)s, port: %(3)s, user: %(4)s) is OK
- whereas there were no connection request""" %
- {"1": self.name, "2": self.host, "3": self.port, "4": self.user} )
- logger.write( UTS.red(message))
+Ask if the connection
+(name: %(1)s host: %(2)s, port: %(3)s, user: %(4)s) is OK
+whereas there was no connection request""" % \
+ {"1": self.name, "2": self.host, "3": self.port, "4": self.user} )
+ logger.critical(UTS.red(message))
return self._connection_successful
def copy_sat(self, sat_local_path, job_file):
- '''Copy salomeTools to the remote machine in self.sat_path
- '''
+ """Copy salomeTools to the remote machine in self.sat_path"""
res = 0
try:
# open a sftp connection
# put the job configuration file in order to make it reachable
# on the remote machine
remote_job_file_name = ".%s" % os.path.basename(job_file)
- self.sftp.put(job_file, os.path.join(self.sat_path,
- remote_job_file_name))
+ self.sftp.put(job_file, os.path.join(self.sat_path, remote_job_file_name))
except Exception as e:
res = str(e)
self._connection_successful = False
self.ssh.close()
def write_info(self, logger):
- '''Prints the informations relative to the machine in the logger
- (terminal traces and log file)
+ """\
+ Prints the information about the machine in the logger
+ (terminal traces and log file)
:param logger src.logger.Logger: The logger instance
:return: Nothing
:rtype: N\A
- '''
- logger.write("host : " + self.host + "\n")
- logger.write("port : " + str(self.port) + "\n")
- logger.write("user : " + str(self.user) + "\n")
+ """
if self.successfully_connected(logger):
- status = src.OK_STATUS
+ msg = "<OK>"
else:
- status = src.KO_STATUS
- logger.write("Connection : " + status + "\n\n")
+ msg = "<KO>"
+ msg += " host: %s, " % self.host
+ msg += "port: %s, " % str(self.port)
+ msg += "user: %s" % str(self.user)
+ logger.info("Connection %s" % msg )
class Job(object):
- '''Class to manage one job
- '''
+ """\
+ Class to manage one job
+ """
def __init__(self,
name,
machine,
return self._has_finished
def get_log_files(self):
- """Get the log files produced by the command launched
- on the remote machine, and put it in the log directory of the user,
- so they can be accessible from
+ """\
+ Get the log files produced by the command launched
+ on the remote machine, and put them in the user's log directory
+ so they can be accessed later
"""
# Do not get the files if the command is not finished
if not self.has_finished():
msg = _("Trying to get log files whereas the job is not finished.")
- self.logger.write(UTS.red(msg))
+ self.logger.warning(UTS.red(msg))
return
# First get the file that contains the list of log files to get
tmp_file_path = src.get_tmp_filename(self.config, "list_log_files.txt")
remote_path = os.path.join(self.machine.sat_path, "list_log_files.txt")
- self.machine.sftp.get(
- remote_path,
- tmp_file_path)
+ self.machine.sftp.get(remote_path, tmp_file_path)
# Read the file and get the result of the command and all the log files
# to get
# Prevent multiple run
if self.has_begun():
- msg = _("WARNING: A job can only be launched one time")
- msg2 = _("Trying to launch the job \"%s\" whereas it has "
- "already been launched.") % self.name
- self.logger.write(
- UTS.red("%s\n%s\n" % (msg,msg2)) )
- return
+ msg = _("A job can only be launched once")
+ msg2 = _("Trying to launch the job '%s' whereas it has already been launched.") % self.name
+ self.logger.warning( UTS.red("%s\n%s\n" % (msg,msg2)) )
+ return RCO.ReturnCode("KO", msg2)
# Do not execute the command if the machine could not be reached
if not self.machine.successfully_connected(self.logger):
"""\
Display on the terminal all the job's information
"""
- self.logger.write("name : " + self.name + "\n")
- if self.after:
- self.logger.write("after : %s\n" % self.after)
- self.logger.write("Time elapsed : %4imin %2is \n" %
- (self.total_duration()//60 , self.total_duration()%60))
+ msg = "name : %s\n" % self.name
+ if self.after:
+ msg += "after : %s\n" % self.after
+ msg += "Time elapsed : %4imin %2is \n" % (self.total_duration()//60 , self.total_duration()%60)
if self._T0 != -1:
- self.logger.write("Begin time : %s\n" %
- time.strftime('%Y-%m-%d %H:%M:%S',
- time.localtime(self._T0)) )
+ msg += "Begin time : %s\n" % \
+ time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(self._T0))
if self._Tf != -1:
- self.logger.write("End time : %s\n\n" %
- time.strftime('%Y-%m-%d %H:%M:%S',
- time.localtime(self._Tf)) )
+ msg += "End time : %s\n\n" % \
+ time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(self._Tf))
+
+ self.logger.info(msg)
machine_head = "Informations about connection :\n"
underline = (len(machine_head) - 2) * "-"
- self.logger.write(UTS.info(
- machine_head+underline+"\n"))
+ self.logger.info(machine_head+underline)
self.machine.write_info(self.logger)
- self.logger.write(UTS.info("out : \n"))
+ msg = "out : \n"
if self.out == "":
- self.logger.write("Unable to get output\n")
+ msg += "Unable to get output\n"
else:
- self.logger.write(self.out + "\n")
- self.logger.write(UTS.info("err : \n"))
- self.logger.write(self.err + "\n")
+ msg += self.out + "\n"
+ msg += "err :\n%s\n" % self.err
+ self.logger.info(msg)
def get_status(self):
"""\
if self.cancelled:
return "Cancelled"
if self.is_running():
- return "running since " + time.strftime('%Y-%m-%d %H:%M:%S',
- time.localtime(self._T0))
+ return "running since " + time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(self._T0))
if self.has_finished():
if self.is_timeout():
- return "Timeout since " + time.strftime('%Y-%m-%d %H:%M:%S',
- time.localtime(self._Tf))
- return "Finished since " + time.strftime('%Y-%m-%d %H:%M:%S',
- time.localtime(self._Tf))
+ return "Timeout since " + time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(self._Tf))
+ return "Finished since " + time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(self._Tf))
class Jobs(object):
"""\
if not "machine" in job_def:
msg = _("""\
-WARNING: The job '%s' do not have the key 'machine'.
- This job is ignored.\n""") % job_def.name
- self.logger.write(UTS.red(msg))
+The job '%s' does not have the key 'machine'.
+This job is ignored.
+""") % job_def.name
+ self.logger.warning(msg)
continue
name_machine = job_def.machine
if a_machine == None:
msg = _("""\
-WARNING: The job '%(job)s' requires the machine '%(machine)s'.
- This machine is not defined in the configuration file.
- The job will not be launched.
+The job '%(job)s' requires the machine '%(machine)s'.
+This machine is not defined in the configuration file.
+The job will not be launched.
""") % {"job" : job_def.name, "machine" : name_machine}
- self.logger.write(UTS.red(msg))
+ self.logger.warning(msg)
continue
a_job = self.define_job(job_def, a_machine)
self.lhosts = host_list
def ssh_connection_all_machines(self, pad=50):
- '''Function that do the ssh connection to every machine
- to be used today.
+ """\
+ Do the ssh connection to every machine to be used today.
:return: Nothing
:rtype: N\A
- '''
- self.logger.write(UTS.info((
- "Establishing connection with all the machines :\n")))
+ """
+ self.logger.info( "Establishing connection with all the machines :\n")
for machine in self.lmachines:
# little algorithm in order to display traces
begin_line = (_("Connection to %s: ") % machine.name)
endline = (pad - len(begin_line)) * "." + " "
step = "SSH connection"
- self.logger.write( begin_line + endline + step)
- self.logger.flush()
+ self.logger.info( begin_line + endline + step)
# the call to the method that initiate the ssh connection
msg = machine.connect(self.logger)
# Copy salomeTools to the remote machine
if machine.successfully_connected(self.logger):
step = _("Remove SAT")
- self.logger.write('\r%s%s%s' % (begin_line, endline, 20 * " "),3)
- self.logger.write('\r%s%s%s' % (begin_line, endline, step), 3)
+ self.logger.info('\r%s%s%s' % (begin_line, endline, 20 * " "))
+ self.logger.info('\r%s%s%s' % (begin_line, endline, step))
(__, out_dist, __) = machine.exec_command(
- "rm -rf %s" % machine.sat_path,
- self.logger)
+ "rm -rf %s" % machine.sat_path, self.logger)
out_dist.read()
- self.logger.flush()
step = _("Copy SAT")
- self.logger.write('\r%s%s%s' % (begin_line, endline, 20 * " "),3)
- self.logger.write('\r%s%s%s' % (begin_line, endline, step), 3)
- self.logger.flush()
+ self.logger.info('\r%s%s%s' % (begin_line, endline, 20 * " "))
+ self.logger.info('\r%s%s%s' % (begin_line, endline, step))
+
res_copy = machine.copy_sat(self.runner.cfg.VARS.salometoolsway,
self.job_file_path)
# Print the status of the copy
if res_copy == 0:
- self.logger.write('\r%s' % \
- ((len(begin_line)+len(endline)+20) * " "), 3)
+ self.logger.info('\r%s' % \
+ ((len(begin_line)+len(endline)+20) * " "))
self.logger.info('\r%s%s%s' % (begin_line, endline, "<OK>"))
else:
- self.logger.write('\r%s' % \
- ((len(begin_line)+len(endline)+20) * " "), 3)
+ self.logger.info('\r%s' % \
+ ((len(begin_line)+len(endline)+20) * " "))
self.logger.info('\r%s%s%s %s' % \
(begin_line, endline, "<KO>",
_("Copy of SAT failed: %s") % res_copy))
else:
- self.logger.write('\r%s' %
- ((len(begin_line)+len(endline)+20) * " "), 3)
- self.logger.write('\r%s%s%s %s' % (begin_line, endline, "<KO>", msg))
+ self.logger.info('\r%s' %
+ ((len(begin_line)+len(endline)+20) * " "))
+ self.logger.info('\r%s%s%s %s' % (begin_line, endline, "<KO>", msg))
self.logger.info("\n")
self.logger.info("\n")
return text_out
def display_status(self, len_col):
- '''Takes a lenght and construct the display of the current status
- of the jobs in an array that has a column for each host.
- It displays the job that is currently running on the host
- of the column.
+ """\
+ Takes a length and constructs the display of the current status
+ of the jobs in an array that has a column for each host.
+ It displays the job that is currently running on the host of the column.
:param len_col int: the size of the column
:return: Nothing
:rtype: N\A
- '''
-
+ """
display_line = ""
for host_port in self.lhosts:
jb = self.is_occupied(host_port)
display_line += "|" + UTS.info(
self.str_of_length(jb.name, len_col))
- self.logger.write("\r" + display_line + "|")
- self.logger.flush()
+ self.logger.info("\r" + display_line + "|")
def run_jobs(self):
- '''The main method. Runs all the jobs on every host.
- For each host, at a given time, only one job can be running.
- The jobs that have the field after (that contain the job that has
- to be run before it) are run after the previous job.
- This method stops when all the jobs are finished.
+ """\
+ The main method. Runs all the jobs on every host.
+ For each host, at a given time, only one job can be running.
+ The jobs that have the field 'after' (which contains the job that has
+ to be run before them) are run after the previous job.
+ This method stops when all the jobs are finished.
:return: Nothing
:rtype: N\A
- '''
-
+ """
# Print header
- self.logger.write(
- UTS.info(_('Executing the jobs :\n')) )
+ self.logger.info(_('Executing the jobs :\n'))
text_line = ""
for host_port in self.lhosts:
host = host_port[0]
"("+host+", "+str(port)+")", self.len_columns)
tiret_line = " " + "-"*(len(text_line)-1) + "\n"
- self.logger.write(tiret_line)
- self.logger.write(text_line + "|\n")
- self.logger.write(tiret_line)
- self.logger.flush()
+ self.logger.info(tiret_line + text_line + "|\n" + tiret_line)
# The infinite loop that runs the jobs
l_jobs_not_started = src.deepcopy_list(self.ljobs)
# Make sure that the proc is not entirely busy
time.sleep(0.001)
- self.logger.write("\n")
- self.logger.write(tiret_line)
- self.logger.write("\n\n")
+ self.logger.info("\n" + tiret_line + "\n\n")
if self.gui:
self.gui.update_xml_files(self.ljobs)
self.gui.last_update()
def write_all_results(self):
- '''Display all the jobs outputs.
+ """\
+ Display all the jobs outputs.
:return: Nothing
:rtype: N\A
- '''
-
+ """
for jb in self.ljobs:
- self.logger.write(UTS.label(
- "#------- Results for job %s -------#\n" % jb.name))
+ self.logger.info("#------- Results for job %s -------#\n" % jb.name)
jb.write_results()
- self.logger.write("\n\n")
+ self.logger.info("\n\n")
class Gui(object):
- '''Class to manage the the xml data that can be displayed in a browser to
- see the jobs states
- '''
-
+ """\
+ Class to manage the xml data that can be displayed in a browser
+ to see the jobs states
+ """
def __init__(self,
xml_dir_path,
l_jobs,
prefix,
logger,
file_boards=""):
- '''Initialization
+ """\
+ Initialization
- :param xml_dir_path str: The path to the directory where to put
- the xml resulting files
+ :param xml_dir_path str: The path to the directory where to put the resulting xml files
:param l_jobs List: the list of jobs that run today
:param l_jobs_not_today List: the list of jobs that do not run today
- :param file_boards str: the file path from which to read the
- expected boards
- '''
+ :param file_boards str: the file path from which to read the expected boards
+ """
# The logging instance
self.logger = logger
self.global_name = "global_report"
xml_global_path = os.path.join(self.xml_dir_path,
self.global_name + ".xml")
- self.xml_global_file = src.xmlManager.XmlLogFile(xml_global_path,
- "JobsReport")
+ self.xml_global_file = XMLMGR.XmlLogFile(xml_global_path, "JobsReport")
# Find history for each job
self.history = {}
:param name str: the board name
'''
xml_board_path = os.path.join(self.xml_dir_path, name + ".xml")
- self.d_xml_board_files[name] = src.xmlManager.XmlLogFile(
- xml_board_path,
- "JobsReport")
+ self.d_xml_board_files[name] = XMLMGR.XmlLogFile(xml_board_path,"JobsReport")
self.d_xml_board_files[name].add_simple_node("distributions")
self.d_xml_board_files[name].add_simple_node("applications")
self.d_xml_board_files[name].add_simple_node("board", text=name)
def initialize_boards(self, l_jobs, l_jobs_not_today):
- '''Get all the first information needed for each file and write the
- first version of the files
+ """\
+ Get all the first information needed for each file and write the
+ first version of the files
+
:param l_jobs List: the list of jobs that run today
:param l_jobs_not_today List: the list of jobs that do not run today
- '''
+ """
# Get the boards to fill and put it in a dictionary
# {board_name : xml instance corresponding to the board}
for job in l_jobs + l_jobs_not_today:
if board not in self.d_xml_board_files:
self.add_xml_board(board)
root_node = self.d_xml_board_files[board].xmlroot
- src.xmlManager.append_node_attrib(root_node,
- {"input_file" : self.file_boards})
+ XMLMGR.append_node_attrib(root_node, {"input_file" : self.file_boards})
# Loop over all jobs in order to get the lines and columns for each
# xml file
d_application[board] = []
l_hosts_ports = []
+
+ ASNODE = XMLMGR.add_simple_node # shortcut
for job in l_jobs + l_jobs_not_today:
if (distrib not in [None, ''] and
distrib not in d_dist[board]):
d_dist[board].append(distrib)
- src.xmlManager.add_simple_node(
- self.d_xml_board_files[board].xmlroot.find(
- 'distributions'),
- "dist",
- attrib={"name" : distrib})
+ ASNODE( self.d_xml_board_files[board].xmlroot.find('distributions'),
+ "dist", attrib={"name" : distrib} )
if board_job == board:
if (application not in [None, ''] and
application not in d_application[board]):
d_application[board].append(application)
- src.xmlManager.add_simple_node(
- self.d_xml_board_files[board].xmlroot.find(
- 'applications'),
- "application",
- attrib={
- "name" : application})
+ ASNODE( self.d_xml_board_files[board].xmlroot.find('applications'),
+ "application", attrib={"name" : application} )
# Verify that there are no missing application or distribution in the
# xml board files (regarding the input boards)
continue
for dist in self.d_input_boards[board]["rows"]:
if dist not in l_dist:
- src.xmlManager.add_simple_node(
- self.d_xml_board_files[board].xmlroot.find(
- 'distributions'),
- "dist",
- attrib={"name" : dist})
+ ASNODE( self.d_xml_board_files[board].xmlroot.find('distributions'),
+ "dist", attrib={"name" : dist} )
l_appli = d_application[board]
for appli in self.d_input_boards[board]["columns"]:
if appli not in l_appli:
- src.xmlManager.add_simple_node(
- self.d_xml_board_files[board].xmlroot.find(
- 'applications'),
- "application",
- attrib={"name" : appli})
+ ASNODE( self.d_xml_board_files[board].xmlroot.find('applications'),
+ "application", attrib={"name" : appli} )
# Initialize the hosts_ports node for the global file
- self.xmlhosts_ports = self.xml_global_file.add_simple_node(
- "hosts_ports")
+ self.xmlhosts_ports = self.xml_global_file.add_simple_node( "hosts_ports")
for host, port in l_hosts_ports:
host_port = "%s:%i" % (host, port)
- src.xmlManager.add_simple_node(self.xmlhosts_ports,
- "host_port",
- attrib={"name" : host_port})
+ ASNODE(self.xmlhosts_ports, "host_port", attrib={"name" : host_port})
# Initialize the jobs node in all files
- for xml_file in [self.xml_global_file] + list(
- self.d_xml_board_files.values()):
+ for xml_file in [self.xml_global_file] + list(self.d_xml_board_files.values()):
xml_jobs = xml_file.add_simple_node("jobs")
# Get the jobs present in the config file but
# that will not be launched today
self.put_jobs_not_today(l_jobs_not_today, xml_jobs)
# add also the infos node
- xml_file.add_simple_node("infos",
- attrib={"name" : "last update",
- "JobsCommandStatus" : "running"})
+ xml_file.add_simple_node(
+ "infos", attrib={"name" : "last update", "JobsCommandStatus" : "running"} )
# and put the history node
history_node = xml_file.add_simple_node("history")
if oExpr.search(file_name):
date = os.path.basename(file_name).split("_")[0]
file_path = os.path.join(self.xml_dir_path, file_name)
- src.xmlManager.add_simple_node(history_node,
- "link",
- text=file_path,
- attrib={"date" : date})
+ ASNODE(history_node, "link", text=file_path, attrib={"date" : date})
# Find in each board the squares that needs to be filled regarding the
for board in self.d_input_boards.keys():
xml_root_board = self.d_xml_board_files[board].xmlroot
# Find the missing jobs for today
- xml_missing = src.xmlManager.add_simple_node(xml_root_board,
- "missing_jobs")
+ xml_missing = ASNODE(xml_root_board, "missing_jobs")
for row, column in self.d_input_boards[board]["jobs"]:
found = False
for job in l_jobs:
found = True
break
if not found:
- src.xmlManager.add_simple_node(xml_missing,
- "job",
- attrib={"distribution" : row,
- "application" : column })
+ ASNODE(xml_missing, "job", attrib={"distribution" : row, "application" : column })
# Find the missing jobs not today
- xml_missing_not_today = src.xmlManager.add_simple_node(
- xml_root_board,
- "missing_jobs_not_today")
+ xml_missing_not_today = ASNODE( xml_root_board, "missing_jobs_not_today")
for row, column in self.d_input_boards[board]["jobs_not_today"]:
found = False
for job in l_jobs_not_today:
found = True
break
if not found:
- src.xmlManager.add_simple_node(xml_missing_not_today,
- "job",
- attrib={"distribution" : row,
- "application" : column })
+ ASNODE( xml_missing_not_today, "job",
+ attrib={"distribution" : row, "application" : column } )
def find_history(self, l_jobs, l_jobs_not_today):
"""find, for each job, in the existent xml boards the results for the
if oExpr.search(file_name):
file_path = os.path.join(self.xml_dir_path, file_name)
try:
- global_xml = src.xmlManager.ReadXmlFile(file_path)
+ global_xml = XMLMGR.ReadXmlFile(file_path)
l_globalxml.append(global_xml)
except Exception as e:
- msg = _("WARNING: the file '%(1)s' can not be read, it will be "
- "ignored\n%(2)s") % {"1": file_path, "2": e}
- self.logger.write("%s\n" % UTS.red(
- msg), 5)
+ msg = _("The file '%s' cannot be read, it will be ignored\n%s") % \
+ (file_path, e)
+ self.logger.warning("%s\n" % msg)
# Construct the dictionnary self.history
for job in l_jobs + l_jobs_not_today:
for global_xml in l_globalxml:
date = os.path.basename(global_xml.filePath).split("_")[0]
global_root_node = global_xml.xmlroot.find("jobs")
- job_node = src.xmlManager.find_node_by_attrib(
- global_root_node,
- "job",
- "name",
- job.name)
+ job_node = XMLMGR.find_node_by_attrib(
+ global_root_node, "job", "name", job.name )
if job_node:
if job_node.find("remote_log_file_path") is not None:
link = job_node.find("remote_log_file_path").text
self.history[job.name] = l_links
def put_jobs_not_today(self, l_jobs_not_today, xml_node_jobs):
- '''Get all the first information needed for each file and write the
- first version of the files
+ """\
+ Put the jobs that do not run today in the given xml node
:param xml_node_jobs etree.Element: the node corresponding to a job
:param l_jobs_not_today List: the list of jobs that do not run today
- '''
+ """
+
+ ASNODE = XMLMGR.add_simple_node # shortcut
+
for job in l_jobs_not_today:
- xmlj = src.xmlManager.add_simple_node(xml_node_jobs,
- "job",
- attrib={"name" : job.name})
- src.xmlManager.add_simple_node(xmlj, "application", job.application)
- src.xmlManager.add_simple_node(xmlj,
- "distribution",
- job.machine.distribution)
- src.xmlManager.add_simple_node(xmlj, "board", job.board)
- src.xmlManager.add_simple_node(xmlj,
- "commands", " ; ".join(job.commands))
- src.xmlManager.add_simple_node(xmlj, "state", "Not today")
- src.xmlManager.add_simple_node(xmlj, "machine", job.machine.name)
- src.xmlManager.add_simple_node(xmlj, "host", job.machine.host)
- src.xmlManager.add_simple_node(xmlj, "port", str(job.machine.port))
- src.xmlManager.add_simple_node(xmlj, "user", job.machine.user)
- src.xmlManager.add_simple_node(xmlj, "sat_path",
- job.machine.sat_path)
- xml_history = src.xmlManager.add_simple_node(xmlj, "history")
+ xmlj = ASNODE(xml_node_jobs, "job", attrib={"name" : job.name})
+ ASNODE(xmlj, "application", job.application)
+ ASNODE(xmlj, "distribution", job.machine.distribution)
+ ASNODE(xmlj, "board", job.board)
+ ASNODE(xmlj, "commands", " ; ".join(job.commands))
+ ASNODE(xmlj, "state", "Not today")
+ ASNODE(xmlj, "machine", job.machine.name)
+ ASNODE(xmlj, "host", job.machine.host)
+ ASNODE(xmlj, "port", str(job.machine.port))
+ ASNODE(xmlj, "user", job.machine.user)
+ ASNODE(xmlj, "sat_path", job.machine.sat_path)
+ xml_history = ASNODE(xmlj, "history")
for i, (date, res_job, link) in enumerate(self.history[job.name]):
if i==0:
# tag the first one (the last one)
- src.xmlManager.add_simple_node(xml_history,
- "link",
- text=link,
- attrib={"date" : date,
- "res" : res_job,
- "last" : "yes"})
+ ASNODE( xml_history, "link", text=link,
+ attrib={"date" : date, "res" : res_job, "last" : "yes"} )
else:
- src.xmlManager.add_simple_node(xml_history,
- "link",
- text=link,
- attrib={"date" : date,
- "res" : res_job,
- "last" : "no"})
+ ASNODE( xml_history, "link", text=link,
+ attrib={"date" : date, "res" : res_job, "last" : "no"} )
def parse_csv_boards(self, today):
""" Parse the csv file that describes the boards to produce and fill
time.localtime(job._Tf))
# recreate the job node
- xmlj = src.xmlManager.add_simple_node(xml_node_jobs,
- "job",
- attrib={"name" : job.name})
- src.xmlManager.add_simple_node(xmlj, "machine", job.machine.name)
- src.xmlManager.add_simple_node(xmlj, "host", job.machine.host)
- src.xmlManager.add_simple_node(xmlj, "port", str(job.machine.port))
- src.xmlManager.add_simple_node(xmlj, "user", job.machine.user)
- xml_history = src.xmlManager.add_simple_node(xmlj, "history")
+ ASNODE = XMLMGR.add_simple_node # shortcut (assumed not already defined in this method)
+ xmlj = ASNODE(xml_node_jobs, "job", attrib={"name" : job.name})
+ ASNODE(xmlj, "machine", job.machine.name)
+ ASNODE(xmlj, "host", job.machine.host)
+ ASNODE(xmlj, "port", str(job.machine.port))
+ ASNODE(xmlj, "user", job.machine.user)
+ xml_history = ASNODE(xmlj, "history")
for date, res_job, link in self.history[job.name]:
- src.xmlManager.add_simple_node(xml_history,
- "link",
- text=link,
- attrib={"date" : date,
- "res" : res_job})
+ ASNODE( xml_history, "link", text=link,
+ attrib={"date" : date, "res" : res_job} )
- src.xmlManager.add_simple_node(xmlj, "sat_path",
- job.machine.sat_path)
- src.xmlManager.add_simple_node(xmlj, "application", job.application)
- src.xmlManager.add_simple_node(xmlj, "distribution",
- job.machine.distribution)
- src.xmlManager.add_simple_node(xmlj, "board", job.board)
- src.xmlManager.add_simple_node(xmlj, "timeout", str(job.timeout))
- src.xmlManager.add_simple_node(xmlj, "commands",
- " ; ".join(job.commands))
- src.xmlManager.add_simple_node(xmlj, "state", job.get_status())
- src.xmlManager.add_simple_node(xmlj, "begin", T0)
- src.xmlManager.add_simple_node(xmlj, "end", Tf)
- src.xmlManager.add_simple_node(xmlj, "out", UTS.cleancolor(job.out))
- src.xmlManager.add_simple_node(xmlj, "err", UTS.cleancolor(job.err))
- src.xmlManager.add_simple_node(xmlj, "res", str(job.res_job))
+ ASNODE(xmlj, "sat_path", job.machine.sat_path)
+ ASNODE(xmlj, "application", job.application)
+ ASNODE(xmlj, "distribution", job.machine.distribution)
+ ASNODE(xmlj, "board", job.board)
+ ASNODE(xmlj, "timeout", str(job.timeout))
+ ASNODE(xmlj, "commands", " ; ".join(job.commands))
+ ASNODE(xmlj, "state", job.get_status())
+ ASNODE(xmlj, "begin", T0)
+ ASNODE(xmlj, "end", Tf)
+ ASNODE(xmlj, "out", UTS.cleancolor(job.out))
+ ASNODE(xmlj, "err", UTS.cleancolor(job.err))
+ ASNODE(xmlj, "res", str(job.res_job))
if len(job.remote_log_files) > 0:
- src.xmlManager.add_simple_node(xmlj,
- "remote_log_file_path",
- job.remote_log_files[0])
+ ASNODE(xmlj, "remote_log_file_path", job.remote_log_files[0])
else:
- src.xmlManager.add_simple_node(xmlj,
- "remote_log_file_path",
- "nothing")
+ ASNODE(xmlj, "remote_log_file_path", "nothing")
# Search for the test log if there is any
l_test_log_files = self.find_test_log(job.remote_log_files)
- xml_test = src.xmlManager.add_simple_node(xmlj,
- "test_log_file_path")
+ xml_test = ASNODE(xmlj, "test_log_file_path")
for test_log_path, res_test, nb_fails in l_test_log_files:
- test_path_node = src.xmlManager.add_simple_node(xml_test,
- "path",
- test_log_path)
+ test_path_node = ASNODE(xml_test, "path", test_log_path)
test_path_node.attrib["res"] = res_test
test_path_node.attrib["nb_fails"] = nb_fails
- xmlafter = src.xmlManager.add_simple_node(xmlj, "after", job.after)
+ xmlafter = ASNODE(xmlj, "after", job.after)
# get the job father
if job.after is not None:
job_father = None
link = job_father.remote_log_files[0]
else:
link = "nothing"
- src.xmlManager.append_node_attrib(xmlafter, {"link" : link})
+ XMLMGR.append_node_attrib(xmlafter, {"link" : link})
# Verify that the job is to be done today regarding the input csv
# files
if (job.machine.distribution == dist
and job.application == appli):
found = True
- src.xmlManager.add_simple_node(xmlj,
- "extra_job",
- "no")
+ ASNODE(xmlj, "extra_job", "no")
break
if not found:
- src.xmlManager.add_simple_node(xmlj,
- "extra_job",
- "yes")
+ ASNODE(xmlj, "extra_job", "yes")
# Update the date
xml_node_infos = xml_file.xmlroot.find('infos')
- src.xmlManager.append_node_attrib(xml_node_infos,
- attrib={"value" :
- datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")})
+ XMLMGR.append_node_attrib( xml_node_infos,
+ attrib={"value" : datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")} )
def find_test_log(self, l_remote_log_files):
for file_path in l_remote_log_files:
dirname = os.path.basename(os.path.dirname(file_path))
file_name = os.path.basename(file_path)
- regex = src.logger.log_all_command_file_expression
+ regex = UTS._log_all_command_file_expression
oExpr = re.compile(regex)
if dirname == "TEST" and oExpr.search(file_name):
# find the res of the command
'''
for xml_file in [self.xml_global_file] + list(self.d_xml_board_files.values()):
xml_node_infos = xml_file.xmlroot.find('infos')
- src.xmlManager.append_node_attrib(xml_node_infos,
+ XMLMGR.append_node_attrib(xml_node_infos,
attrib={"JobsCommandStatus" : finish_status})
# Write the file
self.write_xml_files()
# Display some information
if display:
# Write the launcher file
- logger.write(_("Generating launcher for %s :\n") %
- UTS.label(config.VARS.application), 1)
- logger.write(" %s\n" % UTS.label(filepath), 1)
+ msg = _("Generating launcher for %s :\n %s\n") % \
+ (UTS.label(config.VARS.application), UTS.label(filepath))
+ logger.info(msg)
# open the file and write into it
launch_file = open(filepath, "w")
import src.debug as DBG
import src.returnCode as RCO
import src.utilsSat as UTS
-from src.salomeTools import _BaseCommand
+import src.xmlManager as XMLMGR
import src.system as SYSS
+from src.salomeTools import _BaseCommand
# Compatibility python 2/3 for input function
# input stays input for python 3 and input = raw_input for python 2
if options.clean:
nbClean = options.clean
# get the list of files to remove
- lLogs = UTS.list_log_file(logDir, UTS.log_all_command_file_expression)
+ lLogs = UTS.list_log_file(logDir, UTS._log_all_command_file_expression)
nbLogFiles = len(lLogs)
# Delete all if the invoked number is bigger than the number of log files
if nbClean > nbLogFiles:
# loop on all files and print it with date, time and command name
for __, date, hour, cmd, cmdAppli in lLogsFiltered:
num = UTS.label("%2d" % (nb_logs - index))
- logger.write("%s: %13s %s %s %s\n" %
- (num, cmd, date, hour, cmdAppli), 1, False)
+ logger.info("%s: %13s %s %s %s\n" % (num, cmd, date, hour, cmdAppli))
index += 1
# ask the user what for what command he wants to be displayed
:param logger Logger: the logger instance to use for the print
'''
if os.path.exists(filePath):
- logger.write(UTS.red("Removing ")
- + filePath + "\n", 5)
+ logger.debug(UTS.red("Removing %s\n" % filePath))
os.remove(filePath)
def print_log_command_in_terminal(filePath, logger):
'''
logger.debug(_("Reading %s\n") % filePath)
# Instantiate the ReadXmlFile class that reads xml files
- xmlRead = src.xmlManager.ReadXmlFile(filePath)
+ xmlRead = XMLMGR.ReadXmlFile(filePath)
# Get the attributes containing the context (user, OS, time, etc..)
dAttrText = xmlRead.get_attrib('Site')
# format dAttrText and print the context
lAttrText = []
for attrib in dAttrText:
lAttrText.append((attrib, dAttrText[attrib]))
- logger.write("\n", 1)
+
UTS.logger_info_tuples(logger, lAttrText)
# Get the traces
command_traces = xmlRead.get_node_text('Log')
# Print it if there is any
if command_traces:
- logger.info(UTS.header(_("Here are the command traces :\n")))
- logger.info(command_traces + "\n" )
+ msg = _("Here are the command traces :\n%s\n") % command_traces
+ logger.info(msg)
def getMaxFormat(aListOfStr, offset=1):
"""returns format for columns width as '%-30s"' for example"""
# list the logs
nb = len(log_dirs)
fmt1, maxLen = getMaxFormat(log_dirs, offset=1)
- fmt2 = "%s: " + fmt1 # "%s: %-30s" for example
+ fmt2 = "%s: " + fmt1 + "\n" # "%s: %-30s\n" for example
nb_cols = 5
# line ~ no more 100 chars
if maxLen > 20: nb_cols = 4
if maxLen > 50: nb_cols = 1
col_size = (nb / nb_cols) + 1
for index in range(0, col_size):
+ msg = ""
for i in range(0, nb_cols):
k = index + i * col_size
if k < nb:
l = log_dirs[k]
str_indice = UTS.label("%2d" % (k+1))
log_name = l
- logger.write(fmt2 % (str_indice, log_name), 1, False)
- logger.write("\n", 1, False)
+ msg += fmt2 % (str_indice, log_name)
+ logger.info(msg + "\n")
# loop till exit
x = -1
opt.append(str(datetime.datetime.fromtimestamp(my_stat[stat.ST_MTIME])))
opt.append("(%8.2f)" % (my_stat[stat.ST_SIZE] / 1024.0))
- logger.write(" %-35s" % " ".join(opt), 1, False)
- logger.write("%s: %-30s\n" % (str_indice, file_name), 1, False)
+ logger.info(" %-35s" % " ".join(opt))
+ logger.info("%s: %-30s\n" % (str_indice, file_name))
# loop till exit
x = -1
import src.debug as DBG
import src.returnCode as RCO
+import src.utilsSat as UTS
from src.salomeTools import _BaseCommand
########################################################################
products_infos = get_products_list(options, config, logger)
# Print some informations
- logger.write(
- _('Executing the make command in the build directories of the application %s\n') %
- UTS.label(config.VARS.application), 1)
+ logger.info(
+ _('Executing the make command in the build directories of the application %s\n') % \
+ UTS.label(config.VARS.application))
- info = [(_("BUILD directory"),
- os.path.join(config.APPLICATION.workdir, 'BUILD'))]
+ info = [(_("BUILD directory"), os.path.join(config.APPLICATION.workdir, 'BUILD'))]
UTS.logger_info_tuples(logger, info)
# Call the function that will loop over all the products and execute
# Print the final state
nb_products = len(products_infos)
if res == 0:
- final_status = "<OK>"
+ final_status = "OK"
else:
- final_status = "<KO>"
+ final_status = "KO"
- logger.info(_("\nMake: %s (%d/%d)\n") % \
- (final_status, nb_products - res, nb_products))
+ msg = _("\nMake: <%s> (%d/%d)\n") % (final_status, nb_products - res, nb_products)
+ logger.info(msg)
- return res
+ return RCO.ReturnCode(final_status, msg)
def get_products_list(options, cfg, logger):
return products_infos
def log_step(logger, header, step):
- logger.write("\r%s%s" % (header, " " * 20), 3)
- logger.write("\r%s%s" % (header, step), 3)
- logger.write("\n==== %s \n" % UTS.info(step), 4)
- logger.flush()
+ msg = "\r%s%s" % (header, " " * 20)
+ msg += "\r%s%s" % (header, step)
+ logger.info(msg)
+ logger.debug("\n==== %s \n" % UTS.info(step))
def log_res_step(logger, res):
if res == 0:
p_name, p_info = p_name_info
# Logging
- logger.write("\n", 4, False)
- logger.write("################ ", 4)
header = _("Make of %s") % UTS.label(p_name)
header += " %s " % ("." * (20 - len(p_name)))
- logger.write(header, 3)
- logger.write("\n", 4, False)
- logger.flush()
+ logger.info(header)
# Do nothing if he product is not compilable
- if ("properties" in p_info and "compilation" in p_info.properties and
- p_info.properties.compilation == "no"):
+ if ("properties" in p_info and \
+ "compilation" in p_info.properties and \
+ p_info.properties.compilation == "no"):
log_step(logger, header, "ignored")
- logger.write("\n", 3, False)
return 0
# Instantiate the class that manages all the construction commands
# Log the result
if res > 0:
- logger.write("\r%s%s" % (header, " " * len_end_line), 3)
- logger.write("\r" + header + "<KO>")
+ logger.info("\r%s%s" % (header, " " * len_end_line))
+ logger.info("\r" + header + "<KO>")
logger.debug("==== <KO> in make of %s\n" % p_name)
else:
- logger.write("\r%s%s" % (header, " " * len_end_line), 3)
- logger.write("\r" + header + "<OK>")
+ logger.info("\r%s%s" % (header, " " * len_end_line))
+ logger.info("\r" + header + "<OK>")
logger.debug("==== <OK> in make of %s\n" % p_name)
- logger.write("\n")
-
+ logger.info("\n")
return res
def get_nb_proc(product_info, config, make_option):
products_infos = get_products_list(options, config, logger)
# Print some informations
- logger.write(_('Executing the make install command in the build directories of the application %s\n') %
- UTS.label(config.VARS.application), 1)
+ logger.info(_('Executing the make install command in the build directories of the application %s\n') % \
+ UTS.label(config.VARS.application))
info = [(_("BUILD directory"),
os.path.join(config.APPLICATION.workdir, 'BUILD'))]
# Print the final state
nb_products = len(products_infos)
if res == 0:
- final_status = "<OK>"
+ final_status = "OK"
else:
- final_status = "<KO>"
+ final_status = "KO"
- logger.info(_("\nMake install: %s (%d/%d)\n") % \
- (final_status, nb_products - res, nb_products))
+ msg = _("\nMake install: <%s> (%d/%d)\n") % (final_status, nb_products - res, nb_products)
+ logger.info(msg)
- return res
+ return RCO.ReturnCode(final_status, msg)
def get_products_list(options, cfg, logger):
return products_infos
def log_step(logger, header, step):
- logger.write("\r%s%s" % (header, " " * 20), 3)
- logger.write("\r%s%s" % (header, step), 3)
- logger.write("\n==== %s \n" % UTS.info(step), 4)
- logger.flush()
+ logger.info("\r%s%s" % (header, " " * 20))
+ logger.info("\r%s%s" % (header, step))
+ logger.debug("\n==== %s \n" % UTS.info(step))
def log_res_step(logger, res):
if res == 0:
p_name, p_info = p_name_info
# Logging
- logger.write("\n", 4, False)
- logger.write("################ ", 4)
header = _("Make install of %s") % UTS.label(p_name)
header += " %s " % ("." * (20 - len(p_name)))
- logger.write(header, 3)
- logger.write("\n", 4, False)
- logger.flush()
+ logger.info(header)
# Do nothing if he product is not compilable
- if ("properties" in p_info and "compilation" in p_info.properties and
- p_info.properties.compilation == "no"):
+ if ("properties" in p_info and \
+ "compilation" in p_info.properties and \
+ p_info.properties.compilation == "no"):
log_step(logger, header, "ignored")
- logger.write("\n", 3, False)
- return 0
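+ # returning OK here (assumed intent): an ignored product should not count as a failure,
+ # even though the caller's res > 0 test now compares against a ReturnCode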
+ return RCO.ReturnCode("OK", "product %s is not compilable" % p_name)
# Instantiate the class that manages all the construction commands
# like cmake, make, make install, make test, environment management, etc...
# Log the result
if res > 0:
- logger.write("\r%s%s" % (header, " " * 20), 3)
- logger.write("\r" + header + "<KO>")
- logger.error("==== <KO> in make install of s\n" % p_name)
+ logger.info("\r%s%s" % (header, " " * 20))
+ logger.info("\r" + header + "<KO>")
+ logger.debug("==== <KO> in make install of %s\n" % p_name)
else:
- logger.write("\r%s%s" % (header, " " * 20), 3)
- logger.write("\r" + header + "<OK>")
- logger.write("==== <OK> in make install of %s\n" % p_name)
- logger.write("\n", 3, False)
+ logger.info("\r%s%s" % (header, " " * 20))
+ logger.info("\r" + header + "<OK>")
+ logger.debug("==== <OK> in make install of %s\n" % p_name)
+ logger.info("\n")
return res
src.check_config_has_application(config)
# Display information
- logger.write(_("Packaging application %s\n") % \
+ logger.info(_("Packaging application %s\n") % \
UTS.label(config.VARS.application), 1)
# Get the default directory where to put the packages
# Create a working directory for all files that are produced during the
# package creation and that will be removed at the end of the command
- tmp_working_dir = os.path.join(config.VARS.tmp_root,
- config.VARS.datehour)
+ tmp_working_dir = os.path.join(config.VARS.tmp_root, config.VARS.datehour)
UTS.ensure_path_exists(tmp_working_dir)
- logger.write("\n", 5)
- logger.write(_("The temporary working directory: %s\n") % tmp_working_dir, 5)
+ logger.debug(_("The temporary working directory: %s\n") % tmp_working_dir)
- logger.write("\n", 3)
-
msg = _("Preparation of files to add to the archive")
- logger.write(UTS.label(msg), 2)
- logger.write("\n", 2)
+ logger.info(UTS.label(msg))
d_files_to_add={} # content of the archive
d_paths_to_substitute={}
if options.binaries:
- d_bin_files_to_add = binary_package(config,
- logger,
- options,
- tmp_working_dir)
+ d_bin_files_to_add = binary_package(config, logger, options, tmp_working_dir)
# for all binaries dir, store the substitution that will be required
# for extra compilations
for key in d_bin_files_to_add:
d_paths_to_substitute,
"install_bin.sh")
d_files_to_add.update({"install_bin" : (file_install_bin, "install_bin.sh")})
- logger.write("substitutions that need to be done later : \n", 5)
- logger.write(str(d_paths_to_substitute), 5)
- logger.write("\n", 5)
+ logger.debug("substitutions to be done later:\n%s\n" % str(d_paths_to_substitute))
+
else:
# --salomeTool option is not considered when --sources is selected, as this option
# already brings salomeTool!
d_files_to_add.update(project_package(options.project, tmp_working_dir))
if not(d_files_to_add):
- msg = _("Empty dictionnary to build the archive.\n")
+ msg = _("Empty dictionary to build the archive.\n")
logger.error(msg)
- return 1
+ return RCO.ReturnCode("KO", msg)
if options.add_files:
for file_path in options.add_files:
if not os.path.exists(file_path):
- msg = _("WARNING: the file %s is not accessible.\n") % file_path
+ msg = _("The file %s is not accessible.\n") % file_path
+ logger.warning(msg)
continue
file_name = os.path.basename(file_path)
d_files_to_add[file_name] = (file_path, file_name)
- logger.write("\n", 2)
-
- logger.write(UTS.label(_("Actually do the package")), 2)
- logger.write("\n", 2)
+ msg = UTS.label(_("Actually do the package"))
+ logger.info("\n%s\n" % msg)
try:
# Creating the object tarfile
def add_files(tar, name_archive, d_content, logger, f_exclude=None):
- '''Create an archive containing all directories and files that are given in
- the d_content argument.
+ """\
+ Create an archive containing all directories and files that are given
+ in the d_content argument.
:param tar tarfile: The tarfile instance used to make the archive.
:param name_archive str: The name of the archive to make.
:param f_exclude Function: the function that filters
:return: 0 if success, 1 if not.
:rtype: int
- '''
+ """
# get the max length of the messages in order to make the display
max_len = len(max(d_content.keys(), key=len))
for name in d_content.keys():
# display information
len_points = max_len - len(name)
- logger.write(name + " " + len_points * "." + " ", 3)
+ logger.info(name + " " + len_points * "." + " ")
# Get the local path and the path in archive
# of the directory or file to add
local_path, archive_path = d_content[name]
return success
def exclude_VCS_and_extensions(filename):
- ''' The function that is used to exclude from package the link to the
- VCS repositories (like .git)
+ """\
+ The function that is used to exclude from package the link to the
+ VCS repositories (like .git)
:param filename Str: The filname to exclude (or not).
:return: True if the file has to be exclude
:rtype: Boolean
- '''
+ """
for dir_name in IGNORED_DIRS:
if dir_name in filename:
return True
'''
# Get all the products that are prepared using an archive
- logger.write("Find archive products ... ")
+ logger.info("Find archive products ... ")
d_archives, l_pinfo_vcs = get_archives(config, logger)
- logger.write("Done\n")
+ logger.info("Done\n")
d_archives_vcs = {}
if not options.with_vcs and len(l_pinfo_vcs) > 0:
# Make archives with the products that are not prepared using an archive
# (git, cvs, svn, etc)
- logger.write("Construct archives for vcs products ... ")
+ logger.info("Construct archives for vcs products ... ")
d_archives_vcs = get_archives_vcs(l_pinfo_vcs,
sat,
config,
logger,
tmp_working_dir)
- logger.write("Done\n")
+ logger.info("Done\n")
# Create a project
- logger.write("Create the project ... ")
- d_project = create_project_for_src_package(config,
- tmp_working_dir,
- options.with_vcs)
- logger.write("Done\n")
+ logger.info("Create the project ... ")
+ d_project = create_project_for_src_package(config, tmp_working_dir, options.with_vcs)
+ logger.info("Done\n")
# Add salomeTools
tmp_sat = add_salomeTools(config, tmp_working_dir)
# command and thus construct an archive that will not contain the patches
l_prod_names = [pn for pn, __ in l_pinfo_vcs]
# clean
- logger.write(_("clean sources\n"))
+ logger.info(_("clean sources\n"))
args_clean = config.VARS.application
args_clean += " --sources --products "
args_clean += ",".join(l_prod_names)
sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)
# source
- logger.write(_("get sources"))
+ logger.info(_("get sources"))
args_source = config.VARS.application
args_source += " --products "
args_source += ",".join(l_prod_names)
product_info,
max_product_name_len,
logger)
- logger.write(patch_res, 1, False)
+ logger.info(patch_res)
if return_code:
good_result += 1
# Display the results (how much passed, how much failed, etc...)
- logger.write("\n", 2, False)
+ logger.info("\n")
if good_result == len(products_infos):
- status = "<OK>"
+ status = "OK"
else:
- status = "<KO>"
+ status = "KO"
# write results
- logger.info(_("\nPatching sources of the application: %s (%d/%d)\n") % \
- (status, good_result, len(products_infos)))
+ msg = _("\nPatching sources of the application: <%s> (%d/%d)\n") % \
+ (status, good_result, len(products_infos))
+ logger.info(msg)
- return len(products_infos) - good_result
+ return RCO.ReturnCode(status, msg)
def apply_patch(config, product_info, max_product_name_len, logger):
- '''The method called to apply patches on a product
+ """\
+ The method called to apply patches on a product
:param config Config: The global configuration
:param product_info Config: The configuration specific to
the product to be patched
:param logger Logger: The logger instance to use for the display and logging
- :return: (True if it succeed, else False, message to display)
- :rtype: (boolean, str)
- '''
+ :return: RCO.ReturnCode
+ """
# if the product is native, do not apply patch
if src.product.product_is_native(product_info):
# display and log
- logger.write('%s: ' % UTS.label(product_info.name), 4)
- logger.write(' ' * (max_product_name_len - len(product_info.name)), 4, False)
- logger.write("\n", 4, False)
- msg = _("The %s product is native. Do not apply any patch.") % product_info.name
- logger.write(msg, 4)
- logger.write("\n", 4)
- return True, ""
+ logger.info('%s: ' % UTS.label(product_info.name))
+ logger.info(' ' * (max_product_name_len - len(product_info.name)))
+ logger.info("\n")
+ msg = _("The %s product is native. Do not apply any patch") % product_info.name
+ logger.info(msg + "\n")
+ return RCO.ReturnCode("OK", msg)
if not "patches" in product_info or len(product_info.patches) == 0:
# display and log
- logger.write('%s: ' % UTS.label(product_info.name), 4)
- logger.write(' ' * (max_product_name_len - len(product_info.name)), 4, False)
- logger.write("\n", 4, False)
+ logger.info('%s: ' % UTS.label(product_info.name))
+ logger.info(' ' * (max_product_name_len - len(product_info.name)))
+ logger.info("\n")
msg = _("No patch for the %s product") % product_info.name
- logger.write(msg, 4)
- logger.write("\n", 4)
- return True, ""
+ logger.info(msg + "\n")
+ return RCO.ReturnCode("OK", msg)
else:
# display and log
- logger.write('%s: ' % UTS.label(product_info.name), 3)
- logger.write(' ' * (max_product_name_len - len(product_info.name)), 3, False)
- logger.write("\n", 4, False)
+ logger.info('%s: ' % UTS.label(product_info.name))
+ logger.info(' ' * (max_product_name_len - len(product_info.name)))
+ logger.info("\n")
if not os.path.exists(product_info.source_dir):
- msg = _("No sources found for the %s product\n") % product_info.name
- logger.write(UTS.red(msg), 1)
- return False, ""
+ msg = _("No sources found for the %s product") % product_info.name
+ logger.error(UTS.red(msg))
+ return RCO.ReturnCode("KO", msg)
# At this point, there are one or more patches and the source directory exists
retcode = []
patch_cmd = "patch -p1 < %s" % patch
# Write the command in the terminal if verbose level is at 5
- logger.write((" >%s\n" % patch_cmd),5)
+ logger.info(" >%s\n" % patch_cmd)
# Write the command in the log file (can be seen using 'sat log')
logger.logTxtFile.write("\n >%s\n" % patch_cmd)
if len(details) > 0:
retcode.extend(details)
- res = not (False in res)
+ if False in res:
+ rc = "KO"
+ else:
+ rc = "OK"
- return res, "\n".join(retcode) + "\n"
+ return RCO.ReturnCode(rc, "\n".join(retcode))
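For each entry of product_info.patches, the loop above shells out the patch_cmd shown at the top of the hunk. A self-contained sketch of that invocation, with hypothetical paths:

    import subprocess

    def run_one_patch(source_dir, patch_file):
        # apply from the source root, stripping one leading path component,
        # the same shape as the patch_cmd built above
        cmd = "patch -p1 < %s" % patch_file
        proc = subprocess.Popen(cmd, shell=True, cwd=source_dir,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT)
        out, __ = proc.communicate()
        return proc.returncode == 0, out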
if not options.force and len(ldev_products) > 0:
l_products_not_getted = find_products_already_getted(ldev_products)
if len(l_products_not_getted) > 0:
- msg = _("Do not get the source of the following products in development mode\n"
- " Use the --force option to overwrite it.\n")
- logger.write(UTS.red(msg), 1)
+ msg = _("""\
+Do not get the source of the following products in development mode.
+Use the --force option to overwrite it.
+""")
+ logger.error(UTS.red(msg))
args_product_opt_clean = remove_products(args_product_opt_clean,
l_products_not_getted,
logger)
- logger.write("\n", 1)
args_product_opt_patch = args_product_opt
if not options.force_patch and len(ldev_products) > 0:
l_products_with_patchs = find_products_with_patchs(ldev_products)
if len(l_products_with_patchs) > 0:
- msg = _("do not patch the following products in development mode\n"
- " Use the --force_patch option to overwrite it.\n")
- logger.write(UTS.red(msg), 1)
+ msg = _("""
+Do not patch the following products in development mode.
+Use the --force_patch option to overwrite it.
+""")
+ logger.error(UTS.red(msg))
args_product_opt_patch = remove_products(args_product_opt_patch,
l_products_with_patchs,
logger)
- logger.write("\n", 1)
# Construct the final commands arguments
args_clean = args_appli + args_product_opt_clean + " --sources"
# Call the commands using the API
if do_clean:
msg = _("Clean the source directories ...")
- logger.write(msg, 3)
- logger.flush()
+ logger.info(msg)
DBG.tofix("args_clean and TODO remove returns", args_clean, True)
res_clean = runner.getCommand("clean").run(args_clean)
return res_clean + res_source + res_patch
if do_source:
msg = _("Get the sources of the products ...")
- logger.write(msg, 5)
+ logger.debug(msg)
res_source = runner.getCommand("source").run(args_source)
if do_patch:
msg = _("Patch the product sources (if any) ...")
- logger.write(msg, 5)
+ logger.debug(msg)
res_patch = runner.getCommand("patch").run(args_patch)
return res_clean + res_source + res_patch
def remove_products(arguments, l_products_info, logger):
- '''function that removes the products in l_products_info from arguments list.
+ """
+ Removes the products in l_products_info from the arguments list.
:param arguments str: The arguments from which to remove products
:param l_products_info list: List of (product name, product info) tuples
:param logger Logger: The logger instance to use for the display and logging
:return: The updated arguments.
:rtype: str
- '''
+ """
args = arguments
for i, (product_name, __) in enumerate(l_products_info):
args = args.replace(',' + product_name, '')
end_text = ', '
if i+1 == len(l_products_info):
end_text = '\n'
- logger.write(product_name + end_text, 1)
+ logger.info(product_name + end_text)
return args
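A quick usage sketch of the replace logic above, with hypothetical product names; it removes ',' + product_name, so every product to be removed must be preceded by a comma in the argument string:

    args = "APPLI --products KERNEL,GUI,MEDCOUPLING"
    for name in ["GUI", "MEDCOUPLING"]:
        args = args.replace(',' + name, '')
    assert args == "APPLI --products KERNEL"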
def find_products_already_getted(l_products):
if options.prefix is None:
msg = _("The --%s argument is required\n") % "prefix"
- logger.write(UTS.red(msg), 1)
- return 1
+ logger.error(msg)
+ return RCO.ReturnCode("KO", msg)
retcode = generate_profile_sources(config, options, logger)
res = config.APPLICATION.name + "_PROFILE"
return res
-##
-# Generates the sources of the profile
def generate_profile_sources( config, options, logger ):
+ """
+ Generates the sources of the profile
+ """
#Check script app-quickstart.py exists
kernel_cfg = src.product.get_product_config(config, "KERNEL")
kernel_root_dir = kernel_cfg.install_dir
raise Exception(_("KERNEL is not installed"))
script = os.path.join(kernel_root_dir,"bin","salome","app-quickstart.py")
if not os.path.exists( script ):
- raise Exception(
- _("KERNEL's install has not the script app-quickstart.py") )
+ raise Exception( _("KERNEL's install has not the script app-quickstart.py") )
# Check that GUI is installed
gui_cfg = src.product.get_product_config(config, "GUI")
command += " --force"
if options.slogan :
command += " --slogan=%s" % options.slogan
- logger.write("\n>" + command + "\n", 5, False)
+ logger.debug("\n>" + command + "\n")
#Run command
os.environ["KERNEL_ROOT_DIR"] = kernel_root_dir
if res != 0:
raise Exception(_("Cannot create application, code = %d\n") % res)
else:
- logger.write(
- _("Profile sources were generated in directory %s.\n" % prefix), 3 )
+ logger.info( _("Profile sources were generated in directory %s.\n" % prefix) )
return res
-##
-# Updates the pyconf
+
def update_pyconf( config, options, logger ):
+ """
+ Updates the pyconf
+ """
#Save previous version
pyconf = config.VARS.product + '.pyconf'
stdout=logger.logTxtFile,
stderr=subprocess.STDOUT)
- # Display information : how to get the logs
- messageFirstPart = _("\nEnd of execution. To see the traces, "
- "please tap the following command :\n")
- messageSecondPart = UTS.label( config.VARS.salometoolsway + os.sep +
- "sat log " + config.VARS.application + "\n")
- logger.write(" %s\n" %(messageFirstPart + messageSecondPart), 2)
+ # Display information: how to get the logs
+ msg1 = _("End of 'sat run'. To see traces, type:")
+ msg2 = UTS.label("sat log " + config.VARS.application)
+ msg = "%s\n%s\n" % (msg1, msg2)
+ logger.info(msg)
- return 0
+ return RCO.ReturnCode("OK", msg)
# Print some information
msg = ('Executing the script in the build directories of the application %s\n') % \
UTS.label(config.VARS.application)
- logger.write(msg, 1)
+ logger.info(msg)
info = [(_("BUILD directory"), os.path.join(config.APPLICATION.workdir, 'BUILD'))]
UTS.logger_info_tuples(logger, info)
return products_infos
def log_step(logger, header, step):
- logger.write("\r%s%s" % (header, " " * 20), 3)
- logger.write("\r%s%s" % (header, step), 3)
- logger.write("\n==== %s \n" % UTS.info(step), 4)
- logger.flush()
+ logger.info("\r%s%s" % (header, " " * 20))
+ logger.info("\r%s%s" % (header, step))
+ logger.debug("\n==== %s \n" % UTS.info(step))
def log_res_step(logger, res):
if res == 0:
p_name, p_info = p_name_info
# Logging
- logger.write("\n", 4, False)
- logger.write("################ ", 4)
header = _("Running script of %s") % UTS.label(p_name)
header += " %s " % ("." * (20 - len(p_name)))
- logger.write(header, 3)
- logger.write("\n", 4, False)
- logger.flush()
+ logger.info("\n" + header)
# Do nothing if the product is not compilable or has no compilation script
- if ( ("properties" in p_info and
- "compilation" in p_info.properties and
- p_info.properties.compilation == "no") or
- (not src.product.product_has_script(p_info)) ):
+ test1 = "properties" in p_info and \
+ "compilation" in p_info.properties and \
+ p_info.properties.compilation == "no"
+ if ( test1 or (not src.product.product_has_script(p_info)) ):
log_step(logger, header, "ignored")
- logger.write("\n", 3, False)
+ logger.info("\n")
return 0
# Instantiate the class that manages all the construction commands
# Log the result
if res > 0:
- logger.write("\r%s%s" % (header, " " * len_end_line), 3)
- logger.write("\r" + header + "<KO>")
+ logger.info("\r%s%s" % (header, " " * len_end_line))
+ logger.info("\r" + header + "<KO>")
logger.debug("==== <KO> in script execution of %s\n" % p_name)
else:
- logger.write("\r%s%s" % (header, " " * len_end_line), 3)
- logger.write("\r" + header + "<OK>"))
+ logger.info("\r%s%s" % (header, " " * len_end_line))
+ logger.info("\r" + header + "<OK>")
logger.debug("==== <OK> in script execution of %s\n" % p_name)
- logger.write("\n")
+ logger.info("\n")
return res
# Print the input command
msg = _("Command to execute:\n%s\nExecution ... ") % options.command
- logger.write(msg, 3)
+ logger.info(msg)
# Call the input command
res = subprocess.call(options.command,
# Format the result to be 0 (success) or 1 (fail)
if res != 0:
- res = 1
- logger.info("<KO>\n")
+ status = "KO"
else:
- logger.info("<OK>\n")
-
- return res
+ status = "OK"
+
+ logger.info("<%s>\n" % status)
+ return RCO.ReturnCode(status, "shell command done")
src.check_config_has_application( config )
# Print some information
- logger.write(_('Getting sources of the application %s\n') % \
- UTS.label(config.VARS.application), 1)
+ logger.info(_('Getting sources of the application %s\n') % \
+ UTS.label(config.VARS.application))
logger.info(" workdir = %s\n" % config.APPLICATION.workdir)
def get_source_for_dev(config, product_info, source_dir, logger, pad):
- '''The method called if the product is in development mode
+ """\
+ Called if the product is in development mode
:param config Config: The global configuration
:param product_info Config: The configuration specific to
:param pad int: The gap to apply for the terminal display
:return: True if it succeeds, else False
:rtype: boolean
- '''
+ """
# Call the function corresponding to get the sources with True checkout
retcode = get_product_sources(config,
logger,
pad,
checkout=True)
- logger.write("\n", 3, False)
# +2 because product name is followed by ': '
- logger.write(" " * (pad+2), 3, False)
-
- logger.write('dev: %s ... ' % \
- UTS.info(product_info.source_dir), 3, False)
- logger.flush()
+ logger.info("\n" + " " * (pad+2))
+ logger.info('dev: %s ... ' % UTS.info(product_info.source_dir))
return retcode
pad,
is_dev=False,
environ = None):
- '''The method called if the product is to be get in git mode
+ """\
+ Called if the product is to be retrieved in git mode
:param product_info Config: The configuration specific to
the product to be prepared
extracting.
:return: True if it succeeds, else False
:rtype: boolean
- '''
+ """
# The str to display
coflag = 'git'
msg += " " * (pad + 50 - len(repo_git))
msg += " tag:%s" % product_info.git_info.tag
msg += "%s. " % "." * (10 - len(product_info.git_info.tag))
- logger.write("\n" + msg)
+ logger.info("\n" + msg)
# Call the system function that do the extraction in git mode
retcode = SYSS.git_extract(repo_git,
raise Exception(_("Archive not found: '%s'") % \
product_info.archive_info.archive_name)
- logger.write('arc:%s ... ' % \
- UTS.info(product_info.archive_info.archive_name),
- 3, False)
- logger.flush()
+ logger.info('arc:%s ... ' % UTS.info(product_info.archive_info.archive_name))
# Call the system function that do the extraction in archive mode
retcode, NameExtractedDirectory = SYSS.archive_extract(
product_info.archive_info.archive_name,
logger.error(msg)
return False
- logger.write('DIR: %s ... ' % UTS.info(
- product_info.dir_info.dir), 3)
-
- retcode = src.Path(product_info.dir_info.dir).copy(source_dir)
-
+ logger.info('DIR: %s ... ' % UTS.info(product_info.dir_info.dir))
+ retcode = src.Path(product_info.dir_info.dir).copy(source_dir)
return retcode
def get_source_from_cvs(user,
# at least one '.' is visible
msg += " %s. " % ("." * (10 - len(product_info.cvs_info.tag)))
- logger.write(msg)
+ logger.info(msg)
# Call the system function that do the extraction in cvs mode
retcode = SYSS.cvs_extract(protocol, user,
coflag = 'svn'
if checkout: coflag = coflag.upper()
- logger.write('%s:%s ... ' % (coflag, product_info.svn_info.repo)
+ logger.info('%s:%s ... ' % (coflag, product_info.svn_info.repo))
# Call the system function that do the extraction in svn mode
retcode = SYSS.svn_extract(user,
- product_info.svn_info.repo,
- product_info.svn_info.tag,
- source_dir,
- logger,
- checkout,
- environ)
+ product_info.svn_info.repo,
+ product_info.svn_info.tag,
+ source_dir,
+ logger,
+ checkout,
+ environ)
return retcode
def get_product_sources(config,
if product_info.get_source == "native":
# skip
msg = "<OK>" + _("\ndo nothing because the product is of type 'native'.\n")
- logger.write(msg)
+ logger.info(msg)
return True
if product_info.get_source == "fixed":
# skip
msg = "<OK>" + _("\ndo nothing because the product is of type 'fixed'.\n")
- logger.write(msg)
+ logger.info(msg)
return True
# if the get_source is not in [git, archive, cvs, svn, fixed, native]
msg = _("Unknown get source method '%s' for product %s") % \
( product_info.get_source, product_info.name)
- logger.write("%s ... " % msg)
+ logger.info("%s ... " % msg)
return False
def get_all_product_sources(config, products, logger):
source_dir = src.Path('')
# display and log
- logger.write('%s: ' % UTS.label(product_name), 3)
- logger.write(' ' * (max_product_name_len - len(product_name)), 3, False)
- logger.write("\n", 4, False)
+ logger.info('%s: ' % UTS.label(product_name))
+ logger.info(' ' * (max_product_name_len - len(product_name)))
# Remove the existing source directory if
# the product is not in development mode
logger.error(msg)
return 1
- # CNC inutile
- # Ask user confirmation if a module of the same name already exists
- #if options.name in config.PRODUCTS and not runner.options.batch:
- # logger.write(UTS.red(
- # _("A module named '%s' already exists." % options.name)), 1)
- # logger.write("\n", 1)
- # rep = input(_("Are you sure you want to continue? [Yes/No] "))
- # if rep.upper() != _("YES"):
- # return 1
-
if options.target is None:
logger.error(msg_miss % "target")
return 1
logger.error(msg)
return 1
- # CNC inutile
- #if options.template == "Application":
- # if "_APPLI" not in options.name and not runner.options.batch:
- # msg = _("An Application module named '..._APPLI' "
- # "is usually recommended.")
- # logger.write(UTS.red(msg), 1)
- # logger.write("\n", 1)
- # rep = input(_("Are you sure you want to continue? [Yes/No] "))
- # if rep.upper() != _("YES"):
- # return 1
-
msg = ""
msg += _('Create sources from template\n')
msg += ' destination = %s\n' % target_dir
raise Exception(_("Template not found: %s") % template)
return template_src_dir
-##
-# Prepares a module from a template.
+
+
def prepare_from_template(config,
name,
template,
target_dir,
conf_values,
logger):
+ """Prepares a module from a template."""
template_src_dir = search_template(config, template)
res = 0
# copy the template
if os.path.isfile(template_src_dir):
- logger.write(" " + _(
- "Extract template %s\n") % UTS.info(
- template), 4)
+ logger.info(_("Extract template %s\n") % UTS.info(template))
SYSS.archive_extract(template_src_dir, target_dir)
else:
- logger.write(" " + _(
- "Copy template %s\n") % UTS.info(
- template), 4)
+ logger.info(_("Copy template %s\n") % UTS.info(template))
shutil.copytree(template_src_dir, target_dir)
- logger.write("\n", 5)
+
compo_name = name
if name.endswith("CPP"):
tsettings = TemplateSettings(compo_name, settings_file, target_dir)
# first rename the files
- logger.write(" " + UTS.label(_("Rename files\n")), 4)
+ logger.debug(UTS.label(_("Rename files\n"))
for root, dirs, files in os.walk(target_dir):
for fic in files:
ff = fic.replace(tsettings.file_subst, compo_name)
raise Exception(
_("Destination file already exists: %s") % \
os.path.join(root, ff) )
- logger.write(" %s -> %s\n" % (fic, ff), 5)
+ logger.debug(" %s -> %s\n" % (fic, ff))
os.rename(os.path.join(root, fic), os.path.join(root, ff))
# rename the directories
- logger.write("\n", 5)
- logger.write(" " + UTS.label(_("Rename directories\n")),
- 4)
+ logger.debug(UTS.label(_("Rename directories\n")))
for root, dirs, files in os.walk(target_dir, topdown=False):
for rep in dirs:
dd = rep.replace(tsettings.file_subst, compo_name)
raise Exception(
_("Destination directory already exists: %s") % \
os.path.join(root, dd) )
- logger.write(" %s -> %s\n" % (rep, dd), 5)
+ logger.debug(" %s -> %s\n" % (rep, dd))
os.rename(os.path.join(root, rep), os.path.join(root, dd))
# ask for missing parameters
- logger.write("\n", 5)
- logger.write(" " + UTS.label(
- _("Make substitution in files\n")), 4)
- logger.write(" " + _("Delimiter =") + " %s\n" % tsettings.delimiter_char,
- 5)
- logger.write(" " + _("Ignore Filters =") + " %s\n" % ', '.join(
- tsettings.ignore_filters), 5)
+ logger.debug(UTS.label(_("Make substitution in files\n")))
+ logger.debug(_("Delimiter =") + " %s\n" % tsettings.delimiter_char)
+ logger.debug(_("Ignore Filters =") + " %s\n" % ', '.join(tsettings.ignore_filters))
dico = tsettings.get_parameters(conf_values)
- logger.write("\n", 3)
# override the standard string.Template class to use the desired delimiter
class CompoTemplate(string.Template):
delimiter = tsettings.delimiter_char
# do substitution
- logger.write("\n", 5, True)
pathlen = len(target_dir) + 1
for root, dirs, files in os.walk(target_dir):
for fic in files:
fpath = os.path.join(root, fic)
if not tsettings.check_file_for_substitution(fpath[pathlen:]):
- logger.write(" - %s\n" % fpath[pathlen:], 5)
+ logger.debug(" - %s\n" % fpath[pathlen:])
continue
# read the file
m = file(fpath, 'r').read()
if d != m:
changed = "*"
file(fpath, 'w').write(d)
- logger.write(" %s %s\n" % (changed, fpath[pathlen:]), 5)
+ logger.debug(" %s %s\n" % (changed, fpath[pathlen:]))
if not tsettings.has_pyconf:
- logger.write(UTS.red(_(
- "Definition for sat not found in settings file.")) + "\n", 2)
+ logger.error(_("Definition for sat not found in settings file."))
else:
definition = tsettings.pyconf % dico
pyconf_file = os.path.join(target_dir, name + '.pyconf')
f = open(pyconf_file, 'w')
f.write(definition)
f.close()
- logger.write(_(
- "Create configuration file: ") + UTS.info(
- pyconf_file) + "\n", 2)
+ logger.info(_("Create configuration file: ") + pyconf_file)
if len(tsettings.post_command) > 0:
cmd = tsettings.post_command % dico
- logger.write("\n", 5, True)
- logger.write(_(
- "Run post command: ") + UTS.info(cmd) + "\n", 3)
+ logger.info(_("Run post command: ") + cmd)
p = subprocess.Popen(cmd, shell=True, cwd=target_dir)
p.wait()
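The CompoTemplate class above uses the documented way to change the substitution marker of string.Template: override the delimiter class attribute in a subclass (the matching pattern is built at class creation time, so assigning the attribute on an instance has no effect). A self-contained example with an arbitrary delimiter:

    import string

    class ColonTemplate(string.Template):
        delimiter = ":"  # arbitrary delimiter, chosen for the example

    t = ColonTemplate("Hello :name, welcome to :{place}")
    print(t.safe_substitute(name="world", place="SALOME"))
    # -> Hello world, welcome to SALOME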
import src.utilsSat as UTS
from src.salomeTools import _BaseCommand
import src.ElementTree as etree
-from src.xmlManager import add_simple_node
+import src.xmlManager as XMLMGR
try:
from hashlib import sha1
# the test base is specified either by the application, or by the --base option
with_application = False
if config.VARS.application != 'None':
- logger.write(
- _('Running tests on application %s\n') %
- UTS.label(config.VARS.application), 1)
+ logger.info(_('Running tests on application %s\n') %
+ UTS.label(config.VARS.application))
with_application = True
elif not options.base:
raise Exception(
if with_application:
# check if environment is loaded
if 'KERNEL_ROOT_DIR' in os.environ:
- logger.write( UTS.red(
- _("WARNING: SALOME environment already sourced")) + "\n", 1 )
-
-
+ logger.warning(_("SALOME environment already sourced"))
+
elif options.launcher:
- logger.write(UTS.red(_("Running SALOME application.")) + "\n\n", 1)
+ logger.info(_("Running SALOME application."))
else:
msg = _("""\
Impossible to find any launcher.
-Please specify an application or a launcher\n""")
+Please specify an application or a launcher
+""")
logger.error(msg)
return 1
retcode = test_runner.run_all_tests()
logger.allowPrintLevel = True
- logger.write(_("Tests finished"), 1)
- logger.write("\n", 2, False)
+ logger.info(_("Tests finished\n"))
- logger.write(_("\nGenerate the specific test log\n"), 5)
+ logger.debug(_("Generate the specific test log\n"))
log_dir = UTS.get_log_path(config)
out_dir = os.path.join(log_dir, "TEST")
UTS.ensure_path_exists(out_dir)
# Add the historic files into the log files list of the command
logger.l_logFiles.append(historic_xml_path)
- logger.write(
- _("Removing the temporary directory: %s\n" %
- test_runner.tmp_working_dir), 5 )
+ logger.debug(_("Removing the temporary directory: %s") % test_runner.tmp_working_dir)
if os.path.exists(test_runner.tmp_working_dir):
shutil.rmtree(test_runner.tmp_working_dir)
while not pathIsOk:
try:
# create test results directory if necessary
- #logger.write("FINAL = %s\n" % finalPath, 5)
+ #logger.debug("FINAL = %s\n" % finalPath)
if not os.access(finalPath, os.F_OK):
#shutil.rmtree(finalPath)
os.makedirs(finalPath)
if not os.access(os.path.join(finalPath, '.objects'), os.F_OK):
os.makedirs(os.path.join(finalPath, '.objects'))
- logger.write(_('copy tests results to %s ... ') % finalPath, 3)
- logger.flush()
- #logger.write("\n", 5)
+ logger.info(_('copy tests results to %s ... ') % finalPath)
# copy env_info.py
shutil.copy2(os.path.join(in_dir, what, 'env_info.py'),
continue
os.makedirs(outtestbase)
- #logger.write(" copy testbase %s\n" % testbase, 5)
+ #logger.debug("copy testbase %s\n" % testbase)
for grid_ in [m for m in os.listdir(intestbase) if os.path.isdir(
os.path.join(intestbase, m))]:
outgrid = os.path.join(outtestbase, grid_)
ingrid = os.path.join(intestbase, grid_)
os.makedirs(outgrid)
- #logger.write(" copy grid %s\n" % grid_, 5)
+ #logger.debug("copy grid %s" % grid_)
if grid_ == 'RESSOURCES':
for file_name in os.listdir(ingrid):
# for sending the test logs back
print "TRACES OP - test.py/create_test_report() : xml_history_path = '#%s#'" %xml_history_path
+ ASNODE = XMLMGR.add_simple_node # shortcut
+
if withappli:
if not first_time:
for node in (prod_node.findall("version_to_download") +
prod_node.findall("out_dir")):
prod_node.remove(node)
- add_simple_node(prod_node, "version_to_download",
- config.APPLICATION.name)
+ ASNODE(prod_node, "version_to_download", config.APPLICATION.name)
- add_simple_node(prod_node, "out_dir", config.APPLICATION.workdir)
+ ASNODE(prod_node, "out_dir", config.APPLICATION.workdir)
# add environment
if not first_time:
for node in prod_node.findall("exec"):
prod_node.remove(node)
- exec_node = add_simple_node(prod_node, "exec")
+ exec_node = ASNODE(prod_node, "exec")
exec_node.append(etree.Element("env", name="Host", value=config.VARS.node))
exec_node.append(etree.Element("env", name="Architecture",
value=config.VARS.dist))
if 'TESTS' in config:
if first_time:
- tests = add_simple_node(prod_node, "tests")
- known_errors = add_simple_node(prod_node, "known_errors")
- new_errors = add_simple_node(prod_node, "new_errors")
- amend = add_simple_node(prod_node, "amend")
+ tests = ASNODE(prod_node, "tests")
+ known_errors = ASNODE(prod_node, "known_errors")
+ new_errors = ASNODE(prod_node, "new_errors")
+ amend = ASNODE(prod_node, "amend")
else:
tests = prod_node.find("tests")
known_errors = prod_node.find("known_errors")
for testbase in tt.keys():
if first_time:
- gn = add_simple_node(tests, "testbase")
+ gn = ASNODE(tests, "testbase")
else:
gn = tests.find("testbase")
# initialize all grids and session to "not executed"
for test in tt[testbase]:
if not grids.has_key(test.grid):
if first_time:
- mn = add_simple_node(gn, "grid")
+ mn = ASNODE(gn, "grid")
mn.attrib['name'] = test.grid
else:
l_mn = gn.findall("grid")
mn = grid_node
break
if mn == None:
- mn = add_simple_node(gn, "grid")
+ mn = ASNODE(gn, "grid")
mn.attrib['name'] = test.grid
grids[test.grid] = mn
if not sessions.has_key("%s/%s" % (test.grid, test.session)):
if first_time:
- tyn = add_simple_node(mn, "session")
+ tyn = ASNODE(mn, "session")
tyn.attrib['name'] = test.session
else:
l_tyn = mn.findall("session")
tyn = session_node
break
if tyn == None:
- tyn = add_simple_node(mn, "session")
+ tyn = ASNODE(mn, "session")
tyn.attrib['name'] = test.session
sessions["%s/%s" % (test.grid, test.session)] = tyn
for script in test.script:
if first_time:
- tn = add_simple_node(sessions[
- "%s/%s" % (test.grid, test.session)],
- "test")
+ tn = ASNODE(sessions["%s/%s" % (test.grid, test.session)], "test")
tn.attrib['session'] = test.session
tn.attrib['script'] = script.name
- hn = add_simple_node(tn, "history")
+ hn = ASNODE(tn, "history")
else:
l_tn = sessions["%s/%s" % (test.grid, test.session)].findall(
"test")
break
if tn == None:
- tn = add_simple_node(sessions[
- "%s/%s" % (test.grid, test.session)],
- "test")
+ tn = ASNODE(sessions["%s/%s" % (test.grid, test.session)], "test")
tn.attrib['session'] = test.session
tn.attrib['script'] = script.name
- hn = add_simple_node(tn, "history")
+ hn = ASNODE(tn, "history")
else:
# Get or create the history node for the current test
if len(tn.findall("history")) == 0:
- hn = add_simple_node(tn, "history")
+ hn = ASNODE(tn, "history")
else:
hn = tn.find("history")
# Put the last test data into the history
if 'res' in tn.attrib:
attributes = {"date_hour" : date_hour,
"res" : tn.attrib['res'] }
- add_simple_node(hn,
- "previous_test",
- attrib=attributes)
+ ASNODE(hn, "previous_test", attrib=attributes)
for node in tn:
if 'callback' in script:
try:
- cnode = add_simple_node(tn, "callback")
+ cnode = ASNODE(tn, "callback")
if src.architecture.is_windows():
import string
cnode.text = filter(
zz = (script.callback[:exc.start] +
'?' +
script.callback[exc.end-2:])
- cnode = add_simple_node(tn, "callback")
+ cnode = ASNODE(tn, "callback")
cnode.text = zz.decode("UTF-8")
# Add the script content
- cnode = add_simple_node(tn, "content")
+ cnode = ASNODE(tn, "content")
cnode.text = script.content
# Add the script execution log
- cnode = add_simple_node(tn, "out")
+ cnode = ASNODE(tn, "out")
cnode.text = script.out
if 'amend' in script:
- cnode = add_simple_node(tn, "amend")
+ cnode = ASNODE(tn, "amend")
cnode.text = script.amend.decode("UTF-8")
if script.time < 0:
tn.attrib['res'] = script.res
if "amend" in script:
- amend_test = add_simple_node(amend, "atest")
+ amend_test = ASNODE(amend, "atest")
amend_test.attrib['name'] = os.path.join(test.grid,
test.session,
script.name)
else: nb_not_run += 1
if "known_error" in script:
- kf_script = add_simple_node(known_errors, "error")
+ kf_script = ASNODE(known_errors, "error")
kf_script.attrib['name'] = os.path.join(test.grid,
test.session,
script.name)
kf_script.attrib['overdue'] = str(overdue)
elif script.res == src.KO_STATUS:
- new_err = add_simple_node(new_errors, "new_error")
+ new_err = ASNODE(new_errors, "new_error")
script_path = os.path.join(test.grid,
test.session, script.name)
new_err.attrib['name'] = script_path
if not xmlname.endswith(".xml"):
xmlname += ".xml"
- src.xmlManager.write_report(os.path.join(dest_path, xmlname),
- root,
- "test.xsl")
- src.xmlManager.write_report(xml_history_path,
- root,
- "test_history.xsl")
+ XMLMGR.write_report(os.path.join(dest_path, xmlname), root, "test.xsl")
+ XMLMGR.write_report(xml_history_path, root, "test_history.xsl")
return src.OK_STATUS
def generate_history_xml_path(config, test_base):
command = "which lrelease"
res = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE,env=builder.build_environ.environ.environ).communicate()
if res[1] != "": #an error occured
- logger.write("ERROR: %s" % res[1])
+ logger.error(res[1])
builder.log(res[1]+"\n")
return 1
("<reset>", ST.RESET_ALL),
("<info>", ST.RESET_ALL),
("<header>", FG.BLUE),
+ ("<label>", FG.CYAN),
+ ("<success>", FG.GREEN),
("<warning>", FG.RED),
("<error>", FG.RED + ST.BRIGHT),
("<critical>", FG.RED + ST.BRIGHT),
("<reset>", ""),
("<info>", ""),
("<header>", ""),
+ ("<label>", ""),
+ ("<success>", ""),
("<warning>", ""),
("<error>", ""),
("<critical>", ""),
if "debug" in self.product_info and self.product_info.debug == "yes":
self.debug_mode = True
- ##
- # Shortcut method to log in log file.
def log(self, text, level, showInfo=True):
- self.logger.write(text, level, showInfo)
+ """Shortcut method to log in log file."""
+ self.logger.info(text)
self.logger.logTxtFile.write(UTS.cleancolor(text))
- self.logger.flush()
- ##
- # Shortcut method to log a command.
def log_command(self, command):
+ """Shortcut method to log a command."""
self.log("> %s\n" % command, 5)
- ##
- # Prepares the environment.
- # Build two environment: one for building and one for testing (launch).
def prepare(self):
+ """\
+ Prepares the environment.
+ Builds two environments: one for building and one for testing (launch).
+ """
if not self.build_dir.exists():
# create build dir
return self.get_result()
- ##
- # Performs a build with a script.
def do_python_script_build(self, script, nb_proc):
+ """Performs a build with a script."""
# script found
- self.logger.write(_("Compile %(product)s using script %(script)s\n") %
- { 'product': self.product_info.name,
- 'script': UTS.label(script) }, 4)
+ self.logger.info(_("Compile %s using script %s\n") % \
+ (self.product_info.name, UTS.label(script) )
try:
import imp
product = self.product_info.name
retcode = pymodule.compil(self.config, self, self.logger)
except:
__, exceptionValue, exceptionTraceback = sys.exc_info()
- self.logger.write(str(exceptionValue), 1)
+ self.logger.error(str(exceptionValue))
import traceback
traceback.print_tb(exceptionTraceback)
traceback.print_exc()
retcode = 1
finally:
self.put_txt_log_in_appli_log_dir("script")
-
return retcode
def complete_environment(self, make_options):
def header(msg):
return "<info>"+msg+"<reset>"
+def label(msg):
+ return "<label>"+msg+"<reset>"
+
+def success(msg):
+ return "<success>"+msg+"<reset>"
+
def warning(msg):
return "<warning>"+msg+"<reset>"
##############################################################################
-# log utilities (TODO: set in loggingSat class ? ...)
+# log utilities (TODO: move into the loggingSat class later; the xml handling makes this tricky)
##############################################################################
-_log_macro_command_file_expression = "^[0-9]{8}_+[0-9]{6}_+.*\.xml$"
-
+
def date_to_datetime(date):
"""\
From a string date in format YYYYMMDD_HHMMSS
timedelta.microseconds + 0.0 +
(timedelta.seconds + timedelta.days * 24 * 3600) * 10 ** 6) / 10 ** 6
+_log_macro_command_file_expression = "^[0-9]{8}_+[0-9]{6}_+.*\.xml$"
+_log_all_command_file_expression = "^.*[0-9]{8}_+[0-9]{6}_+.*\.xml$"
+
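The two expressions differ only in whether anything may precede the YYYYMMDD_HHMMSS stamp. A small check with hypothetical file names:

    import re

    macro = re.compile(r"^[0-9]{8}_+[0-9]{6}_+.*\.xml$")
    allcmd = re.compile(r"^.*[0-9]{8}_+[0-9]{6}_+.*\.xml$")

    assert macro.match("20180625_142400_config.xml")
    assert allcmd.match("micro_20180625_142400_config.xml")
    assert macro.match("micro_20180625_142400_config.xml") is None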
def show_command_log(logFilePath, cmd, application, notShownCommands):
"""\
Used in updateHatXml.
"""
# When the command is not in notShownCommands, no need to go further :
# Do not show
+
+ import src.xmlManager as XMLMGR # local import to avoid a circular import with utilsSat
+
if cmd in notShownCommands:
return RCO.ReturnCode("KO", "in notShownCommands", None)
# Get the application of the log file
- try:
- logFileXml = src.xmlManager.ReadXmlFile(logFilePath)
- except Exception as e:
+ if True: #try:
+ logFileXml = XMLMGR.ReadXmlFile(logFilePath)
+ else: #except Exception as e:
msg = _("The log file '%s' cannot be read:" % logFilePath)
return RCO.ReturnCode("KO", msg, None)
:param application str: the name of the application if there is any
"""
# Create an instance of XmlLogFile class to create hat.xml file
+
+ import src.xmlManager as XMLMGR # local import to avoid a circular import with utilsSat
+
xmlHatFilePath = os.path.join(logDir, 'hat.xml')
- xmlHat = src.xmlManager.XmlLogFile(xmlHatFilePath,
- "LOGlist", {"application" : application})
+ xmlHat = XMLMGR.XmlLogFile(xmlHatFilePath, "LOGlist", {"application" : application})
# parse the log directory to find all the command logs,
# then add it to the xml file
lLogFile = list_log_file(logDir, _log_macro_command_file_expression)
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+"""Utilities to read xml logging files
+
+usage:
+ >> import src.xmlManager as XMLMGR
+"""
+
import os
try: # For python2
import sys
except:
pass
-import src.utilsSat as UTS
import src.ElementTree as etree
+import src.utilsSat as UTS
class XmlLogFile(object):
'''Class to manage writing in salomeTools xml log file