# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import os
+import sys
+import tempfile
+import traceback
import datetime
import time
import csv
import shutil
import itertools
+import re
import paramiko
import src
+import src.ElementTree as etree
STYLESHEET_GLOBAL = "jobs_global_report.xsl"
STYLESHEET_BOARD = "jobs_board_report.xsl"
parser = src.options.Options()
-parser.add_option('n', 'name', 'string', 'jobs_cfg',
- _('The name of the config file that contains'
- ' the jobs configuration'))
+parser.add_option('n', 'name', 'list2', 'jobs_cfg',
+ _('Mandatory: The name of the config file that contains'
+ ' the jobs configuration. Can be a list.'))
parser.add_option('o', 'only_jobs', 'list2', 'only_jobs',
_('Optional: the list of jobs to launch, by their name. '))
parser.add_option('l', 'list', 'boolean', 'list',
parser.add_option('i', 'input_boards', 'string', 'input_boards', _("Optional: "
"the path to csv file that contain "
"the expected boards."),"")
-parser.add_option('n', 'completion', 'boolean', 'no_label',
+parser.add_option('', 'completion', 'boolean', 'no_label',
_("Optional (internal use): do not print labels, Works only "
"with --list."),
False)
self.put_dir(sat_local_path, self.sat_path, filters = ['.git'])
# put the job configuration file in order to make it reachable
# on the remote machine
- self.sftp.put(job_file, os.path.join(".salomeTools",
- "Jobs",
- ".jobs_command_file.pyconf"))
+ remote_job_file_name = ".%s" % os.path.basename(job_file)
+ self.sftp.put(job_file, os.path.join(self.sat_path,
+ remote_job_file_name))
except Exception as e:
res = str(e)
self._connection_successful = False
class Job(object):
'''Class to manage one job
'''
- def __init__(self, name, machine, application, board,
- commands, timeout, config, logger, after=None):
+ def __init__(self,
+ name,
+ machine,
+ application,
+ board,
+ commands,
+ timeout,
+ config,
+ job_file_path,
+ logger,
+ after=None,
+ prefix=None):
self.name = name
self.machine = machine
self.out = ""
self.err = ""
-
+
+ self.name_remote_jobs_pyconf = ".%s" % os.path.basename(job_file_path)
self.commands = commands
self.command = (os.path.join(self.machine.sat_path, "sat") +
" -l " +
os.path.join(self.machine.sat_path,
"list_log_files.txt") +
- " job --jobs_config .jobs_command_file" +
+ " job --jobs_config " +
+ os.path.join(self.machine.sat_path,
+ self.name_remote_jobs_pyconf) +
" --name " +
self.name)
+ if prefix:
+ self.command = prefix + ' "' + self.command +'"'
def get_pids(self):
""" Get the pid(s) corresponding to the command that have been launched
:return: (the output of the kill, the error of the kill)
:rtype: (str, str)
'''
-
- pids = self.get_pids()
+ try:
+ pids = self.get_pids()
+        except Exception:
+ return ("Unable to get the pid of the command.", "")
+
cmd_kill = " ; ".join([("kill -2 " + pid) for pid in pids])
(_, out_kill, err_kill) = self.machine.exec_command(cmd_kill,
self.logger)
time.sleep(wait)
- return (out_kill, err_kill)
+ return (out_kill.read().decode(), err_kill.read().decode())
def has_begun(self):
'''Returns True if the job has already begun
# Put end time
self._Tf = time.time()
# And get the remote command status and log files
- self.get_log_files()
+ try:
+ self.get_log_files()
+ except Exception as e:
+ self.err += _("Unable to get remote log files: %s" % e)
return self._has_finished
"""In case of a failing job, one has to cancel every job that depend
on it. This method put the job as failed and will not be executed.
"""
+ if self.cancelled:
+ return
self._has_begun = True
self._has_finished = True
self.cancelled = True
self._has_finished = True
self._has_timouted = True
self._Tf = time.time()
- self.get_pids()
(out_kill, _) = self.kill_remote_process()
- self.out += "TIMEOUT \n" + out_kill.read().decode()
+ self.out += "TIMEOUT \n" + out_kill
self.err += "TIMEOUT : %s seconds elapsed\n" % str(self.timeout)
try:
self.get_log_files()
except Exception as e:
- self.err += _("Unable to get remote log files: %s" % e)
+ # The 2 following lines must be suppressed after the bug is fixed
+ print("The error type: ")
+ print(type(e))
+ print("The error: ")
+ print(e)
+ print("Local scope:")
+ print(dir())
+ print("The str type: ")
+ print(type(str))
+ print("str: ")
+ print(str)
+ self.err += _("Unable to get remote log files!")
def total_duration(self):
"""Give the total duration of the job
'''
name = job_def.name
cmmnds = job_def.commands
- timeout = job_def.timeout
+        if "timeout" not in job_def:
+ timeout = 4*60*60 # default timeout = 4h
+ else:
+ timeout = job_def.timeout
after = None
if 'after' in job_def:
after = job_def.after
board = None
if 'board' in job_def:
board = job_def.board
+ prefix = None
+ if "prefix" in job_def:
+ prefix = job_def.prefix
return Job(name,
machine,
cmmnds,
timeout,
self.runner.cfg,
+ self.job_file_path,
self.logger,
- after = after)
+ after = after,
+ prefix = prefix)
def determine_jobs_and_machines(self):
'''Function that reads the pyconf jobs definition and instantiates all
msg = _("WARNING: The job \"%(job_name)s\" requires the "
"machine \"%(machine_name)s\" but this machine "
"is not defined in the configuration file.\n"
- "The job will not be launched")
- self.logger.write(src.printcolors.printcWarning(msg))
+ "The job will not be launched\n")
+ self.logger.write(src.printcolors.printcWarning(
+ msg % {"job_name" : job_def.name,
+ "machine_name" : name_machine}))
+ continue
a_job = self.define_job(job_def, a_machine)
# Copy salomeTools to the remote machine
if machine.successfully_connected(self.logger):
+ step = _("Remove SAT")
+ self.logger.write('\r%s%s%s' % (begin_line, endline, 20 * " "),3)
+ self.logger.write('\r%s%s%s' % (begin_line, endline, step), 3)
+ (__, out_dist, __) = machine.exec_command(
+ "rm -rf %s" % machine.sat_path,
+ self.logger)
+ out_dist.read()
+
+ self.logger.flush()
step = _("Copy SAT")
self.logger.write('\r%s%s%s' % (begin_line, endline, 20 * " "),3)
self.logger.write('\r%s%s%s' % (begin_line, endline, step), 3)
self.logger.write('\r%s%s%s %s' %
(begin_line,
endline,
- src.printcolors.printc(src.OK_STATUS),
- _("Copy of SAT failed")), 3)
+ src.printcolors.printc(src.KO_STATUS),
+ _("Copy of SAT failed: %s" % res_copy)), 3)
else:
self.logger.write('\r%s' %
((len(begin_line)+len(endline)+20) * " "), 3)
see the jobs states
'''
- def __init__(self, xml_dir_path, l_jobs, l_jobs_not_today, file_boards=""):
+ def __init__(self,
+ xml_dir_path,
+ l_jobs,
+ l_jobs_not_today,
+ prefix,
+ logger,
+ file_boards=""):
'''Initialization
:param xml_dir_path str: The path to the directory where to put
:param file_boards str: the file path from which to read the
expected boards
'''
+ # The logging instance
+ self.logger = logger
+
+ # The prefix to add to the xml files : date_hour
+ self.prefix = prefix
+
# The path of the csv files to read to fill the expected boards
self.file_boards = file_boards
- today = datetime.date.weekday(datetime.date.today())
- self.parse_csv_boards(today)
+ if file_boards != "":
+ today = datetime.date.weekday(datetime.date.today())
+ self.parse_csv_boards(today)
+ else:
+ self.d_input_boards = {}
# The path of the global xml file
self.xml_dir_path = xml_dir_path
# Initialize the xml files
- xml_global_path = os.path.join(self.xml_dir_path, "global_report.xml")
+ self.global_name = "global_report"
+ xml_global_path = os.path.join(self.xml_dir_path,
+ self.global_name + ".xml")
self.xml_global_file = src.xmlManager.XmlLogFile(xml_global_path,
"JobsReport")
+
+ # Find history for each job
+ self.history = {}
+ self.find_history(l_jobs, l_jobs_not_today)
+
# The xml files that corresponds to the boards.
# {name_board : xml_object}}
self.d_xml_board_files = {}
+
# Create the lines and columns
self.initialize_boards(l_jobs, l_jobs_not_today)
-
+
# Write the xml file
self.update_xml_files(l_jobs)
def add_xml_board(self, name):
+ '''Add a board to the board list
+ :param name str: the board name
+ '''
xml_board_path = os.path.join(self.xml_dir_path, name + ".xml")
self.d_xml_board_files[name] = src.xmlManager.XmlLogFile(
xml_board_path,
continue
for board in self.d_xml_board_files:
if board_job == board:
- if distrib is not None and distrib not in d_dist[board]:
+ if (distrib not in [None, ''] and
+ distrib not in d_dist[board]):
d_dist[board].append(distrib)
src.xmlManager.add_simple_node(
self.d_xml_board_files[board].xmlroot.find(
attrib={"name" : distrib})
if board_job == board:
- if (application is not None and
+ if (application not in [None, ''] and
application not in d_application[board]):
d_application[board].append(application)
src.xmlManager.add_simple_node(
# that will not be launched today
self.put_jobs_not_today(l_jobs_not_today, xml_jobs)
+ # add also the infos node
xml_file.add_simple_node("infos",
attrib={"name" : "last update",
"JobsCommandStatus" : "running"})
-
+
+ # and put the history node
+ history_node = xml_file.add_simple_node("history")
+ name_board = os.path.basename(xml_file.logFile)[:-len(".xml")]
+        # search for board files
+ expression = "^[0-9]{8}_+[0-9]{6}_" + name_board + ".xml$"
+ oExpr = re.compile(expression)
+        # Get the list of xml board files that are in the log directory
+ for file_name in os.listdir(self.xml_dir_path):
+ if oExpr.search(file_name):
+ date = os.path.basename(file_name).split("_")[0]
+ file_path = os.path.join(self.xml_dir_path, file_name)
+ src.xmlManager.add_simple_node(history_node,
+ "link",
+ text=file_path,
+ attrib={"date" : date})
+
+
# Find in each board the squares that needs to be filled regarding the
# input csv files but that are not covered by a today job
for board in self.d_input_boards.keys():
xml_root_board = self.d_xml_board_files[board].xmlroot
+ # Find the missing jobs for today
xml_missing = src.xmlManager.add_simple_node(xml_root_board,
"missing_jobs")
for row, column in self.d_input_boards[board]["jobs"]:
"job",
attrib={"distribution" : row,
"application" : column })
-
+ # Find the missing jobs not today
+ xml_missing_not_today = src.xmlManager.add_simple_node(
+ xml_root_board,
+ "missing_jobs_not_today")
+ for row, column in self.d_input_boards[board]["jobs_not_today"]:
+ found = False
+ for job in l_jobs_not_today:
+ if (job.application == column and
+ job.machine.distribution == row):
+ found = True
+ break
+ if not found:
+ src.xmlManager.add_simple_node(xml_missing_not_today,
+ "job",
+ attrib={"distribution" : row,
+ "application" : column })
+
+ def find_history(self, l_jobs, l_jobs_not_today):
+ """find, for each job, in the existent xml boards the results for the
+    job. Store the results in the dictionary self.history = {name_job :
+ list of (date, status, list links)}
+
+ :param l_jobs List: the list of jobs to run today
+ :param l_jobs_not_today List: the list of jobs that do not run today
+ """
+        # load all of the history
+ expression = "^[0-9]{8}_+[0-9]{6}_" + self.global_name + ".xml$"
+ oExpr = re.compile(expression)
+ # Get the list of global xml that are in the log directory
+ l_globalxml = []
+ for file_name in os.listdir(self.xml_dir_path):
+ if oExpr.search(file_name):
+ file_path = os.path.join(self.xml_dir_path, file_name)
+ try:
+ global_xml = src.xmlManager.ReadXmlFile(file_path)
+ l_globalxml.append(global_xml)
+ except Exception as e:
+ msg = _("\nWARNING: the file %s can not be read, it will be "
+ "ignored\n%s" % (file_path, e))
+ self.logger.write("%s\n" % src.printcolors.printcWarning(
+ msg), 5)
+
+        # Construct the dictionary self.history
+ for job in l_jobs + l_jobs_not_today:
+ l_links = []
+ for global_xml in l_globalxml:
+ date = os.path.basename(global_xml.filePath).split("_")[0]
+ global_root_node = global_xml.xmlroot.find("jobs")
+ job_node = src.xmlManager.find_node_by_attrib(
+ global_root_node,
+ "job",
+ "name",
+ job.name)
+                if job_node is not None:
+ if job_node.find("remote_log_file_path") is not None:
+ link = job_node.find("remote_log_file_path").text
+ res_job = job_node.find("res").text
+ if link != "nothing":
+ l_links.append((date, res_job, link))
+ l_links = sorted(l_links, reverse=True)
+ self.history[job.name] = l_links
+
def put_jobs_not_today(self, l_jobs_not_today, xml_node_jobs):
'''Get all the first information needed for each file and write the
first version of the files
src.xmlManager.add_simple_node(xmlj, "user", job.machine.user)
src.xmlManager.add_simple_node(xmlj, "sat_path",
job.machine.sat_path)
+ xml_history = src.xmlManager.add_simple_node(xmlj, "history")
+ for i, (date, res_job, link) in enumerate(self.history[job.name]):
+ if i==0:
+ # tag the first one (the last one)
+ src.xmlManager.add_simple_node(xml_history,
+ "link",
+ text=link,
+ attrib={"date" : date,
+ "res" : res_job,
+ "last" : "yes"})
+ else:
+ src.xmlManager.add_simple_node(xml_history,
+ "link",
+ text=link,
+ attrib={"date" : date,
+ "res" : res_job,
+ "last" : "no"})
def parse_csv_boards(self, today):
- """ Parse the csv files that describes the boards to produce and fill
+ """ Parse the csv file that describes the boards to produce and fill
the dict d_input_boards that contain the csv file contain
:param today int: the current day of the week
"""
- # loop over each csv file and read its content
+ # open the csv file and read its content
l_read = []
with open(self.file_boards, 'r') as f:
reader = csv.reader(f,delimiter=CSV_DELIMITER)
rows = []
jobs = []
+ jobs_not_today = []
for line in input_board[1:]:
row = line[0]
+ rows.append(row)
for i, square in enumerate(line[1:]):
if square=='':
continue
days = square.split(DAYS_SEPARATOR)
days = [int(day) for day in days]
- if today in days:
- if row not in rows:
- rows.append(row)
- job = (row, columns[i])
+ job = (row, columns[i])
+ if today in days:
jobs.append(job)
+ else:
+ jobs_not_today.append(job)
d_boards[board_name] = {"rows" : rows,
"columns" : columns,
- "jobs" : jobs}
+ "jobs" : jobs,
+ "jobs_not_today" : jobs_not_today}
self.d_input_boards = d_boards
src.xmlManager.add_simple_node(xmlj, "host", job.machine.host)
src.xmlManager.add_simple_node(xmlj, "port", str(job.machine.port))
src.xmlManager.add_simple_node(xmlj, "user", job.machine.user)
+ xml_history = src.xmlManager.add_simple_node(xmlj, "history")
+ for date, res_job, link in self.history[job.name]:
+ src.xmlManager.add_simple_node(xml_history,
+ "link",
+ text=link,
+ attrib={"date" : date,
+ "res" : res_job})
+
src.xmlManager.add_simple_node(xmlj, "sat_path",
job.machine.sat_path)
src.xmlManager.add_simple_node(xmlj, "application", job.application)
src.xmlManager.add_simple_node(xmlj,
"remote_log_file_path",
"nothing")
+ # Search for the test log if there is any
+ l_test_log_files = self.find_test_log(job.remote_log_files)
+ xml_test = src.xmlManager.add_simple_node(xmlj,
+ "test_log_file_path")
+ for test_log_path, res_test, nb_fails in l_test_log_files:
+ test_path_node = src.xmlManager.add_simple_node(xml_test,
+ "path",
+ test_log_path)
+ test_path_node.attrib["res"] = res_test
+ test_path_node.attrib["nb_fails"] = nb_fails
xmlafter = src.xmlManager.add_simple_node(xmlj, "after", job.after)
# get the job father
datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")})
+ def find_test_log(self, l_remote_log_files):
+ '''Find if there is a test log (board) in the remote log files and
+ the path to it. There can be several test command, so the result is
+ a list.
+
+ :param l_remote_log_files List: the list of all remote log files
+ :return: the list of (test log files path, res of the command)
+ :rtype: List
+ '''
+ res = []
+ for file_path in l_remote_log_files:
+ dirname = os.path.basename(os.path.dirname(file_path))
+ file_name = os.path.basename(file_path)
+ regex = src.logger.log_all_command_file_expression
+ oExpr = re.compile(regex)
+ if dirname == "TEST" and oExpr.search(file_name):
+ # find the res of the command
+ prod_node = etree.parse(file_path).getroot().find("product")
+ res_test = prod_node.attrib["global_res"]
+ # find the number of fails
+ testbase_node = prod_node.find("tests").find("testbase")
+ nb_fails = int(testbase_node.attrib["failed"])
+ # put the file path, the res of the test command and the number
+ # of fails in the output
+ res.append((file_path, res_test, nb_fails))
+
+ return res
def last_update(self, finish_status = "finished"):
'''update information about the jobs for the file xml_file
attrib={"JobsCommandStatus" : finish_status})
# Write the file
self.write_xml_files()
-
+
+ def write_xml_file(self, xml_file, stylesheet):
+ ''' Write one xml file and the same file with prefix
+ '''
+ xml_file.write_tree(stylesheet)
+ file_path = xml_file.logFile
+ file_dir = os.path.dirname(file_path)
+ file_name = os.path.basename(file_path)
+ file_name_with_prefix = self.prefix + "_" + file_name
+ xml_file.write_tree(stylesheet, os.path.join(file_dir,
+ file_name_with_prefix))
+
def write_xml_files(self):
''' Write the xml files
'''
- self.xml_global_file.write_tree(STYLESHEET_GLOBAL)
+ self.write_xml_file(self.xml_global_file, STYLESHEET_GLOBAL)
for xml_file in self.d_xml_board_files.values():
- xml_file.write_tree(STYLESHEET_BOARD)
-
+ self.write_xml_file(xml_file, STYLESHEET_BOARD)
+
+def get_config_file_path(job_config_name, l_cfg_dir):
+ found = False
+ file_jobs_cfg = None
+ if os.path.exists(job_config_name) and job_config_name.endswith(".pyconf"):
+ found = True
+ file_jobs_cfg = job_config_name
+ else:
+ for cfg_dir in l_cfg_dir:
+ file_jobs_cfg = os.path.join(cfg_dir, job_config_name)
+ if not file_jobs_cfg.endswith('.pyconf'):
+ file_jobs_cfg += '.pyconf'
+
+ if not os.path.exists(file_jobs_cfg):
+ continue
+ else:
+ found = True
+ break
+ return found, file_jobs_cfg
+
##
# Describes the command
def description():
return _("The jobs command launches maintenances that are described"
- " in the dedicated jobs configuration file.")
+ " in the dedicated jobs configuration file.\n\nexample:\nsat "
+ "jobs --name my_jobs --publish")
##
# Runs the command.
if not options.no_label:
logger.write("------ %s\n" %
src.printcolors.printcHeader(cfg_dir))
-
+ if not os.path.exists(cfg_dir):
+ continue
for f in sorted(os.listdir(cfg_dir)):
if not f.endswith('.pyconf'):
continue
src.printcolors.printcError(message)
return 1
- # Find the file in the directories
- found = False
- for cfg_dir in l_cfg_dir:
- file_jobs_cfg = os.path.join(cfg_dir, options.jobs_cfg)
- if not file_jobs_cfg.endswith('.pyconf'):
- file_jobs_cfg += '.pyconf'
-
- if not os.path.exists(file_jobs_cfg):
- continue
- else:
- found = True
- break
-
- if not found:
- msg = _("The file configuration %(name_file)s was not found."
- "\nUse the --list option to get the possible files.")
- src.printcolors.printcError(msg)
- return 1
+ # Find the file in the directories, unless it is a full path
+ # merge all in a config
+ merger = src.pyconf.ConfigMerger()
+ config_jobs = src.pyconf.Config()
+ l_conf_files_path = []
+ for config_file in options.jobs_cfg:
+ found, file_jobs_cfg = get_config_file_path(config_file, l_cfg_dir)
+ if not found:
+ msg = _("The file configuration %s was not found."
+ "\nUse the --list option to get the "
+ "possible files." % config_file)
+ logger.write("%s\n" % src.printcolors.printcError(msg), 1)
+ return 1
+ l_conf_files_path.append(file_jobs_cfg)
+ # Read the config that is in the file
+ one_config_jobs = src.read_config_from_a_file(file_jobs_cfg)
+ merger.merge(config_jobs, one_config_jobs)
info = [
(_("Platform"), runner.cfg.VARS.dist),
- (_("File containing the jobs configuration"), file_jobs_cfg)
+ (_("Files containing the jobs configuration"), l_conf_files_path)
]
src.print_info(logger, info)
- # Read the config that is in the file
- config_jobs = src.read_config_from_a_file(file_jobs_cfg)
if options.only_jobs:
l_jb = src.pyconf.Sequence()
for jb in config_jobs.jobs:
if jb.name in options.only_jobs:
l_jb.append(jb,
- "Adding a job that was given in only_jobs option parameters")
+ "Job that was given in only_jobs option parameters\n")
config_jobs.jobs = l_jb
-
+
+ # Make a unique file that contain all the jobs in order to use it
+ # on every machine
+ name_pyconf = "_".join([os.path.basename(path)[:-len('.pyconf')]
+ for path in l_conf_files_path]) + ".pyconf"
+ path_pyconf = src.get_tmp_filename(runner.cfg, name_pyconf)
+ #Save config
+    with open(path_pyconf, 'w') as f:
+        config_jobs.__save__(f)
+
+ # log the paramiko problems
+ paramiko_log_dir_path = os.path.join(runner.cfg.USER.log_dir, "JOBS")
+ src.ensure_path_exists(paramiko_log_dir_path)
+ paramiko.util.log_to_file(os.path.join(paramiko_log_dir_path,
+ logger.txtFileName))
+
# Initialization
today_jobs = Jobs(runner,
logger,
- file_jobs_cfg,
+ path_pyconf,
config_jobs)
# SSH connection to all machines
today_jobs.ssh_connection_all_machines()
gui = None
if options.publish:
+ logger.write(src.printcolors.printcInfo(
+ _("Initialize the xml boards : ")), 5)
+ logger.flush()
+
# Copy the stylesheets in the log directory
- log_dir = runner.cfg.SITE.log.log_dir
+ log_dir = runner.cfg.USER.log_dir
xsl_dir = os.path.join(runner.cfg.VARS.srcDir, 'xsl')
files_to_copy = []
files_to_copy.append(os.path.join(xsl_dir, STYLESHEET_GLOBAL))
# Instanciate the Gui in order to produce the xml files that contain all
# the boards
- gui = Gui(runner.cfg.SITE.log.log_dir,
+ gui = Gui(runner.cfg.USER.log_dir,
today_jobs.ljobs,
today_jobs.ljobs_not_today,
+ runner.cfg.VARS.datehour,
+ logger,
file_boards = options.input_boards)
+ logger.write(src.printcolors.printcSuccess("OK"), 5)
+ logger.write("\n\n", 5)
+ logger.flush()
+
# Display the list of the xml files
logger.write(src.printcolors.printcInfo(("Here is the list of published"
" files :\n")), 4)
file_name = os.path.basename(file_path)
logger.write("%s\n" % file_path, 4)
logger.add_link(file_name, "board", 0, board)
-
+
logger.write("\n", 4)
-
+
today_jobs.gui = gui
interruped = False
interruped = True
logger.write("\n\n%s\n\n" %
(src.printcolors.printcWarning(_("Forced interruption"))), 1)
+ except Exception as e:
+ msg = _("CRITICAL ERROR: The jobs loop has been interrupted\n")
+ logger.write("\n\n%s\n" % src.printcolors.printcError(msg) )
+ logger.write("%s\n" % str(e))
+ # get stack
+ __, __, exc_traceback = sys.exc_info()
+ fp = tempfile.TemporaryFile()
+ traceback.print_tb(exc_traceback, file=fp)
+ fp.seek(0)
+ stack = fp.read()
+ logger.write("\nTRACEBACK: %s\n" % stack.replace('"',"'"), 1)
+
finally:
+ res = 0
if interruped:
+ res = 1
msg = _("Killing the running jobs and trying"
" to get the corresponding logs\n")
logger.write(src.printcolors.printcWarning(msg))
# find the potential not finished jobs and kill them
for jb in today_jobs.ljobs:
if not jb.has_finished():
+ res = 1
try:
jb.kill_remote_process()
except Exception as e:
msg = _("Failed to kill job %s: %s\n" % (jb.name, e))
logger.write(src.printcolors.printcWarning(msg))
+ if jb.res_job != "0":
+ res = 1
if interruped:
if today_jobs.gui:
today_jobs.gui.last_update(_("Forced interruption"))
today_jobs.gui.last_update()
# Output the results
today_jobs.write_all_results()
+ # Remove the temporary pyconf file
+ if os.path.exists(path_pyconf):
+ os.remove(path_pyconf)
+ return res