import os
import datetime
import time
+import csv
+import shutil
+import itertools
import paramiko
import src
STYLESHEET_GLOBAL = "jobs_global_report.xsl"
STYLESHEET_BOARD = "jobs_board_report.xsl"
+DAYS_SEPARATOR = ","
+CSV_DELIMITER = ";"
+
parser = src.options.Options()
-parser.add_option('j', 'jobs_config', 'string', 'jobs_cfg',
+parser.add_option('n', 'name', 'string', 'jobs_cfg',
_('The name of the config file that contains'
' the jobs configuration'))
parser.add_option('o', 'only_jobs', 'list2', 'only_jobs',
- _('The list of jobs to launch, by their name. '))
+ _('Optional: the list of jobs to launch, by their name. '))
parser.add_option('l', 'list', 'boolean', 'list',
- _('list all available config files.'))
-parser.add_option('n', 'no_label', 'boolean', 'no_label',
- _("do not print labels, Works only with --list."), False)
+ _('Optional: list all available config files.'))
parser.add_option('t', 'test_connection', 'boolean', 'test_connection',
- _("Try to connect to the machines. Not executing the jobs."),
+ _("Optional: try to connect to the machines. "
+ "Not executing the jobs."),
False)
parser.add_option('p', 'publish', 'boolean', 'publish',
- _("Generate an xml file that can be read in a browser to "
- "display the jobs status."),
+ _("Optional: generate an xml file that can be read in a "
+ "browser to display the jobs status."),
+ False)
+parser.add_option('i', 'input_boards', 'string', 'input_boards', _("Optional: "
+ "the path to csv file that contain "
+ "the expected boards."),"")
+parser.add_option('', 'completion', 'boolean', 'no_label',
+ _("Optional (internal use): do not print labels, Works only "
+ "with --list."),
False)
class Machine(object):
self.put_dir(sat_local_path, self.sat_path, filters = ['.git'])
# put the job configuration file in order to make it reachable
# on the remote machine
- job_file_name = os.path.basename(job_file)
- self.sftp.put(job_file, os.path.join(self.sat_path,
- "data",
- "jobs",
- job_file_name))
+ self.sftp.put(job_file, os.path.join(".salomeTools",
+ "Jobs",
+ ".jobs_command_file.pyconf"))
except Exception as e:
res = str(e)
self._connection_successful = False
def put_dir(self, source, target, filters = []):
''' Uploads the contents of the source directory to the target path. The
- target directory needs to exists. All subdirectories in source are
+            target directory needs to exist. All sub-directories in source are
created under target.
'''
for item in os.listdir(source):
'''Class to manage one job
'''
def __init__(self, name, machine, application, board,
- commands, timeout, config, logger, job_file, after=None):
+ commands, timeout, config, logger, after=None):
self.name = name
self.machine = machine
self._stdout = None # Store the command outputs field
self._stderr = None # Store the command errors field
- self.out = None # Contains something only if the job is finished
- self.err = None # Contains something only if the job is finished
+ self.out = ""
+ self.err = ""
self.commands = commands
self.command = (os.path.join(self.machine.sat_path, "sat") +
" -l " +
os.path.join(self.machine.sat_path,
"list_log_files.txt") +
- " job --jobs_config " +
- job_file +
+ " job --jobs_config .jobs_command_file" +
" --name " +
self.name)
if self._stdout.channel.closed:
self._has_finished = True
# Store the result outputs
- self.out = self._stdout.read().decode()
- self.err = self._stderr.read().decode()
+ self.out += self._stdout.read().decode()
+ self.err += self._stderr.read().decode()
# Put end time
self._Tf = time.time()
# And get the remote command status and log files
def get_log_files(self):
"""Get the log files produced by the command launched
- on the remote machine.
+ on the remote machine, and put it in the log directory of the user,
+        so they can be accessible from the local machine
"""
# Do not get the files if the command is not finished
if not self.has_finished():
# First get the file that contains the list of log files to get
tmp_file_path = src.get_tmp_filename(self.config, "list_log_files.txt")
+ remote_path = os.path.join(self.machine.sat_path, "list_log_files.txt")
self.machine.sftp.get(
- os.path.join(self.machine.sat_path, "list_log_files.txt"),
+ remote_path,
tmp_file_path)
# Read the file and get the result of the command and all the log files
file_lines = [line.replace("\n", "") for line in file_lines]
fstream_tmp.close()
os.remove(tmp_file_path)
- # The first line is the result of the command (0 success or 1 fail)
- self.res_job = file_lines[0]
+
+ try :
+ # The first line is the result of the command (0 success or 1 fail)
+ self.res_job = file_lines[0]
+ except Exception as e:
+ self.err += _("Unable to get status from remote file %s: %s" %
+ (remote_path, str(e)))
for i, job_path_remote in enumerate(file_lines[1:]):
try:
# internal traces.
# 2- The txt file containing the system command traces (like
# traces produced by the "make" command)
- if os.path.basename(os.path.dirname(job_path_remote)) != 'OUT':
+ # 3- In case of the test command, there is another file to get :
+ # the xml board that contain the test results
+ dirname = os.path.basename(os.path.dirname(job_path_remote))
+ if dirname != 'OUT' and dirname != 'TEST':
# Case 1-
local_path = os.path.join(os.path.dirname(
self.logger.logFilePath),
"job",
self.res_job,
self.command)
- else:
+ elif dirname == 'OUT':
# Case 2-
local_path = os.path.join(os.path.dirname(
self.logger.logFilePath),
'OUT',
os.path.basename(job_path_remote))
+ elif dirname == 'TEST':
+ # Case 3-
+ local_path = os.path.join(os.path.dirname(
+ self.logger.logFilePath),
+ 'TEST',
+ os.path.basename(job_path_remote))
+
# Get the file
if not os.path.exists(local_path):
self.machine.sftp.get(job_path_remote, local_path)
self.remote_log_files.append(local_path)
except Exception as e:
self.err += _("Unable to get %s log file from remote: %s" %
- (job_path_remote, str(e)))
+ (str(job_path_remote),
+ str(e)))
def has_failed(self):
'''Returns True if the job has failed.
self._has_begun = True
self._has_finished = True
self.cancelled = True
- self.out = _("This job was not launched because its father has failed.")
- self.err = _("This job was not launched because its father has failed.")
+ self.out += _("This job was not launched because its father has failed.")
+ self.err += _("This job was not launched because its father has failed.")
def is_running(self):
'''Returns True if the job commands are running
self._Tf = time.time()
self.get_pids()
(out_kill, _) = self.kill_remote_process()
- self.out = "TIMEOUT \n" + out_kill.read().decode()
- self.err = "TIMEOUT : %s seconds elapsed\n" % str(self.timeout)
+ self.out += "TIMEOUT \n" + out_kill.read().decode()
+ self.err += "TIMEOUT : %s seconds elapsed\n" % str(self.timeout)
try:
self.get_log_files()
except Exception as e:
if not self.machine.successfully_connected(self.logger):
self._has_finished = True
self.out = "N\A"
- self.err = ("Connection to machine (name : %s, host: %s, port:"
+ self.err += ("Connection to machine (name : %s, host: %s, port:"
" %s, user: %s) has failed\nUse the log command "
"to get more information."
% (self.machine.name,
# Usual case : Launch the command on remote machine
self._T0 = time.time()
self._stdin, self._stdout, self._stderr = self.machine.exec_command(
- self.command, self.logger)
+ self.command,
+ self.logger)
# If the results are not initialized, finish the job
if (self._stdin, self._stdout, self._stderr) == (None, None, None):
self._has_finished = True
self._Tf = time.time()
- self.out = "N\A"
- self.err = "The server failed to execute the command"
+ self.out += "N\A"
+ self.err += "The server failed to execute the command"
# Put the beginning flag to true.
self._has_begun = True
self.machine.write_info(self.logger)
self.logger.write(src.printcolors.printcInfo("out : \n"))
- if self.out is None:
+ if self.out == "":
self.logger.write("Unable to get output\n")
else:
self.logger.write(self.out + "\n")
self.logger.write(src.printcolors.printcInfo("err : \n"))
- if self.err is None:
- self.logger.write("Unable to get error\n")
- else:
- self.logger.write(self.err + "\n")
+ self.logger.write(self.err + "\n")
def get_status(self):
"""Get the status of the job (used by the Gui for xml display)
def __init__(self,
runner,
logger,
- job_file,
job_file_path,
config_jobs,
lenght_columns = 20):
# The jobs configuration
self.cfg_jobs = config_jobs
- self.job_file = job_file
self.job_file_path = job_file_path
# The machine that will be used today
self.lmachines = []
'''
name = job_def.name
cmmnds = job_def.commands
- timeout = job_def.timeout
+ if not "timeout" in job_def:
+ timeout = 4*60*60 # default timeout = 4h
+ else:
+ timeout = job_def.timeout
after = None
if 'after' in job_def:
after = job_def.after
timeout,
self.runner.cfg,
self.logger,
- self.job_file,
after = after)
def determine_jobs_and_machines(self):
jb_before = self.find_job_that_has_name(jb.after)
if jb_before is None:
jb.cancel()
- msg = _("This job was not launched because its father is not in the jobs list.")
+ msg = _("This job was not launched because its "
+ "father is not in the jobs list.")
jb.out = msg
jb.err = msg
break
new_job_finished = self.update_jobs_states_list()
if new_job_start or new_job_finished:
- self.gui.update_xml_files(self.ljobs)
+ if self.gui:
+ self.gui.update_xml_files(self.ljobs)
# Display the current status
self.display_status(self.len_columns)
self.logger.write(tiret_line)
self.logger.write("\n\n")
- self.gui.update_xml_files(self.ljobs)
- self.gui.last_update()
+ if self.gui:
+ self.gui.update_xml_files(self.ljobs)
+ self.gui.last_update()
def write_all_results(self):
'''Display all the jobs outputs.
see the jobs states
'''
- def __init__(self, xml_dir_path, l_jobs, l_jobs_not_today):
+ def __init__(self, xml_dir_path, l_jobs, l_jobs_not_today, file_boards=""):
'''Initialization
:param xml_dir_path str: The path to the directory where to put
the xml resulting files
:param l_jobs List: the list of jobs that run today
:param l_jobs_not_today List: the list of jobs that do not run today
+ :param file_boards str: the file path from which to read the
+ expected boards
'''
+ # The path of the csv files to read to fill the expected boards
+ self.file_boards = file_boards
+
+        # Parse the expected boards only if a csv file was actually given;
+        # otherwise open("") would fail in parse_csv_boards
+        if file_boards:
+            today = datetime.date.weekday(datetime.date.today())
+            self.parse_csv_boards(today)
+        else:
+            self.d_input_boards = {}
+
# The path of the global xml file
self.xml_dir_path = xml_dir_path
# Initialize the xml files
# {name_board : xml_object}}
self.d_xml_board_files = {}
# Create the lines and columns
- self.initialize_arrays(l_jobs, l_jobs_not_today)
+ self.initialize_boards(l_jobs, l_jobs_not_today)
+
# Write the xml file
self.update_xml_files(l_jobs)
- def initialize_arrays(self, l_jobs, l_jobs_not_today):
+ def add_xml_board(self, name):
+ xml_board_path = os.path.join(self.xml_dir_path, name + ".xml")
+ self.d_xml_board_files[name] = src.xmlManager.XmlLogFile(
+ xml_board_path,
+ "JobsReport")
+ self.d_xml_board_files[name].add_simple_node("distributions")
+ self.d_xml_board_files[name].add_simple_node("applications")
+ self.d_xml_board_files[name].add_simple_node("board", text=name)
+
+ def initialize_boards(self, l_jobs, l_jobs_not_today):
'''Get all the first information needed for each file and write the
first version of the files
:param l_jobs List: the list of jobs that run today
for job in l_jobs + l_jobs_not_today:
board = job.board
if (board is not None and
- board not in self.d_xml_board_files.keys()):
- xml_board_path = os.path.join(self.xml_dir_path, board + ".xml")
- self.d_xml_board_files[board] = src.xmlManager.XmlLogFile(
- xml_board_path,
- "JobsReport")
- self.d_xml_board_files[board].add_simple_node("distributions")
- self.d_xml_board_files[board].add_simple_node("applications")
- self.d_xml_board_files[board].add_simple_node("board", text=board)
+ board not in self.d_xml_board_files.keys()):
+ self.add_xml_board(board)
+
+ # Verify that the boards given as input are done
+ for board in list(self.d_input_boards.keys()):
+ if board not in self.d_xml_board_files:
+ self.add_xml_board(board)
+ root_node = self.d_xml_board_files[board].xmlroot
+ src.xmlManager.append_node_attrib(root_node,
+ {"input_file" : self.file_boards})
# Loop over all jobs in order to get the lines and columns for each
# xml file
if distrib is not None and distrib not in d_dist[board]:
d_dist[board].append(distrib)
src.xmlManager.add_simple_node(
- self.d_xml_board_files[board].xmlroot.find('distributions'),
+ self.d_xml_board_files[board].xmlroot.find(
+ 'distributions'),
"dist",
attrib={"name" : distrib})
application not in d_application[board]):
d_application[board].append(application)
src.xmlManager.add_simple_node(
- self.d_xml_board_files[board].xmlroot.find('applications'),
+ self.d_xml_board_files[board].xmlroot.find(
+ 'applications'),
"application",
- attrib={"name" : application})
-
+ attrib={
+ "name" : application})
+
+ # Verify that there are no missing application or distribution in the
+ # xml board files (regarding the input boards)
+ for board in self.d_xml_board_files:
+ l_dist = d_dist[board]
+ if board not in self.d_input_boards.keys():
+ continue
+ for dist in self.d_input_boards[board]["rows"]:
+ if dist not in l_dist:
+ src.xmlManager.add_simple_node(
+ self.d_xml_board_files[board].xmlroot.find(
+ 'distributions'),
+ "dist",
+ attrib={"name" : dist})
+ l_appli = d_application[board]
+ for appli in self.d_input_boards[board]["columns"]:
+ if appli not in l_appli:
+ src.xmlManager.add_simple_node(
+ self.d_xml_board_files[board].xmlroot.find(
+ 'applications'),
+ "application",
+ attrib={"name" : appli})
+
# Initialize the hosts_ports node for the global file
- self.xmlhosts_ports = self.xml_global_file.add_simple_node("hosts_ports")
+ self.xmlhosts_ports = self.xml_global_file.add_simple_node(
+ "hosts_ports")
for host, port in l_hosts_ports:
host_port = "%s:%i" % (host, port)
src.xmlManager.add_simple_node(self.xmlhosts_ports,
attrib={"name" : host_port})
# Initialize the jobs node in all files
- for xml_file in [self.xml_global_file] + list(self.d_xml_board_files.values()):
+ for xml_file in [self.xml_global_file] + list(
+ self.d_xml_board_files.values()):
xml_jobs = xml_file.add_simple_node("jobs")
# Get the jobs present in the config file but
# that will not be launched today
xml_file.add_simple_node("infos",
attrib={"name" : "last update",
"JobsCommandStatus" : "running"})
-
+
+ # Find in each board the squares that needs to be filled regarding the
+ # input csv files but that are not covered by a today job
+ for board in self.d_input_boards.keys():
+ xml_root_board = self.d_xml_board_files[board].xmlroot
+ xml_missing = src.xmlManager.add_simple_node(xml_root_board,
+ "missing_jobs")
+ for row, column in self.d_input_boards[board]["jobs"]:
+ found = False
+ for job in l_jobs:
+ if (job.application == column and
+ job.machine.distribution == row):
+ found = True
+ break
+ if not found:
+ src.xmlManager.add_simple_node(xml_missing,
+ "job",
+ attrib={"distribution" : row,
+ "application" : column })
def put_jobs_not_today(self, l_jobs_not_today, xml_node_jobs):
'''Get all the first information needed for each file and write the
src.xmlManager.add_simple_node(xmlj, "user", job.machine.user)
src.xmlManager.add_simple_node(xmlj, "sat_path",
job.machine.sat_path)
-
+
+ def parse_csv_boards(self, today):
+ """ Parse the csv file that describes the boards to produce and fill
+        the dict d_input_boards that contains the csv file content
+
+ :param today int: the current day of the week
+ """
+ # open the csv file and read its content
+ l_read = []
+ with open(self.file_boards, 'r') as f:
+ reader = csv.reader(f,delimiter=CSV_DELIMITER)
+ for row in reader:
+ l_read.append(row)
+ # get the delimiter for the boards (empty line)
+ boards_delimiter = [''] * len(l_read[0])
+ # Make the list of boards, by splitting with the delimiter
+ l_boards = [list(y) for x, y in itertools.groupby(l_read,
+ lambda z: z == boards_delimiter) if not x]
+
+ # loop over the csv lists of lines and get the rows, columns and jobs
+ d_boards = {}
+ for input_board in l_boards:
+ # get board name
+ board_name = input_board[0][0]
+
+ # Get columns list
+ columns = input_board[0][1:]
+
+ rows = []
+ jobs = []
+ for line in input_board[1:]:
+ row = line[0]
+ for i, square in enumerate(line[1:]):
+ if square=='':
+ continue
+ days = square.split(DAYS_SEPARATOR)
+ days = [int(day) for day in days]
+ if today in days:
+ if row not in rows:
+ rows.append(row)
+ job = (row, columns[i])
+ jobs.append(job)
+
+ d_boards[board_name] = {"rows" : rows,
+ "columns" : columns,
+ "jobs" : jobs}
+
+ self.d_input_boards = d_boards
+
def update_xml_files(self, l_jobs):
'''Write all the xml files with updated information about the jobs
:param l_jobs List: the list of jobs that run today
'''
- for xml_file in [self.xml_global_file] + list(self.d_xml_board_files.values()):
+ for xml_file in [self.xml_global_file] + list(
+ self.d_xml_board_files.values()):
self.update_xml_file(l_jobs, xml_file)
# Write the file
link = "nothing"
src.xmlManager.append_node_attrib(xmlafter, {"link" : link})
+ # Verify that the job is to be done today regarding the input csv
+ # files
+ if job.board and job.board in self.d_input_boards.keys():
+ found = False
+ for dist, appli in self.d_input_boards[job.board]["jobs"]:
+ if (job.machine.distribution == dist
+ and job.application == appli):
+ found = True
+ src.xmlManager.add_simple_node(xmlj,
+ "extra_job",
+ "no")
+ break
+ if not found:
+ src.xmlManager.add_simple_node(xmlj,
+ "extra_job",
+ "yes")
+
# Update the date
xml_node_infos = xml_file.xmlroot.find('infos')
def run(args, runner, logger):
(options, args) = parser.parse_args(args)
-
- jobs_cfg_files_dir = runner.cfg.SITE.jobs.config_path
-
- l_cfg_dir = [os.path.join(runner.cfg.VARS.datadir, "jobs"),
- jobs_cfg_files_dir]
-
- # Make sure the path to the jobs config files directory exists
- src.ensure_path_exists(jobs_cfg_files_dir)
+
+ l_cfg_dir = runner.cfg.PATHS.JOBPATH
# list option : display all the available config files
if options.list:
(_("File containing the jobs configuration"), file_jobs_cfg)
]
src.print_info(logger, info)
-
+
# Read the config that is in the file
config_jobs = src.read_config_from_a_file(file_jobs_cfg)
if options.only_jobs:
# Initialization
today_jobs = Jobs(runner,
logger,
- options.jobs_cfg,
file_jobs_cfg,
config_jobs)
# SSH connection to all machines
gui = None
if options.publish:
- gui = Gui(runner.cfg.SITE.log.log_dir,
+ # Copy the stylesheets in the log directory
+ log_dir = runner.cfg.USER.log_dir
+ xsl_dir = os.path.join(runner.cfg.VARS.srcDir, 'xsl')
+ files_to_copy = []
+ files_to_copy.append(os.path.join(xsl_dir, STYLESHEET_GLOBAL))
+ files_to_copy.append(os.path.join(xsl_dir, STYLESHEET_BOARD))
+ files_to_copy.append(os.path.join(xsl_dir, "running.gif"))
+ for file_path in files_to_copy:
+ shutil.copy2(file_path, log_dir)
+
+    # Instantiate the Gui in order to produce the xml files that contain all
+ # the boards
+ gui = Gui(runner.cfg.USER.log_dir,
today_jobs.ljobs,
- today_jobs.ljobs_not_today,)
+ today_jobs.ljobs_not_today,
+ file_boards = options.input_boards)
+
+ # Display the list of the xml files
+ logger.write(src.printcolors.printcInfo(("Here is the list of published"
+ " files :\n")), 4)
+ logger.write("%s\n" % gui.xml_global_file.logFile, 4)
+ for board in gui.d_xml_board_files.keys():
+ file_path = gui.d_xml_board_files[board].logFile
+ file_name = os.path.basename(file_path)
+ logger.write("%s\n" % file_path, 4)
+ logger.add_link(file_name, "board", 0, board)
+
+ logger.write("\n", 4)
today_jobs.gui = gui
# find the potential not finished jobs and kill them
for jb in today_jobs.ljobs:
if not jb.has_finished():
- jb.kill_remote_process()
+ try:
+ jb.kill_remote_process()
+ except Exception as e:
+ msg = _("Failed to kill job %s: %s\n" % (jb.name, e))
+ logger.write(src.printcolors.printcWarning(msg))
if interruped:
- today_jobs.gui.last_update(_("Forced interruption"))
+ if today_jobs.gui:
+ today_jobs.gui.last_update(_("Forced interruption"))
else:
- today_jobs.gui.last_update()
+ if today_jobs.gui:
+ today_jobs.gui.last_update()
# Output the results
today_jobs.write_all_results()