self.put_dir(sat_local_path, self.sat_path, filters = ['.git'])
# put the job configuration file in order to make it reachable
# on the remote machine
- self.sftp.put(job_file, os.path.join(".salomeTools",
- "Jobs",
- ".jobs_command_file.pyconf"))
+ remote_job_file_name = ".%s" % os.path.basename(job_file)
+ self.sftp.put(job_file, os.path.join(self.sat_path,
+ remote_job_file_name))
except Exception as e:
res = str(e)
self._connection_successful = False
class Job(object):
'''Class to manage one job
'''
- def __init__(self, name, machine, application, board,
- commands, timeout, config, logger, after=None):
+ def __init__(self,
+ name,
+ machine,
+ application,
+ board,
+ commands,
+ timeout,
+ config,
+ job_file_path,
+ logger,
+ after=None,
+ prefix=None):
self.name = name
self.machine = machine
self.out = ""
self.err = ""
-
+
+ self.name_remote_jobs_pyconf = ".%s" % os.path.basename(job_file_path)
self.commands = commands
self.command = (os.path.join(self.machine.sat_path, "sat") +
" -l " +
os.path.join(self.machine.sat_path,
"list_log_files.txt") +
- " job --jobs_config .jobs_command_file" +
+ " job --jobs_config " +
+ os.path.join(self.machine.sat_path,
+ self.name_remote_jobs_pyconf) +
" --name " +
self.name)
+ if prefix:
+ self.command = prefix + ' "' + self.command +'"'
def get_pids(self):
""" Get the pid(s) corresponding to the command that have been launched
board = None
if 'board' in job_def:
board = job_def.board
+ prefix = None
+ if "prefix" in job_def:
+ prefix = job_def.prefix
return Job(name,
machine,
cmmnds,
timeout,
self.runner.cfg,
+ self.job_file_path,
self.logger,
- after = after)
+ after = after,
+ prefix = prefix)
def determine_jobs_and_machines(self):
'''Function that reads the pyconf jobs definition and instantiates all
# Copy salomeTools to the remote machine
if machine.successfully_connected(self.logger):
+ step = _("Remove SAT")
+ self.logger.write('\r%s%s%s' % (begin_line, endline, 20 * " "),3)
+ self.logger.write('\r%s%s%s' % (begin_line, endline, step), 3)
+ (__, out_dist, __) = machine.exec_command(
+ "rm -rf %s" % machine.sat_path,
+ self.logger)
+ out_dist.read()
+
+ self.logger.flush()
step = _("Copy SAT")
self.logger.write('\r%s%s%s' % (begin_line, endline, 20 * " "),3)
self.logger.write('\r%s%s%s' % (begin_line, endline, step), 3)
see the jobs states
'''
- def __init__(self, xml_dir_path, l_jobs, l_jobs_not_today, prefix, file_boards=""):
+ def __init__(self,
+ xml_dir_path,
+ l_jobs,
+ l_jobs_not_today,
+ prefix,
+ logger,
+ file_boards=""):
'''Initialization
:param xml_dir_path str: The path to the directory where to put
:param file_boards str: the file path from which to read the
expected boards
'''
+ # The logging instance
+ self.logger = logger
+
# The prefix to add to the xml files : date_hour
self.prefix = prefix
# Create the lines and columns
self.initialize_boards(l_jobs, l_jobs_not_today)
-
+
# Write the xml file
self.update_xml_files(l_jobs)
for file_name in os.listdir(self.xml_dir_path):
if oExpr.search(file_name):
file_path = os.path.join(self.xml_dir_path, file_name)
- global_xml = src.xmlManager.ReadXmlFile(file_path)
- l_globalxml.append(global_xml)
+ try:
+ global_xml = src.xmlManager.ReadXmlFile(file_path)
+ l_globalxml.append(global_xml)
+ except Exception as e:
+ msg = _("\nWARNING: the file %s can not be read, it will be "
+ "ignored\n%s" % (file_path, e))
+ self.logger.write("%s\n" % src.printcolors.printcWarning(
+ msg), 5)
+
# Construct the dictionnary self.history
for job in l_jobs + l_jobs_not_today:
res_job = job_node.find("res").text
if link != "nothing":
l_links.append((date, res_job, link))
-
+ l_links = sorted(l_links, reverse=True)
self.history[job.name] = l_links
def put_jobs_not_today(self, l_jobs_not_today, xml_node_jobs):
src.xmlManager.add_simple_node(xmlj, "sat_path",
job.machine.sat_path)
xml_history = src.xmlManager.add_simple_node(xmlj, "history")
- for date, res_job, link in self.history[job.name]:
- src.xmlManager.add_simple_node(xml_history,
- "link",
- text=link,
- attrib={"date" : date,
- "res" : res_job})
+ for i, (date, res_job, link) in enumerate(self.history[job.name]):
+ if i==0:
+                # tag the first entry (the most recent link, since the
+                # history is sorted newest-first)
+ src.xmlManager.add_simple_node(xml_history,
+ "link",
+ text=link,
+ attrib={"date" : date,
+ "res" : res_job,
+ "last" : "yes"})
+ else:
+ src.xmlManager.add_simple_node(xml_history,
+ "link",
+ text=link,
+ attrib={"date" : date,
+ "res" : res_job,
+ "last" : "no"})
def parse_csv_boards(self, today):
""" Parse the csv file that describes the boards to produce and fill
src.xmlManager.add_simple_node(xmlj,
"remote_log_file_path",
"nothing")
+ # Search for the test log if there is any
+ l_test_log_files = self.find_test_log(job.remote_log_files)
+ xml_test = src.xmlManager.add_simple_node(xmlj,
+ "test_log_file_path")
+ for test_log_path in l_test_log_files:
+ src.xmlManager.add_simple_node(xml_test, "path", test_log_path)
xmlafter = src.xmlManager.add_simple_node(xmlj, "after", job.after)
# get the job father
datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")})
+ def find_test_log(self, l_remote_log_files):
+ '''Find if there is a test log (board) in the remote log files and
+        the path to it. There can be several test commands, so the result is
+ a list.
+
+ :param l_remote_log_files List: the list of all remote log files
+ :return: the list of test log files path
+ :rtype: List
+ '''
+ res = []
+ for file_path in l_remote_log_files:
+ dirname = os.path.basename(os.path.dirname(file_path))
+ if dirname == "TEST":
+ res.append(file_path)
+ return res
def last_update(self, finish_status = "finished"):
'''update information about the jobs for the file xml_file
if not options.no_label:
logger.write("------ %s\n" %
src.printcolors.printcHeader(cfg_dir))
-
+ if not os.path.exists(cfg_dir):
+ continue
for f in sorted(os.listdir(cfg_dir)):
if not f.endswith('.pyconf'):
continue
src.printcolors.printcError(message)
return 1
- # Find the file in the directories
+ # Find the file in the directories, unless it is a full path
found = False
- for cfg_dir in l_cfg_dir:
- file_jobs_cfg = os.path.join(cfg_dir, options.jobs_cfg)
- if not file_jobs_cfg.endswith('.pyconf'):
- file_jobs_cfg += '.pyconf'
-
- if not os.path.exists(file_jobs_cfg):
- continue
- else:
- found = True
- break
+ if os.path.exists(options.jobs_cfg):
+ found = True
+ file_jobs_cfg = options.jobs_cfg
+ else:
+ for cfg_dir in l_cfg_dir:
+ file_jobs_cfg = os.path.join(cfg_dir, options.jobs_cfg)
+ if not file_jobs_cfg.endswith('.pyconf'):
+ file_jobs_cfg += '.pyconf'
+
+ if not os.path.exists(file_jobs_cfg):
+ continue
+ else:
+ found = True
+ break
if not found:
msg = _("The file configuration %(name_file)s was not found."
gui = None
if options.publish:
+ logger.write(src.printcolors.printcInfo(
+ _("Initialize the xml boards : ")), 5)
+ logger.flush()
+
# Copy the stylesheets in the log directory
log_dir = runner.cfg.USER.log_dir
xsl_dir = os.path.join(runner.cfg.VARS.srcDir, 'xsl')
today_jobs.ljobs,
today_jobs.ljobs_not_today,
runner.cfg.VARS.datehour,
+ logger,
file_boards = options.input_boards)
+ logger.write(src.printcolors.printcSuccess("OK"), 5)
+ logger.write("\n\n", 5)
+ logger.flush()
+
# Display the list of the xml files
logger.write(src.printcolors.printcInfo(("Here is the list of published"
" files :\n")), 4)
file_name = os.path.basename(file_path)
logger.write("%s\n" % file_path, 4)
logger.add_link(file_name, "board", 0, board)
-
+
logger.write("\n", 4)
-
+
today_jobs.gui = gui
interruped = False
logger.write("\n\n%s\n\n" %
(src.printcolors.printcWarning(_("Forced interruption"))), 1)
finally:
+ res = 0
if interruped:
+ res = 1
msg = _("Killing the running jobs and trying"
" to get the corresponding logs\n")
logger.write(src.printcolors.printcWarning(msg))
# find the potential not finished jobs and kill them
for jb in today_jobs.ljobs:
if not jb.has_finished():
+ res = 1
try:
jb.kill_remote_process()
except Exception as e:
msg = _("Failed to kill job %s: %s\n" % (jb.name, e))
logger.write(src.printcolors.printcWarning(msg))
+ if jb.res_job != "0":
+ res = 1
if interruped:
if today_jobs.gui:
today_jobs.gui.last_update(_("Forced interruption"))
today_jobs.gui.last_update()
# Output the results
today_jobs.write_all_results()
+ return res