3 # Copyright (C) 2010-2013 CEA/DEN
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 # Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
29 import src.ElementTree as etree
# XSL stylesheets applied to the generated XML reports so they can be
# rendered in a browser (global summary vs. per-board view).
STYLESHEET_GLOBAL = "jobs_global_report.xsl"
STYLESHEET_BOARD = "jobs_board_report.xsl"
# Command-line options of the "jobs" command.
# NOTE(review): several add_option calls below appear truncated by
# extraction (missing default-value arguments and closing parentheses);
# confirm against the original file before editing.
parser = src.options.Options()

parser.add_option('n', 'name', 'list2', 'jobs_cfg',
                  _('Mandatory: The name of the config file that contains'
                  ' the jobs configuration. Can be a list.'))
parser.add_option('o', 'only_jobs', 'list2', 'only_jobs',
                  _('Optional: the list of jobs to launch, by their name. '))
parser.add_option('l', 'list', 'boolean', 'list',
                  _('Optional: list all available config files.'))
parser.add_option('t', 'test_connection', 'boolean', 'test_connection',
                  _("Optional: try to connect to the machines. "
                    "Not executing the jobs."),
parser.add_option('p', 'publish', 'boolean', 'publish',
                  _("Optional: generate an xml file that can be read in a "
                    "browser to display the jobs status."),
parser.add_option('i', 'input_boards', 'string', 'input_boards', _("Optional: "
                                "the path to csv file that contain "
                                "the expected boards."),"")
parser.add_option('', 'completion', 'boolean', 'no_label',
                  _("Optional (internal use): do not print labels, Works only "
class Machine(object):
    '''Class to manage a ssh connection on a machine
    '''
    # NOTE(review): the "def __init__(...)" signature lines are missing from
    # this extract; only the last parameter and part of the body are visible.
                 sat_path="salomeTools"):
        # Filled after copying SAT on the machine (see
        # ssh_connection_all_machines, which runs "sat config --value VARS.dist").
        self.distribution = None # Will be filled after copying SAT on the machine
        self.password = passwd
        self.sat_path = sat_path
        self.ssh = paramiko.SSHClient()
        # None until connect() has been called; then True/False
        # (queried through successfully_connected()).
        self._connection_successful = None
    def connect(self, logger):
        '''Initiate the ssh connection to the remote machine

        :param logger src.logger.Logger: The logger instance
        '''
        self._connection_successful = False
        self.ssh.load_system_host_keys()
        # Auto-accept unknown host keys (no interactive prompt on first contact).
        self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        # NOTE(review): the enclosing "try:" line and part of the connect()
        # arguments (port/username) are missing from this extract.
            self.ssh.connect(self.host,
                             password = self.password)
        except paramiko.AuthenticationException:
            message = src.KO_STATUS + _("Authentication failed")
        except paramiko.BadHostKeyException:
            message = (src.KO_STATUS +
                       _("The server's host key could not be verified"))
        except paramiko.SSHException:
            message = ( _("SSHException error connecting or "
                          "establishing an SSH session"))
            # NOTE(review): a generic "except Exception:" header appears
            # missing above this line.
            message = ( _("Error connecting or establishing an SSH session"))
        # Reached only when no exception was raised (presumably via an else:
        # clause lost in extraction — TODO confirm).
        self._connection_successful = True
113 def successfully_connected(self, logger):
114 '''Verify if the connection to the remote machine has succeed
116 :param logger src.logger.Logger: The logger instance
117 :return: True if the connection has succeed, False if not
120 if self._connection_successful == None:
121 message = _("Warning : trying to ask if the connection to "
122 "(name: %s host: %s, port: %s, user: %s) is OK whereas there were"
123 " no connection request" %
124 (self.name, self.host, self.port, self.user))
125 logger.write( src.printcolors.printcWarning(message))
126 return self._connection_successful
    def copy_sat(self, sat_local_path, job_file):
        '''Copy salomeTools to the remote machine in self.sat_path
        '''
        # NOTE(review): the "try:" line wrapping this body is missing from
        # this extract (an except clause appears below).
        # open a sftp connection
        self.sftp = self.ssh.open_sftp()
        # Create the sat directory on remote machine if it is not existing
        self.mkdir(self.sat_path, ignore_existing=True)
        # Upload SAT itself, skipping the '.git' metadata directory.
        self.put_dir(sat_local_path, self.sat_path, filters = ['.git'])
        # put the job configuration file in order to make it reachable
        # on the remote machine
        # The remote copy is hidden (dot-prefixed basename).
        remote_job_file_name = ".%s" % os.path.basename(job_file)
        self.sftp.put(job_file, os.path.join(self.sat_path,
                                             remote_job_file_name))
        except Exception as e:
            # Any transfer failure marks the whole connection as unusable.
            self._connection_successful = False
    def put_dir(self, source, target, filters = []):
        ''' Uploads the contents of the source directory to the target path. The
            target directory needs to exists. All sub-directories in source are
            created under target.
        '''
        # NOTE(review): mutable default argument "filters=[]" is a known
        # Python pitfall — left unchanged in this documentation-only pass.
        # Also, the lines applying "filters" and the else:/directory branch
        # headers appear missing from this extract.
        for item in os.listdir(source):
            source_path = os.path.join(source, item)
            destination_path = os.path.join(target, item)
            if os.path.islink(source_path):
                # Recreate the symlink remotely and mirror the local mode bits.
                linkto = os.readlink(source_path)
                self.sftp.symlink(linkto, destination_path)
                self.sftp.chmod(destination_path,
                                os.stat(source_path).st_mode)
            if os.path.isfile(source_path):
                # Plain file: upload and mirror the local mode bits.
                self.sftp.put(source_path, destination_path)
                self.sftp.chmod(destination_path,
                                os.stat(source_path).st_mode)
                # Presumably the directory branch (else:) — create remotely
                # and recurse. TODO confirm against the original file.
                self.mkdir(destination_path, ignore_existing=True)
                self.put_dir(source_path, destination_path)
    def mkdir(self, path, mode=511, ignore_existing=False):
        ''' Augments mkdir by adding an option to not fail
            if the folder exists.

        NOTE(review): the try/except implementing "ignore_existing" appears
        missing from this extract; only the raw mkdir call is visible.
        '''
        self.sftp.mkdir(path, mode)
    def exec_command(self, command, logger):
        '''Execute the command on the remote machine

        :param command str: The command to be run
        :param logger src.logger.Logger: The logger instance
        :return: the stdin, stdout, and stderr of the executing command,
        :rtype: (paramiko.channel.ChannelFile, paramiko.channel.ChannelFile,
                paramiko.channel.ChannelFile)
        '''
        # NOTE(review): the "try:" line is missing from this extract.
            # Does not wait the end of the command
            (stdin, stdout, stderr) = self.ssh.exec_command(command)
        except paramiko.SSHException:
            # Server-side failure: report and signal with a (None,None,None)
            # triple instead of raising.
            message = src.KO_STATUS + _(
                            ": the server failed to execute the command\n")
            logger.write( src.printcolors.printcError(message))
            return (None, None, None)
            # NOTE(review): a second except-clause header appears missing here.
            logger.write( src.printcolors.printcError(src.KO_STATUS + '\n'))
            return (None, None, None)
            return (stdin, stdout, stderr)
214 '''Close the ssh connection
    def write_info(self, logger):
        '''Prints the informations relative to the machine in the logger
        (terminal traces and log file)

        :param logger src.logger.Logger: The logger instance
        '''
        # NOTE(review): the line printing the machine name appears missing
        # from this extract.
        logger.write("host : " + self.host + "\n")
        logger.write("port : " + str(self.port) + "\n")
        logger.write("user : " + str(self.user) + "\n")
        if self.successfully_connected(logger):
            status = src.OK_STATUS
            # NOTE(review): an "else:" header appears missing above this line.
            status = src.KO_STATUS
        logger.write("Connection : " + status + "\n\n")
    '''Class to manage one job
    '''
    # NOTE(review): the enclosing "class Job" statement and the beginning of
    # the constructor signature/body are missing from this extract.
        self.machine = machine
        self.timeout = timeout
        self.application = application

        # The list of log files to download from the remote machine
        self.remote_log_files = []

        # The remote command status
        # -1 means that it has not been launched,
        # 0 means success and 1 means fail
        self.cancelled = False

        # Internal life-cycle flags, driven by run()/has_finished()/check_time().
        self._has_begun = False
        self._has_finished = False
        self._has_timouted = False
        self._stdin = None # Store the command inputs field
        self._stdout = None # Store the command outputs field
        self._stderr = None # Store the command errors field

        # Name of the hidden (dot-prefixed) copy of the jobs pyconf that
        # copy_sat() uploads to the remote machine.
        self.name_remote_jobs_pyconf = ".%s" % os.path.basename(job_file_path)
        self.commands = commands
        # Full remote "sat job" command line; several argument pieces are
        # missing from this extract.
        self.command = (os.path.join(self.machine.sat_path, "sat") +
                        os.path.join(self.machine.sat_path,
                                     "list_log_files.txt") +
                        " job --jobs_config " +
                        os.path.join(self.machine.sat_path,
                                     self.name_remote_jobs_pyconf) +
            # When a prefix is configured the whole command is wrapped and
            # quoted (the guard line appears missing above).
            self.command = prefix + ' "' + self.command +'"'
    # NOTE(review): the "def get_pids(self):" line is missing from this extract.
        """ Get the pid(s) corresponding to the command that have been launched
            On the remote machine

        :return: The list of integers corresponding to the found pids
        """
        # grep the full command line in ps output and keep only the pid column
        cmd_pid = 'ps aux | grep "' + self.command + '" | awk \'{print $2}\''
        (_, out_pid, _) = self.machine.exec_command(cmd_pid, self.logger)
        pids_cmd = out_pid.readlines()
        # Keep only the numeric part of each line.
        pids_cmd = [str(src.only_numbers(pid)) for pid in pids_cmd]
        # NOTE(review): the final return statement appears missing from this
        # extract.
    def kill_remote_process(self, wait=1):
        '''Kills the process on the remote machine.

        :return: (the output of the kill, the error of the kill)
        '''
        pids = self.get_pids()
        # Send SIGINT (kill -2) to every pid found for the command.
        cmd_kill = " ; ".join([("kill -2 " + pid) for pid in pids])
        # NOTE(review): the continuation line of this call (logger argument
        # and any time.sleep(wait)) appears missing from this extract.
        (_, out_kill, err_kill) = self.machine.exec_command(cmd_kill,
        return (out_kill, err_kill)
    # NOTE(review): the "def has_begun(self):" line is missing from this extract.
        '''Returns True if the job has already begun

        :return: True if the job has already begun
        '''
        return self._has_begun
    def has_finished(self):
        '''Returns True if the job has already finished
        (i.e. all the commands have been executed)
        If it is finished, the outputs are stored in the fields out and err.

        :return: True if the job has already finished
        '''
        # NOTE(review): the early-return bodies of the next two guards appear
        # missing from this extract.
        # If the method has already been called and returned True
        if self._has_finished:
        # If the job has not begun yet
        if not self.has_begun():
        # The job is considered done once the remote channel is closed.
        if self._stdout.channel.closed:
            self._has_finished = True
            # Store the result outputs
            self.out += self._stdout.read().decode()
            self.err += self._stderr.read().decode()
            # Record the end time.
            self._Tf = time.time()
            # And get the remote command status and log files
        return self._has_finished
    def get_log_files(self):
        """Get the log files produced by the command launched
           on the remote machine, and put it in the log directory of the user,
           so they can be accessible from
        """
        # NOTE(review): several lines (try: headers, sftp.get arguments,
        # add_link arguments, local_path suffixes) appear missing from this
        # extract; the structure below is partial.
        # Do not get the files if the command is not finished
        if not self.has_finished():
            msg = _("Trying to get log files whereas the job is not finished.")
            self.logger.write(src.printcolors.printcWarning(msg))

        # First get the file that contains the list of log files to get
        tmp_file_path = src.get_tmp_filename(self.config, "list_log_files.txt")
        remote_path = os.path.join(self.machine.sat_path, "list_log_files.txt")
        self.machine.sftp.get(

        # Read the file and get the result of the command and all the log files
        fstream_tmp = open(tmp_file_path, "r")
        file_lines = fstream_tmp.readlines()
        file_lines = [line.replace("\n", "") for line in file_lines]
        # The temporary local copy is no longer needed.
        os.remove(tmp_file_path)

        # The first line is the result of the command (0 success or 1 fail)
        self.res_job = file_lines[0]
        except Exception as e:
            self.err += _("Unable to get status from remote file %s: %s" %
                          (remote_path, str(e)))

        for i, job_path_remote in enumerate(file_lines[1:]):
            # For each command, there is two files to get :
            # 1- The xml file describing the command and giving the
            # 2- The txt file containing the system command traces (like
            #    traces produced by the "make" command)
            # 3- In case of the test command, there is another file to get :
            #    the xml board that contain the test results
            dirname = os.path.basename(os.path.dirname(job_path_remote))
            if dirname != 'OUT' and dirname != 'TEST':
                # Case 1- : the log file is an xml file
                local_path = os.path.join(os.path.dirname(
                                                    self.logger.logFilePath),
                                          os.path.basename(job_path_remote))
                if i==0: # The first is the job command
                    self.logger.add_link(os.path.basename(job_path_remote),
            elif dirname == 'OUT':
                # Case 2- : the file is a txt file of command traces
                local_path = os.path.join(os.path.dirname(
                                                    self.logger.logFilePath),
                                          os.path.basename(job_path_remote))
            elif dirname == 'TEST':
                # Case 3- : the file is the xml board of test results
                local_path = os.path.join(os.path.dirname(
                                                    self.logger.logFilePath),
                                          os.path.basename(job_path_remote))
            # Download the file unless it already exists locally.
            if not os.path.exists(local_path):
                self.machine.sftp.get(job_path_remote, local_path)
                self.remote_log_files.append(local_path)
        except Exception as e:
            self.err += _("Unable to get %s log file from remote: %s" %
                          (str(job_path_remote),
    def has_failed(self):
        '''Returns True if the job has failed.
           A job is considered as failed if the machine could not be reached,
           if the remote command failed,
           or if the job finished with a time out.

        :return: True if the job has failed
        '''
        # NOTE(review): the return statements of each guard below appear
        # missing from this extract.
        if not self.has_finished():
        if not self.machine.successfully_connected(self.logger):
        if self.is_timeout():
        if self.res_job == "1":
    # NOTE(review): the "def cancel(self):" line is missing from this extract.
        """In case of a failing job, one has to cancel every job that depend
           on it. This method put the job as failed and will not be executed.
        """
        # Mark the job as both begun and finished so the scheduler skips it.
        self._has_begun = True
        self._has_finished = True
        self.cancelled = True
        self.out += _("This job was not launched because its father has failed.")
        self.err += _("This job was not launched because its father has failed.")
469 def is_running(self):
470 '''Returns True if the job commands are running
472 :return: True if the job is running
475 return self.has_begun() and not self.has_finished()
477 def is_timeout(self):
478 '''Returns True if the job commands has finished with timeout
480 :return: True if the job has finished with timeout
483 return self._has_timouted
    def time_elapsed(self):
        """Get the time elapsed since the job launching

        :return: The number of seconds
        """
        if not self.has_begun():
        # NOTE(review): the guard body and the "T_now = time.time()"
        # assignment appear missing from this extract.
        return T_now - self._T0
    def check_time(self):
        """Verify that the job has not exceeded its timeout.
           If it has, kill the remote command and consider the job as finished.
        """
        # Nothing to check before the job has started.
        if not self.has_begun():
        if self.time_elapsed() > self.timeout:
            self._has_finished = True
            self._has_timouted = True
            self._Tf = time.time()
            # NOTE(review): a try: header appears missing around the kill and
            # log retrieval below.
            (out_kill, _) = self.kill_remote_process()
            self.out += "TIMEOUT \n" + out_kill.read().decode()
            self.err += "TIMEOUT : %s seconds elapsed\n" % str(self.timeout)
            except Exception as e:
                self.err += _("Unable to get remote log files: %s" % e)
515 def total_duration(self):
516 """Give the total duration of the job
518 :return: the total duration of the job in seconds
521 return self._Tf - self._T0
    # NOTE(review): the "def run(self):" line is missing from this extract.
        """Launch the job by executing the remote command.
        """
        # Prevent multiple run
        # NOTE(review): the "if self.has_begun():" guard line appears missing
        # above this warning block.
            msg = _("Warning: A job can only be launched one time")
            msg2 = _("Trying to launch the job \"%s\" whereas it has "
                     "already been launched." % self.name)
            self.logger.write(src.printcolors.printcWarning("%s\n%s\n" % (msg,

        # Do not execute the command if the machine could not be reached
        if not self.machine.successfully_connected(self.logger):
            self._has_finished = True
            self.err += ("Connection to machine (name : %s, host: %s, port:"
                         " %s, user: %s) has failed\nUse the log command "
                         "to get more information."
                         % (self.machine.name,
        # Usual case : Launch the command on remote machine
        self._T0 = time.time()
        # exec_command returns (None, None, None) on failure (see Machine).
        self._stdin, self._stdout, self._stderr = self.machine.exec_command(

        # If the results are not initialized, finish the job
        if (self._stdin, self._stdout, self._stderr) == (None, None, None):
            self._has_finished = True
            self._Tf = time.time()
            self.err += "The server failed to execute the command"

        # Put the beginning flag to true.
        self._has_begun = True
    def write_results(self):
        """Display on the terminal all the job's information
        """
        self.logger.write("name : " + self.name + "\n")
        self.logger.write("after : %s\n" % self.after)
        self.logger.write("Time elapsed : %4imin %2is \n" %
                          (self.total_duration()//60 , self.total_duration()%60))
        self.logger.write("Begin time : %s\n" %
                          time.strftime('%Y-%m-%d %H:%M:%S',
                                        time.localtime(self._T0)) )
        self.logger.write("End time : %s\n\n" %
                          time.strftime('%Y-%m-%d %H:%M:%S',
                                        time.localtime(self._Tf)) )

        machine_head = "Informations about connection :\n"
        underline = (len(machine_head) - 2) * "-"
        self.logger.write(src.printcolors.printcInfo(
                                                machine_head+underline+"\n"))
        self.machine.write_info(self.logger)

        self.logger.write(src.printcolors.printcInfo("out : \n"))
        # NOTE(review): the guard choosing between the two branches below
        # (presumably testing whether self.out is empty) appears missing.
            self.logger.write("Unable to get output\n")
            self.logger.write(self.out + "\n")
        self.logger.write(src.printcolors.printcInfo("err : \n"))
        self.logger.write(self.err + "\n")
    def get_status(self):
        """Get the status of the job (used by the Gui for xml display)

        :return: The current status of the job
        :rtype: str
        """
        if not self.machine.successfully_connected(self.logger):
            return "SSH connection KO"
        if not self.has_begun():
            return "Not launched"
        # NOTE(review): a "cancelled" check appears missing from this extract.
        if self.is_running():
            return "running since " + time.strftime('%Y-%m-%d %H:%M:%S',
                                                    time.localtime(self._T0))
        if self.has_finished():
            if self.is_timeout():
                return "Timeout since " + time.strftime('%Y-%m-%d %H:%M:%S',
                                                    time.localtime(self._Tf))
            return "Finished since " + time.strftime('%Y-%m-%d %H:%M:%S',
                                                     time.localtime(self._Tf))
    '''Class to manage the jobs to be run
    '''
    # NOTE(review): the "class Jobs" statement and the beginning of the
    # constructor signature/body are missing from this extract.
                 lenght_columns = 20):
        # The jobs configuration
        self.cfg_jobs = config_jobs
        self.job_file_path = job_file_path
        # The machine that will be used today
        # The list of machine (hosts, port) that will be used today
        # (a same host can have several machine instances since there
        # can be several ssh parameters)
        # The jobs to be launched today
        # The jobs that will not be launched today
        self.ljobs_not_today = []
        # Width of each host column in the terminal status display.
        self.len_columns = lenght_columns

        # the list of jobs that have not been run yet
        self._l_jobs_not_started = []
        # the list of jobs that have already ran
        self._l_jobs_finished = []
        # the list of jobs that are running
        self._l_jobs_running = []

        self.determine_jobs_and_machines()
    def define_job(self, job_def, machine):
        '''Takes a pyconf job definition and a machine (from class machine)
           and returns the job instance corresponding to the definition.

        :param job_def src.config.Mapping: a job definition
        :param machine machine: the machine on which the job will run
        :return: The corresponding job in a job class instance
        '''
        cmmnds = job_def.commands
        if not "timeout" in job_def:
            timeout = 4*60*60 # default timeout = 4h
            # NOTE(review): an "else:" header appears missing above this line.
            timeout = job_def.timeout
        # Optional job attributes read from the pyconf definition.
        # NOTE(review): the default assignments preceding each guard appear
        # missing from this extract.
        if 'after' in job_def:
            after = job_def.after
        if 'application' in job_def:
            application = job_def.application
        if 'board' in job_def:
            board = job_def.board
        if "prefix" in job_def:
            prefix = job_def.prefix
        # NOTE(review): the final "return Job(...)" construction is missing
        # from this extract.
    def determine_jobs_and_machines(self):
        '''Function that reads the pyconf jobs definition and instantiates all
           the machines and jobs to be done today.
        '''
        # NOTE(review): several lines are missing from this extract
        # (initialisation of host_list/a_machine, "else:" headers for each
        # machine attribute default, and the Machine(...) construction).
        # weekday(): 0 = Monday ... 6 = Sunday
        today = datetime.date.weekday(datetime.date.today())

        for job_def in self.cfg_jobs.jobs :
            # Jobs with no target machine are ignored with a warning.
            if not "machine" in job_def:
                msg = _('WARNING: The job "%s" do not have the key '
                        '"machine", this job is ignored.\n\n' % job_def.name)
                self.logger.write(src.printcolors.printcWarning(msg))
            name_machine = job_def.machine

            # Reuse an already-instantiated machine when possible.
            for mach in self.lmachines:
                if mach.name == name_machine:
            if a_machine == None:
                # Otherwise look the machine up in the configuration.
                for machine_def in self.cfg_jobs.machines:
                    if machine_def.name == name_machine:
                        if 'host' not in machine_def:
                            host = self.runner.cfg.VARS.hostname
                            host = machine_def.host
                        if 'user' not in machine_def:
                            user = self.runner.cfg.VARS.user
                            user = machine_def.user
                        if 'port' not in machine_def:
                            port = machine_def.port
                        if 'password' not in machine_def:
                            passwd = machine_def.password
                        if 'sat_path' not in machine_def:
                            sat_path = "salomeTools"
                            sat_path = machine_def.sat_path
                        self.lmachines.append(a_machine)
                        # Track distinct (host, port) pairs for the display.
                        if (host, port) not in host_list:
                            host_list.append((host, port))
            if a_machine == None:
                msg = _("WARNING: The job \"%(job_name)s\" requires the "
                        "machine \"%(machine_name)s\" but this machine "
                        "is not defined in the configuration file.\n"
                        "The job will not be launched")
                self.logger.write(src.printcolors.printcWarning(msg))
            a_job = self.define_job(job_def, a_machine)
            # Split jobs between those scheduled today and the others.
            if today in job_def.when:
                self.ljobs.append(a_job)
            else: # today in job_def.when
                self.ljobs_not_today.append(a_job)
        self.lhosts = host_list
    def ssh_connection_all_machines(self, pad=50):
        '''Function that do the ssh connection to every machine
           to be used today.
        '''
        # NOTE(review): many lines are missing from this extract (call
        # continuations, "if res_copy == 0:" style guards, except headers);
        # the structure below is partial.
        self.logger.write(src.printcolors.printcInfo((
                        "Establishing connection with all the machines :\n")))
        for machine in self.lmachines:
            # little algorithm in order to display traces
            begin_line = (_("Connection to %s: " % machine.name))
            if pad - len(begin_line) < 0:
            # Pad the line with dots up to "pad" columns.
            endline = (pad - len(begin_line)) * "." + " "

            step = "SSH connection"
            self.logger.write( begin_line + endline + step)

            # the call to the method that initiate the ssh connection
            msg = machine.connect(self.logger)

            # Copy salomeTools to the remote machine
            if machine.successfully_connected(self.logger):
                # First remove any previous SAT installation remotely.
                step = _("Remove SAT")
                self.logger.write('\r%s%s%s' % (begin_line, endline, 20 * " "),3)
                self.logger.write('\r%s%s%s' % (begin_line, endline, step), 3)
                (__, out_dist, __) = machine.exec_command(
                                                "rm -rf %s" % machine.sat_path,
                self.logger.write('\r%s%s%s' % (begin_line, endline, 20 * " "),3)
                self.logger.write('\r%s%s%s' % (begin_line, endline, step), 3)
                res_copy = machine.copy_sat(self.runner.cfg.VARS.salometoolsway,
                # get the remote machine distribution using a sat command
                (__, out_dist, __) = machine.exec_command(
                                os.path.join(machine.sat_path,
                                    "sat config --value VARS.dist --no_label"),
                machine.distribution = out_dist.read().decode().replace("\n",
                # Print the status of the copy
                self.logger.write('\r%s' %
                            ((len(begin_line)+len(endline)+20) * " "), 3)
                self.logger.write('\r%s%s%s' %
                        src.printcolors.printc(src.OK_STATUS)), 3)
                self.logger.write('\r%s' %
                            ((len(begin_line)+len(endline)+20) * " "), 3)
                self.logger.write('\r%s%s%s %s' %
                        src.printcolors.printc(src.KO_STATUS),
                        _("Copy of SAT failed: %s" % res_copy)), 3)
                self.logger.write('\r%s' %
                            ((len(begin_line)+len(endline)+20) * " "), 3)
                self.logger.write('\r%s%s%s %s' %
                        src.printcolors.printc(src.KO_STATUS),
            self.logger.write("\n", 3)
        self.logger.write("\n")
    def is_occupied(self, hostname):
        '''Function that returns True if a job is running on
           the machine defined by its host and its port.

        :param hostname (str, int): the pair (host, port)
        :return: the job that is running on the host,
                or false if there is no job running on the host.
        '''
        # NOTE(review): the unpacking of hostname into (host, port), the
        # is_running() check and the return statements appear missing from
        # this extract.
        for jb in self.ljobs:
            if jb.machine.host == host and jb.machine.port == port:
    def update_jobs_states_list(self):
        '''Function that updates the lists that store the currently
           running jobs and the jobs that have already finished.

        :return: True if the number of finished jobs has increased
        '''
        jobs_finished_list = []
        jobs_running_list = []
        for jb in self.ljobs:
            # NOTE(review): the "if jb.is_running():" guard appears missing
            # above this line.
                jobs_running_list.append(jb)
            if jb.has_finished():
                jobs_finished_list.append(jb)

        nb_job_finished_before = len(self._l_jobs_finished)
        self._l_jobs_finished = jobs_finished_list
        self._l_jobs_running = jobs_running_list

        nb_job_finished_now = len(self._l_jobs_finished)

        # True when at least one job finished since the previous update.
        return nb_job_finished_now > nb_job_finished_before
    def cancel_dependencies_of_failing_jobs(self):
        '''Function that cancels all the jobs that depend on a failing one.
        '''
        for job in self.ljobs:
            # Jobs without a father are never cancelled here.
            if job.after is None:
            # NOTE(review): the guard body above (presumably "continue") and
            # the cancellation call below appear missing from this extract.
            father_job = self.find_job_that_has_name(job.after)
            if father_job is not None and father_job.has_failed():
    def find_job_that_has_name(self, name):
        '''Returns the job by its name.

        :param name str: a job name
        :return: the job that has the name.
        '''
        for jb in self.ljobs:
            # NOTE(review): the name comparison and the "return jb" appear
            # missing from this extract.
        # the following is executed only if the job was not found
917 def str_of_length(self, text, length):
918 '''Takes a string text of any length and returns
919 the most close string of length "length".
921 :param text str: any string
922 :param length int: a length for the returned string
923 :return: the most close string of length "length"
926 if len(text) > length:
927 text_out = text[:length-3] + '...'
929 diff = length - len(text)
930 before = " " * (diff//2)
931 after = " " * (diff//2 + diff%2)
932 text_out = before + text + after
    def display_status(self, len_col):
        '''Takes a lenght and construct the display of the current status
           of the jobs in an array that has a column for each host.
           It displays the job that is currently running on the host.

        :param len_col int: the size of the column
        '''
        # NOTE(review): the initialisation of display_line appears missing
        # from this extract.
        for host_port in self.lhosts:
            jb = self.is_occupied(host_port)
            if not jb: # nothing running on the host
                empty = self.str_of_length("empty", len_col)
                display_line += "|" + empty
                # NOTE(review): an "else:" header appears missing above.
                display_line += "|" + src.printcolors.printcInfo(
                                        self.str_of_length(jb.name, len_col))

        # "\r" rewrites the same terminal line on every refresh.
        self.logger.write("\r" + display_line + "|")
    # NOTE(review): the "def run_jobs(self):" line is missing from this extract,
    # as are several loop bodies ("continue" statements, else: headers,
    # time.sleep and the gui guards).
        '''The main method. Runs all the jobs on every host.
           For each host, at a given time, only one job can be running.
           The jobs that have the field after (that contain the job that has
           to be run before it) are run after the previous job.
           This method stops when all the jobs are finished.
        '''
        self.logger.write(src.printcolors.printcInfo(
                                                _('Executing the jobs :\n')))
        # Build the header line of the per-host status table.
        for host_port in self.lhosts:
            if port == 22: # default value
                text_line += "|" + self.str_of_length(host, self.len_columns)
                text_line += "|" + self.str_of_length(
                                "("+host+", "+str(port)+")", self.len_columns)

        tiret_line = " " + "-"*(len(text_line)-1) + "\n"
        self.logger.write(tiret_line)
        self.logger.write(text_line + "|\n")
        self.logger.write(tiret_line)

        # The infinite loop that runs the jobs
        l_jobs_not_started = src.deepcopy_list(self.ljobs)
        while len(self._l_jobs_finished) != len(self.ljobs):
            new_job_start = False
            for host_port in self.lhosts:
                # One job per host at a time.
                if self.is_occupied(host_port):
                for jb in l_jobs_not_started:
                    # Only consider jobs targeting this host.
                    if (jb.machine.host, jb.machine.port) != host_port:
                    if jb.after == None:
                        # No dependency: start the job right away.
                        l_jobs_not_started.remove(jb)
                        new_job_start = True
                        jb_before = self.find_job_that_has_name(jb.after)
                        if jb_before is None:
                            msg = _("This job was not launched because its "
                                    "father is not in the jobs list.")
                        if jb_before.has_finished():
                            l_jobs_not_started.remove(jb)
                            new_job_start = True
            self.cancel_dependencies_of_failing_jobs()
            new_job_finished = self.update_jobs_states_list()

            if new_job_start or new_job_finished:
                    self.gui.update_xml_files(self.ljobs)
                # Display the current status
                self.display_status(self.len_columns)

            # Make sure that the proc is not entirely busy

        self.logger.write("\n")
        self.logger.write(tiret_line)
        self.logger.write("\n\n")

            self.gui.update_xml_files(self.ljobs)
            self.gui.last_update()
    def write_all_results(self):
        '''Display all the jobs outputs.
        '''
        for jb in self.ljobs:
            self.logger.write(src.printcolors.printcLabel(
                        "#------- Results for job %s -------#\n" % jb.name))
            # NOTE(review): the call writing the job's results (presumably
            # jb.write_results()) appears missing from this extract.
            self.logger.write("\n\n")
    '''Class to manage the xml data that can be displayed in a browser to
    '''
    # NOTE(review): the "class Gui" statement and the beginning of the
    # constructor signature are missing from this extract.
        :param xml_dir_path str: The path to the directory where to put
                                 the xml resulting files
        :param l_jobs List: the list of jobs that run today
        :param l_jobs_not_today List: the list of jobs that do not run today
        :param file_boards str: the file path from which to read the
                                expected boards
        # The logging instance
        self.logger = logger

        # The prefix to add to the xml files : date_hour
        self.prefix = prefix

        # The path of the csv files to read to fill the expected boards
        self.file_boards = file_boards

        if file_boards != "":
            # weekday(): 0 = Monday ... 6 = Sunday
            today = datetime.date.weekday(datetime.date.today())
            self.parse_csv_boards(today)
            # NOTE(review): an "else:" header appears missing above this line.
            self.d_input_boards = {}

        # The path of the global xml file
        self.xml_dir_path = xml_dir_path
        # Initialize the xml files
        self.global_name = "global_report"
        xml_global_path = os.path.join(self.xml_dir_path,
                                       self.global_name + ".xml")
        # NOTE(review): the remaining XmlLogFile constructor arguments appear
        # missing from this extract.
        self.xml_global_file = src.xmlManager.XmlLogFile(xml_global_path,

        # Find history for each job
        self.find_history(l_jobs, l_jobs_not_today)

        # The xml files that corresponds to the boards.
        # {name_board : xml_object}}
        self.d_xml_board_files = {}

        # Create the lines and columns
        self.initialize_boards(l_jobs, l_jobs_not_today)

        # Write the xml file
        self.update_xml_files(l_jobs)
    def add_xml_board(self, name):
        '''Add a board to the board list
        :param name str: the board name
        '''
        xml_board_path = os.path.join(self.xml_dir_path, name + ".xml")
        # NOTE(review): the remaining XmlLogFile constructor arguments appear
        # missing from this extract.
        self.d_xml_board_files[name] = src.xmlManager.XmlLogFile(
        # Pre-create the skeleton nodes filled later by initialize_boards().
        self.d_xml_board_files[name].add_simple_node("distributions")
        self.d_xml_board_files[name].add_simple_node("applications")
        self.d_xml_board_files[name].add_simple_node("board", text=name)
1126 def initialize_boards(self, l_jobs, l_jobs_not_today):
1127 '''Get all the first information needed for each file and write the
1128 first version of the files
1129 :param l_jobs List: the list of jobs that run today
1130 :param l_jobs_not_today List: the list of jobs that do not run today
1132 # Get the boards to fill and put it in a dictionary
1133 # {board_name : xml instance corresponding to the board}
1134 for job in l_jobs + l_jobs_not_today:
1136 if (board is not None and
1137 board not in self.d_xml_board_files.keys()):
1138 self.add_xml_board(board)
1140 # Verify that the boards given as input are done
1141 for board in list(self.d_input_boards.keys()):
1142 if board not in self.d_xml_board_files:
1143 self.add_xml_board(board)
1144 root_node = self.d_xml_board_files[board].xmlroot
1145 src.xmlManager.append_node_attrib(root_node,
1146 {"input_file" : self.file_boards})
1148 # Loop over all jobs in order to get the lines and columns for each
1152 for board in self.d_xml_board_files:
1154 d_application[board] = []
1158 for job in l_jobs + l_jobs_not_today:
1160 if (job.machine.host, job.machine.port) not in l_hosts_ports:
1161 l_hosts_ports.append((job.machine.host, job.machine.port))
1163 distrib = job.machine.distribution
1164 application = job.application
1166 board_job = job.board
1169 for board in self.d_xml_board_files:
1170 if board_job == board:
1171 if distrib is not None and distrib not in d_dist[board]:
1172 d_dist[board].append(distrib)
1173 src.xmlManager.add_simple_node(
1174 self.d_xml_board_files[board].xmlroot.find(
1177 attrib={"name" : distrib})
1179 if board_job == board:
1180 if (application is not None and
1181 application not in d_application[board]):
1182 d_application[board].append(application)
1183 src.xmlManager.add_simple_node(
1184 self.d_xml_board_files[board].xmlroot.find(
1188 "name" : application})
1190 # Verify that there are no missing application or distribution in the
1191 # xml board files (regarding the input boards)
1192 for board in self.d_xml_board_files:
1193 l_dist = d_dist[board]
1194 if board not in self.d_input_boards.keys():
1196 for dist in self.d_input_boards[board]["rows"]:
1197 if dist not in l_dist:
1198 src.xmlManager.add_simple_node(
1199 self.d_xml_board_files[board].xmlroot.find(
1202 attrib={"name" : dist})
1203 l_appli = d_application[board]
1204 for appli in self.d_input_boards[board]["columns"]:
1205 if appli not in l_appli:
1206 src.xmlManager.add_simple_node(
1207 self.d_xml_board_files[board].xmlroot.find(
1210 attrib={"name" : appli})
1212 # Initialize the hosts_ports node for the global file
1213 self.xmlhosts_ports = self.xml_global_file.add_simple_node(
1215 for host, port in l_hosts_ports:
1216 host_port = "%s:%i" % (host, port)
1217 src.xmlManager.add_simple_node(self.xmlhosts_ports,
1219 attrib={"name" : host_port})
1221 # Initialize the jobs node in all files
1222 for xml_file in [self.xml_global_file] + list(
1223 self.d_xml_board_files.values()):
1224 xml_jobs = xml_file.add_simple_node("jobs")
1225 # Get the jobs present in the config file but
1226 # that will not be launched today
1227 self.put_jobs_not_today(l_jobs_not_today, xml_jobs)
1229 # add also the infos node
1230 xml_file.add_simple_node("infos",
1231 attrib={"name" : "last update",
1232 "JobsCommandStatus" : "running"})
1234 # and put the history node
1235 history_node = xml_file.add_simple_node("history")
1236 name_board = os.path.basename(xml_file.logFile)[:-len(".xml")]
1237 # serach for board files
1238 expression = "^[0-9]{8}_+[0-9]{6}_" + name_board + ".xml$"
1239 oExpr = re.compile(expression)
            # Get the list of xml board files that are in the log directory
1241 for file_name in os.listdir(self.xml_dir_path):
1242 if oExpr.search(file_name):
1243 date = os.path.basename(file_name).split("_")[0]
1244 file_path = os.path.join(self.xml_dir_path, file_name)
1245 src.xmlManager.add_simple_node(history_node,
1248 attrib={"date" : date})
1251 # Find in each board the squares that needs to be filled regarding the
1252 # input csv files but that are not covered by a today job
1253 for board in self.d_input_boards.keys():
1254 xml_root_board = self.d_xml_board_files[board].xmlroot
1255 # Find the missing jobs for today
1256 xml_missing = src.xmlManager.add_simple_node(xml_root_board,
1258 for row, column in self.d_input_boards[board]["jobs"]:
1261 if (job.application == column and
1262 job.machine.distribution == row):
1266 src.xmlManager.add_simple_node(xml_missing,
1268 attrib={"distribution" : row,
1269 "application" : column })
1270 # Find the missing jobs not today
1271 xml_missing_not_today = src.xmlManager.add_simple_node(
1273 "missing_jobs_not_today")
1274 for row, column in self.d_input_boards[board]["jobs_not_today"]:
1276 for job in l_jobs_not_today:
1277 if (job.application == column and
1278 job.machine.distribution == row):
1282 src.xmlManager.add_simple_node(xml_missing_not_today,
1284 attrib={"distribution" : row,
1285 "application" : column })
    def find_history(self, l_jobs, l_jobs_not_today):
        """find, for each job, in the existent xml boards the results for the
        job. Store the results in the dictionary self.history = {name_job :
        list of (date, status, list links)}

        :param l_jobs List: the list of jobs to run today
        :param l_jobs_not_today List: the list of jobs that do not run today
        """
        # load the all the history
        # Global report files are named <8-digit date>_<6-digit time>_<name>.xml
        expression = "^[0-9]{8}_+[0-9]{6}_" + self.global_name + ".xml$"
        oExpr = re.compile(expression)
        # Get the list of global xml that are in the log directory
        for file_name in os.listdir(self.xml_dir_path):
            if oExpr.search(file_name):
                file_path = os.path.join(self.xml_dir_path, file_name)
                # NOTE(review): the opening `try:` of this try/except is not
                # visible in this excerpt
                    global_xml = src.xmlManager.ReadXmlFile(file_path)
                    l_globalxml.append(global_xml)
                except Exception as e:
                    # an unreadable archived report is skipped with a warning
                    # rather than aborting the whole history collection
                    msg = _("\nWARNING: the file %s can not be read, it will be "
                            "ignored\n%s" % (file_path, e))
                    self.logger.write("%s\n" % src.printcolors.printcWarning(
        # Construct the dictionary self.history
        for job in l_jobs + l_jobs_not_today:
            # collect (date, result, link) triplets for this job across all
            # the archived global reports
            for global_xml in l_globalxml:
                # the date prefix of the file name identifies the run
                date = os.path.basename(global_xml.filePath).split("_")[0]
                global_root_node = global_xml.xmlroot.find("jobs")
                job_node = src.xmlManager.find_node_by_attrib(
                if job_node.find("remote_log_file_path") is not None:
                    link = job_node.find("remote_log_file_path").text
                    res_job = job_node.find("res").text
                    # "nothing" is the placeholder used when no log was produced
                    if link != "nothing":
                        l_links.append((date, res_job, link))
            # most recent date first
            l_links = sorted(l_links, reverse=True)
            self.history[job.name] = l_links
    def put_jobs_not_today(self, l_jobs_not_today, xml_node_jobs):
        '''Get all the first information needed for each file and write the
        first version of the files

        :param xml_node_jobs etree.Element: the node corresponding to a job
        :param l_jobs_not_today List: the list of jobs that do not run today
        '''
        for job in l_jobs_not_today:
            # one <job name="..."> node per job that is not scheduled today
            xmlj = src.xmlManager.add_simple_node(xml_node_jobs,
                                                  attrib={"name" : job.name})
            src.xmlManager.add_simple_node(xmlj, "application", job.application)
            src.xmlManager.add_simple_node(xmlj,
                                           job.machine.distribution)
            src.xmlManager.add_simple_node(xmlj, "board", job.board)
            src.xmlManager.add_simple_node(xmlj,
                                  "commands", " ; ".join(job.commands))
            # the state is fixed: these jobs are, by definition, not run today
            src.xmlManager.add_simple_node(xmlj, "state", "Not today")
            src.xmlManager.add_simple_node(xmlj, "machine", job.machine.name)
            src.xmlManager.add_simple_node(xmlj, "host", job.machine.host)
            src.xmlManager.add_simple_node(xmlj, "port", str(job.machine.port))
            src.xmlManager.add_simple_node(xmlj, "user", job.machine.user)
            src.xmlManager.add_simple_node(xmlj, "sat_path",
                                                job.machine.sat_path)
            # replay the known history of the job (filled by find_history)
            xml_history = src.xmlManager.add_simple_node(xmlj, "history")
            for i, (date, res_job, link) in enumerate(self.history[job.name]):
                # tag the first one (the last one)
                src.xmlManager.add_simple_node(xml_history,
                                               attrib={"date" : date,
                src.xmlManager.add_simple_node(xml_history,
                                               attrib={"date" : date,
    def parse_csv_boards(self, today):
        """ Parse the csv file that describes the boards to produce and fill
        the dict d_input_boards that contain the csv file contain

        :param today int: the current day of the week
        """
        # open the csv file and read its content
        with open(self.file_boards, 'r') as f:
            reader = csv.reader(f,delimiter=CSV_DELIMITER)
        # get the delimiter for the boards (empty line)
        boards_delimiter = [''] * len(l_read[0])
        # Make the list of boards, by splitting with the delimiter
        l_boards = [list(y) for x, y in itertools.groupby(l_read,
                                    lambda z: z == boards_delimiter) if not x]

        # loop over the csv lists of lines and get the rows, columns and jobs
        for input_board in l_boards:
            # first cell of the first line: the board name
            board_name = input_board[0][0]
            # rest of the first line: the columns (applications)
            columns = input_board[0][1:]
            for line in input_board[1:]:
                for i, square in enumerate(line[1:]):
                    # each square lists the week days (as integers) on which
                    # the job runs, separated by DAYS_SEPARATOR
                    days = square.split(DAYS_SEPARATOR)
                    days = [int(day) for day in days]
                    # a job is identified by its (row, column) pair
                    job = (row, columns[i])
                        jobs_not_today.append(job)
            d_boards[board_name] = {"rows" : rows,
                                    "columns" : columns,
                                    "jobs_not_today" : jobs_not_today}
        self.d_input_boards = d_boards
1427 def update_xml_files(self, l_jobs):
1428 '''Write all the xml files with updated information about the jobs
1430 :param l_jobs List: the list of jobs that run today
1432 for xml_file in [self.xml_global_file] + list(
1433 self.d_xml_board_files.values()):
1434 self.update_xml_file(l_jobs, xml_file)
1437 self.write_xml_files()
    def update_xml_file(self, l_jobs, xml_file):
        '''update information about the jobs for the file xml_file

        :param l_jobs List: the list of jobs that run today
        :param xml_file xmlManager.XmlLogFile: the xml instance to update
        '''
        xml_node_jobs = xml_file.xmlroot.find('jobs')
        # Update the job names and status node
        # NOTE(review): the `for job in l_jobs:` loop header is not visible
        # in this excerpt; everything below runs once per job.
            # Find the node corresponding to the job and delete it
            # in order to recreate it
            for xmljob in xml_node_jobs.findall('job'):
                if xmljob.attrib['name'] == job.name:
                    xml_node_jobs.remove(xmljob)

                # human-readable begin/end timestamps of the job
                T0 = time.strftime('%Y-%m-%d %H:%M:%S',
                                   time.localtime(job._T0))
                Tf = time.strftime('%Y-%m-%d %H:%M:%S',
                                   time.localtime(job._Tf))

            # recreate the job node
            xmlj = src.xmlManager.add_simple_node(xml_node_jobs,
                                                  attrib={"name" : job.name})
            src.xmlManager.add_simple_node(xmlj, "machine", job.machine.name)
            src.xmlManager.add_simple_node(xmlj, "host", job.machine.host)
            src.xmlManager.add_simple_node(xmlj, "port", str(job.machine.port))
            src.xmlManager.add_simple_node(xmlj, "user", job.machine.user)
            # history entries collected earlier by find_history
            xml_history = src.xmlManager.add_simple_node(xmlj, "history")
            for date, res_job, link in self.history[job.name]:
                src.xmlManager.add_simple_node(xml_history,
                                               attrib={"date" : date,
            src.xmlManager.add_simple_node(xmlj, "sat_path",
                                                job.machine.sat_path)
            src.xmlManager.add_simple_node(xmlj, "application", job.application)
            src.xmlManager.add_simple_node(xmlj, "distribution",
                                           job.machine.distribution)
            src.xmlManager.add_simple_node(xmlj, "board", job.board)
            src.xmlManager.add_simple_node(xmlj, "timeout", str(job.timeout))
            src.xmlManager.add_simple_node(xmlj, "commands",
                                           " ; ".join(job.commands))
            src.xmlManager.add_simple_node(xmlj, "state", job.get_status())
            src.xmlManager.add_simple_node(xmlj, "begin", T0)
            src.xmlManager.add_simple_node(xmlj, "end", Tf)
            # strip terminal color escapes before storing the outputs
            src.xmlManager.add_simple_node(xmlj, "out",
                                        src.printcolors.cleancolor(job.out))
            src.xmlManager.add_simple_node(xmlj, "err",
                                        src.printcolors.cleancolor(job.err))
            src.xmlManager.add_simple_node(xmlj, "res", str(job.res_job))
            if len(job.remote_log_files) > 0:
                # only the first remote log file is linked from the report
                src.xmlManager.add_simple_node(xmlj,
                                               "remote_log_file_path",
                                               job.remote_log_files[0])
                src.xmlManager.add_simple_node(xmlj,
                                               "remote_log_file_path",
            # Search for the test log if there is any
            l_test_log_files = self.find_test_log(job.remote_log_files)
            xml_test = src.xmlManager.add_simple_node(xmlj,
                                                      "test_log_file_path")
            for test_log_path, res_test, nb_fails in l_test_log_files:
                test_path_node = src.xmlManager.add_simple_node(xml_test,
                test_path_node.attrib["res"] = res_test
                test_path_node.attrib["nb_fails"] = nb_fails

            xmlafter = src.xmlManager.add_simple_node(xmlj, "after", job.after)
            # get the job father
            if job.after is not None:
                    if jb.name == job.after:
                # link to the father's remote log when one exists
                if (job_father is not None and
                    len(job_father.remote_log_files) > 0):
                    link = job_father.remote_log_files[0]
                    src.xmlManager.append_node_attrib(xmlafter, {"link" : link})

            # Verify that the job is to be done today regarding the input csv
            if job.board and job.board in self.d_input_boards.keys():
                for dist, appli in self.d_input_boards[job.board]["jobs"]:
                    if (job.machine.distribution == dist
                        and job.application == appli):
                        src.xmlManager.add_simple_node(xmlj,
                    src.xmlManager.add_simple_node(xmlj,

        # stamp the report with the time of this update
        xml_node_infos = xml_file.xmlroot.find('infos')
        src.xmlManager.append_node_attrib(xml_node_infos,
                    datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")})
1556 def find_test_log(self, l_remote_log_files):
1557 '''Find if there is a test log (board) in the remote log files and
1558 the path to it. There can be several test command, so the result is
1561 :param l_remote_log_files List: the list of all remote log files
1562 :return: the list of (test log files path, res of the command)
1566 for file_path in l_remote_log_files:
1567 dirname = os.path.basename(os.path.dirname(file_path))
1568 file_name = os.path.basename(file_path)
1569 regex = src.logger.log_all_command_file_expression
1570 oExpr = re.compile(regex)
1571 if dirname == "TEST" and oExpr.search(file_name):
1572 # find the res of the command
1573 prod_node = etree.parse(file_path).getroot().find("product")
1574 res_test = prod_node.attrib["global_res"]
1575 # find the number of fails
1576 testbase_node = prod_node.find("tests").find("testbase")
1577 nb_fails = int(testbase_node.attrib["failed"])
1578 # put the file path, the res of the test command and the number
1579 # of fails in the output
1580 res.append((file_path, res_test, nb_fails))
1584 def last_update(self, finish_status = "finished"):
1585 '''update information about the jobs for the file xml_file
1587 :param l_jobs List: the list of jobs that run today
1588 :param xml_file xmlManager.XmlLogFile: the xml instance to update
1590 for xml_file in [self.xml_global_file] + list(self.d_xml_board_files.values()):
1591 xml_node_infos = xml_file.xmlroot.find('infos')
1592 src.xmlManager.append_node_attrib(xml_node_infos,
1593 attrib={"JobsCommandStatus" : finish_status})
1595 self.write_xml_files()
1597 def write_xml_file(self, xml_file, stylesheet):
1598 ''' Write one xml file and the same file with prefix
1600 xml_file.write_tree(stylesheet)
1601 file_path = xml_file.logFile
1602 file_dir = os.path.dirname(file_path)
1603 file_name = os.path.basename(file_path)
1604 file_name_with_prefix = self.prefix + "_" + file_name
1605 xml_file.write_tree(stylesheet, os.path.join(file_dir,
1606 file_name_with_prefix))
1608 def write_xml_files(self):
1609 ''' Write the xml files
1611 self.write_xml_file(self.xml_global_file, STYLESHEET_GLOBAL)
1612 for xml_file in self.d_xml_board_files.values():
1613 self.write_xml_file(xml_file, STYLESHEET_BOARD)
def get_config_file_path(job_config_name, l_cfg_dir):
    '''Look for the jobs configuration file.

    If job_config_name is an existing path ending in .pyconf it is used
    directly; otherwise the file is searched in each directory of l_cfg_dir
    (appending the .pyconf extension when missing).

    :param job_config_name str: path or base name of the configuration file
    :param l_cfg_dir List: the directories where to search for the file
    :return: (found, file_jobs_cfg) — whether a file was found, and its path
    '''
    # NOTE(review): the assignments of the `found` flag are not visible in
    # this excerpt, although it is returned below.
    file_jobs_cfg = None
    if os.path.exists(job_config_name) and job_config_name.endswith(".pyconf"):
        # the given name is already a direct path to a .pyconf file
        file_jobs_cfg = job_config_name
        for cfg_dir in l_cfg_dir:
            # candidate path inside this configuration directory
            file_jobs_cfg = os.path.join(cfg_dir, job_config_name)
            if not file_jobs_cfg.endswith('.pyconf'):
                file_jobs_cfg += '.pyconf'
            if not os.path.exists(file_jobs_cfg):
    return found, file_jobs_cfg
# Describes the command
# NOTE(review): the `def description():` header of this return statement is
# not visible in this excerpt; it supplies the help text of `sat jobs`.
    return _("The jobs command launches maintenances that are described"
             " in the dedicated jobs configuration file.\n\nexample:\nsat "
             "jobs --name my_jobs --publish")
def run(args, runner, logger):
    '''Entry point of the jobs command: parse the options, merge the jobs
    configuration files, connect to the machines, produce the xml boards
    and run the jobs.

    :param args List: the command-line arguments after "jobs"
    :param runner: the sat runner giving access to the configuration
    :param logger: the logger instance used for all output
    '''
    (options, args) = parser.parse_args(args)

    # directories where jobs configuration files are searched
    l_cfg_dir = runner.cfg.PATHS.JOBPATH

    # list option : display all the available config files
        for cfg_dir in l_cfg_dir:
            if not options.no_label:
                logger.write("------ %s\n" %
                                 src.printcolors.printcHeader(cfg_dir))
            if not os.path.exists(cfg_dir):
            for f in sorted(os.listdir(cfg_dir)):
                # only .pyconf files are job configurations
                if not f.endswith('.pyconf'):
                logger.write("%s\n" % cfilename)

    # Make sure the jobs_config option has been called
    if not options.jobs_cfg:
        message = _("The option --jobs_config is required\n")
        src.printcolors.printcError(message)

    # Find the file in the directories, unless it is a full path
    # merge all in a config
    merger = src.pyconf.ConfigMerger()
    config_jobs = src.pyconf.Config()
    l_conf_files_path = []
    for config_file in options.jobs_cfg:
        found, file_jobs_cfg = get_config_file_path(config_file, l_cfg_dir)
            msg = _("The file configuration %(name_file)s was not found."
                    "\nUse the --list option to get the "
                    "possible files." % config_file)
            src.printcolors.printcError(msg)
        l_conf_files_path.append(file_jobs_cfg)
        # Read the config that is in the file
        one_config_jobs = src.read_config_from_a_file(file_jobs_cfg)
        merger.merge(config_jobs, one_config_jobs)

            (_("Platform"), runner.cfg.VARS.dist),
            (_("Files containing the jobs configuration"), l_conf_files_path)
    src.print_info(logger, info)

    # keep only the jobs requested with --only_jobs, when given
    if options.only_jobs:
        l_jb = src.pyconf.Sequence()
        for jb in config_jobs.jobs:
            if jb.name in options.only_jobs:
                           "Job that was given in only_jobs option parameters\n")
        config_jobs.jobs = l_jb

    # Make a unique file that contain all the jobs in order to use it
    name_pyconf = "_".join([os.path.basename(path)[:-len('.pyconf')]
                            for path in l_conf_files_path]) + ".pyconf"
    path_pyconf = src.get_tmp_filename(runner.cfg, name_pyconf)
    # NOTE(review): `file()` is the Python 2 builtin, removed in Python 3;
    # under Python 3 this must become `open(path_pyconf, 'w')` — confirm the
    # supported interpreter version.
    f = file( path_pyconf , 'w')
    config_jobs.__save__(f)

    today_jobs = Jobs(runner,
    # SSH connection to all machines
    today_jobs.ssh_connection_all_machines()
    # with --test_connection, stop after the connection attempt
    if options.test_connection:

    logger.write(src.printcolors.printcInfo(
                                    _("Initialize the xml boards : ")), 5)

    # Copy the stylesheets in the log directory
    log_dir = runner.cfg.USER.log_dir
    xsl_dir = os.path.join(runner.cfg.VARS.srcDir, 'xsl')
    files_to_copy.append(os.path.join(xsl_dir, STYLESHEET_GLOBAL))
    files_to_copy.append(os.path.join(xsl_dir, STYLESHEET_BOARD))
    files_to_copy.append(os.path.join(xsl_dir, "running.gif"))
    for file_path in files_to_copy:
        shutil.copy2(file_path, log_dir)

    # Instanciate the Gui in order to produce the xml files that contain all
    gui = Gui(runner.cfg.USER.log_dir,
              today_jobs.ljobs_not_today,
              runner.cfg.VARS.datehour,
              file_boards = options.input_boards)

    logger.write(src.printcolors.printcSuccess("OK"), 5)
    logger.write("\n\n", 5)

    # Display the list of the xml files
    logger.write(src.printcolors.printcInfo(("Here is the list of published"
    logger.write("%s\n" % gui.xml_global_file.logFile, 4)
    for board in gui.d_xml_board_files.keys():
        file_path = gui.d_xml_board_files[board].logFile
        file_name = os.path.basename(file_path)
        logger.write("%s\n" % file_path, 4)
        logger.add_link(file_name, "board", 0, board)

    logger.write("\n", 4)

    today_jobs.gui = gui

    # NOTE(review): the `try:` that pairs with the `except` below is not
    # visible in this excerpt.
        # Run all the jobs contained in config_jobs
        today_jobs.run_jobs()
    except KeyboardInterrupt:
        # the user interrupted the run (Ctrl-C)
        logger.write("\n\n%s\n\n" %
                (src.printcolors.printcWarning(_("Forced interruption"))), 1)
        msg = _("Killing the running jobs and trying"
                " to get the corresponding logs\n")
        logger.write(src.printcolors.printcWarning(msg))

        # find the potential not finished jobs and kill them
        for jb in today_jobs.ljobs:
            if not jb.has_finished():
                    jb.kill_remote_process()
                except Exception as e:
                    # best effort: report but do not stop the cleanup
                    msg = _("Failed to kill job %s: %s\n" % (jb.name, e))
                    logger.write(src.printcolors.printcWarning(msg))
            if jb.res_job != "0":
        # stamp the boards with the interruption status
        today_jobs.gui.last_update(_("Forced interruption"))
        today_jobs.gui.last_update()
        # Output the results
        today_jobs.write_all_results()
        # Remove the temporary pyconf file
        if os.path.exists(path_pyconf):
            os.remove(path_pyconf)