3 # Copyright (C) 2010-2013 CEA/DEN
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 # Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
32 import src.ElementTree as etree
34 STYLESHEET_GLOBAL = "jobs_global_report.xsl"
35 STYLESHEET_BOARD = "jobs_board_report.xsl"
40 parser = src.options.Options()
42 parser.add_option('n', 'name', 'list2', 'jobs_cfg',
43 _('Mandatory: The name of the config file that contains'
44 ' the jobs configuration. Can be a list.'))
45 parser.add_option('o', 'only_jobs', 'list2', 'only_jobs',
46 _('Optional: the list of jobs to launch, by their name. '))
47 parser.add_option('l', 'list', 'boolean', 'list',
48 _('Optional: list all available config files.'))
49 parser.add_option('t', 'test_connection', 'boolean', 'test_connection',
50 _("Optional: try to connect to the machines. "
51 "Not executing the jobs."),
53 parser.add_option('p', 'publish', 'boolean', 'publish',
54 _("Optional: generate an xml file that can be read in a "
55 "browser to display the jobs status."),
57 parser.add_option('i', 'input_boards', 'string', 'input_boards', _("Optional: "
58 "the path to csv file that contain "
59 "the expected boards."),"")
60 parser.add_option('', 'completion', 'boolean', 'no_label',
61 _("Optional (internal use): do not print labels, Works only "
65 class Machine(object):
66 '''Class to manage a ssh connection on a machine
74 sat_path="salomeTools"):
78 self.distribution = None # Will be filled after copying SAT on the machine
80 self.password = passwd
81 self.sat_path = sat_path
82 self.ssh = paramiko.SSHClient()
83 self._connection_successful = None
85 def connect(self, logger):
86 '''Initiate the ssh connection to the remote machine
88 :param logger src.logger.Logger: The logger instance
93 self._connection_successful = False
94 self.ssh.load_system_host_keys()
95 self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
97 self.ssh.connect(self.host,
100 password = self.password)
101 except paramiko.AuthenticationException:
102 message = src.KO_STATUS + _("Authentication failed")
103 except paramiko.BadHostKeyException:
104 message = (src.KO_STATUS +
105 _("The server's host key could not be verified"))
106 except paramiko.SSHException:
107 message = ( _("SSHException error connecting or "
108 "establishing an SSH session"))
110 message = ( _("Error connecting or establishing an SSH session"))
112 self._connection_successful = True
116 def successfully_connected(self, logger):
117 '''Verify if the connection to the remote machine has succeed
119 :param logger src.logger.Logger: The logger instance
120 :return: True if the connection has succeed, False if not
123 if self._connection_successful == None:
124 message = _("Warning : trying to ask if the connection to "
125 "(name: %s host: %s, port: %s, user: %s) is OK whereas there were"
126 " no connection request" %
127 (self.name, self.host, self.port, self.user))
128 logger.write( src.printcolors.printcWarning(message))
129 return self._connection_successful
131 def copy_sat(self, sat_local_path, job_file):
132 '''Copy salomeTools to the remote machine in self.sat_path
136 # open a sftp connection
137 self.sftp = self.ssh.open_sftp()
138 # Create the sat directory on remote machine if it is not existing
139 self.mkdir(self.sat_path, ignore_existing=True)
141 self.put_dir(sat_local_path, self.sat_path, filters = ['.git'])
142 # put the job configuration file in order to make it reachable
143 # on the remote machine
144 remote_job_file_name = ".%s" % os.path.basename(job_file)
145 self.sftp.put(job_file, os.path.join(self.sat_path,
146 remote_job_file_name))
147 except Exception as e:
149 self._connection_successful = False
153 def put_dir(self, source, target, filters = []):
154 ''' Uploads the contents of the source directory to the target path. The
155 target directory needs to exists. All sub-directories in source are
156 created under target.
158 for item in os.listdir(source):
161 source_path = os.path.join(source, item)
162 destination_path = os.path.join(target, item)
163 if os.path.islink(source_path):
164 linkto = os.readlink(source_path)
166 self.sftp.symlink(linkto, destination_path)
167 self.sftp.chmod(destination_path,
168 os.stat(source_path).st_mode)
172 if os.path.isfile(source_path):
173 self.sftp.put(source_path, destination_path)
174 self.sftp.chmod(destination_path,
175 os.stat(source_path).st_mode)
177 self.mkdir(destination_path, ignore_existing=True)
178 self.put_dir(source_path, destination_path)
180 def mkdir(self, path, mode=511, ignore_existing=False):
181 ''' Augments mkdir by adding an option to not fail
185 self.sftp.mkdir(path, mode)
192 def exec_command(self, command, logger):
193 '''Execute the command on the remote machine
195 :param command str: The command to be run
196 :param logger src.logger.Logger: The logger instance
197 :return: the stdin, stdout, and stderr of the executing command,
199 :rtype: (paramiko.channel.ChannelFile, paramiko.channel.ChannelFile,
200 paramiko.channel.ChannelFile)
203 # Does not wait the end of the command
204 (stdin, stdout, stderr) = self.ssh.exec_command(command)
205 except paramiko.SSHException:
206 message = src.KO_STATUS + _(
207 ": the server failed to execute the command\n")
208 logger.write( src.printcolors.printcError(message))
209 return (None, None, None)
211 logger.write( src.printcolors.printcError(src.KO_STATUS + '\n'))
212 return (None, None, None)
214 return (stdin, stdout, stderr)
217 '''Close the ssh connection
223 def write_info(self, logger):
224 '''Prints the informations relative to the machine in the logger
225 (terminal traces and log file)
227 :param logger src.logger.Logger: The logger instance
231 logger.write("host : " + self.host + "\n")
232 logger.write("port : " + str(self.port) + "\n")
233 logger.write("user : " + str(self.user) + "\n")
234 if self.successfully_connected(logger):
235 status = src.OK_STATUS
237 status = src.KO_STATUS
238 logger.write("Connection : " + status + "\n\n")
242 '''Class to manage one job
258 self.machine = machine
260 self.timeout = timeout
261 self.application = application
265 # The list of log files to download from the remote machine
266 self.remote_log_files = []
268 # The remote command status
269 # -1 means that it has not been launched,
270 # 0 means success and 1 means fail
272 self.cancelled = False
276 self._has_begun = False
277 self._has_finished = False
278 self._has_timouted = False
279 self._stdin = None # Store the command inputs field
280 self._stdout = None # Store the command outputs field
281 self._stderr = None # Store the command errors field
286 self.name_remote_jobs_pyconf = ".%s" % os.path.basename(job_file_path)
287 self.commands = commands
288 self.command = (os.path.join(self.machine.sat_path, "sat") +
290 os.path.join(self.machine.sat_path,
291 "list_log_files.txt") +
292 " job --jobs_config " +
293 os.path.join(self.machine.sat_path,
294 self.name_remote_jobs_pyconf) +
298 self.command = prefix + ' "' + self.command +'"'
301 """ Get the pid(s) corresponding to the command that have been launched
302 On the remote machine
304 :return: The list of integers corresponding to the found pids
308 cmd_pid = 'ps aux | grep "' + self.command + '" | awk \'{print $2}\''
309 (_, out_pid, _) = self.machine.exec_command(cmd_pid, self.logger)
310 pids_cmd = out_pid.readlines()
311 pids_cmd = [str(src.only_numbers(pid)) for pid in pids_cmd]
315 def kill_remote_process(self, wait=1):
316 '''Kills the process on the remote machine.
318 :return: (the output of the kill, the error of the kill)
322 pids = self.get_pids()
323 cmd_kill = " ; ".join([("kill -2 " + pid) for pid in pids])
324 (_, out_kill, err_kill) = self.machine.exec_command(cmd_kill,
327 return (out_kill, err_kill)
330 '''Returns True if the job has already begun
332 :return: True if the job has already begun
335 return self._has_begun
337 def has_finished(self):
338 '''Returns True if the job has already finished
339 (i.e. all the commands have been executed)
340 If it is finished, the outputs are stored in the fields out and err.
342 :return: True if the job has already finished
346 # If the method has already been called and returned True
347 if self._has_finished:
350 # If the job has not begun yet
351 if not self.has_begun():
354 if self._stdout.channel.closed:
355 self._has_finished = True
356 # Store the result outputs
357 self.out += self._stdout.read().decode()
358 self.err += self._stderr.read().decode()
360 self._Tf = time.time()
361 # And get the remote command status and log files
364 except Exception as e:
365 self.err += _("Unable to get remote log files: %s" % e)
367 return self._has_finished
369 def get_log_files(self):
370 """Get the log files produced by the command launched
371 on the remote machine, and put it in the log directory of the user,
372 so they can be accessible from
374 # Do not get the files if the command is not finished
375 if not self.has_finished():
376 msg = _("Trying to get log files whereas the job is not finished.")
377 self.logger.write(src.printcolors.printcWarning(msg))
380 # First get the file that contains the list of log files to get
381 tmp_file_path = src.get_tmp_filename(self.config, "list_log_files.txt")
382 remote_path = os.path.join(self.machine.sat_path, "list_log_files.txt")
383 self.machine.sftp.get(
387 # Read the file and get the result of the command and all the log files
389 fstream_tmp = open(tmp_file_path, "r")
390 file_lines = fstream_tmp.readlines()
391 file_lines = [line.replace("\n", "") for line in file_lines]
393 os.remove(tmp_file_path)
396 # The first line is the result of the command (0 success or 1 fail)
397 self.res_job = file_lines[0]
398 except Exception as e:
399 self.err += _("Unable to get status from remote file %s: %s" %
400 (remote_path, str(e)))
402 for i, job_path_remote in enumerate(file_lines[1:]):
404 # For each command, there is two files to get :
405 # 1- The xml file describing the command and giving the
407 # 2- The txt file containing the system command traces (like
408 # traces produced by the "make" command)
409 # 3- In case of the test command, there is another file to get :
410 # the xml board that contain the test results
411 dirname = os.path.basename(os.path.dirname(job_path_remote))
412 if dirname != 'OUT' and dirname != 'TEST':
414 local_path = os.path.join(os.path.dirname(
415 self.logger.logFilePath),
416 os.path.basename(job_path_remote))
417 if i==0: # The first is the job command
418 self.logger.add_link(os.path.basename(job_path_remote),
422 elif dirname == 'OUT':
424 local_path = os.path.join(os.path.dirname(
425 self.logger.logFilePath),
427 os.path.basename(job_path_remote))
428 elif dirname == 'TEST':
430 local_path = os.path.join(os.path.dirname(
431 self.logger.logFilePath),
433 os.path.basename(job_path_remote))
436 if not os.path.exists(local_path):
437 self.machine.sftp.get(job_path_remote, local_path)
438 self.remote_log_files.append(local_path)
439 except Exception as e:
440 self.err += _("Unable to get %s log file from remote: %s" %
441 (str(job_path_remote),
444 def has_failed(self):
445 '''Returns True if the job has failed.
446 A job is considered as failed if the machine could not be reached,
447 if the remote command failed,
448 or if the job finished with a time out.
450 :return: True if the job has failed
453 if not self.has_finished():
455 if not self.machine.successfully_connected(self.logger):
457 if self.is_timeout():
459 if self.res_job == "1":
464 """In case of a failing job, one has to cancel every job that depend
465 on it. This method put the job as failed and will not be executed.
469 self._has_begun = True
470 self._has_finished = True
471 self.cancelled = True
472 self.out += _("This job was not launched because its father has failed.")
473 self.err += _("This job was not launched because its father has failed.")
475 def is_running(self):
476 '''Returns True if the job commands are running
478 :return: True if the job is running
481 return self.has_begun() and not self.has_finished()
483 def is_timeout(self):
484 '''Returns True if the job commands has finished with timeout
486 :return: True if the job has finished with timeout
489 return self._has_timouted
491 def time_elapsed(self):
492 """Get the time elapsed since the job launching
494 :return: The number of seconds
497 if not self.has_begun():
500 return T_now - self._T0
502 def check_time(self):
503 """Verify that the job has not exceeded its timeout.
504 If it has, kill the remote command and consider the job as finished.
506 if not self.has_begun():
508 if self.time_elapsed() > self.timeout:
509 self._has_finished = True
510 self._has_timouted = True
511 self._Tf = time.time()
513 (out_kill, _) = self.kill_remote_process()
514 self.out += "TIMEOUT \n" + out_kill.read().decode()
515 self.err += "TIMEOUT : %s seconds elapsed\n" % str(self.timeout)
518 except Exception as e:
519 self.err += _("Unable to get remote log files: %s" % e)
521 def total_duration(self):
522 """Give the total duration of the job
524 :return: the total duration of the job in seconds
527 return self._Tf - self._T0
530 """Launch the job by executing the remote command.
533 # Prevent multiple run
535 msg = _("Warning: A job can only be launched one time")
536 msg2 = _("Trying to launch the job \"%s\" whereas it has "
537 "already been launched." % self.name)
538 self.logger.write(src.printcolors.printcWarning("%s\n%s\n" % (msg,
542 # Do not execute the command if the machine could not be reached
543 if not self.machine.successfully_connected(self.logger):
544 self._has_finished = True
546 self.err += ("Connection to machine (name : %s, host: %s, port:"
547 " %s, user: %s) has failed\nUse the log command "
548 "to get more information."
549 % (self.machine.name,
554 # Usual case : Launch the command on remote machine
555 self._T0 = time.time()
556 self._stdin, self._stdout, self._stderr = self.machine.exec_command(
559 # If the results are not initialized, finish the job
560 if (self._stdin, self._stdout, self._stderr) == (None, None, None):
561 self._has_finished = True
562 self._Tf = time.time()
564 self.err += "The server failed to execute the command"
566 # Put the beginning flag to true.
567 self._has_begun = True
569 def write_results(self):
570 """Display on the terminal all the job's information
572 self.logger.write("name : " + self.name + "\n")
574 self.logger.write("after : %s\n" % self.after)
575 self.logger.write("Time elapsed : %4imin %2is \n" %
576 (self.total_duration()//60 , self.total_duration()%60))
578 self.logger.write("Begin time : %s\n" %
579 time.strftime('%Y-%m-%d %H:%M:%S',
580 time.localtime(self._T0)) )
582 self.logger.write("End time : %s\n\n" %
583 time.strftime('%Y-%m-%d %H:%M:%S',
584 time.localtime(self._Tf)) )
586 machine_head = "Informations about connection :\n"
587 underline = (len(machine_head) - 2) * "-"
588 self.logger.write(src.printcolors.printcInfo(
589 machine_head+underline+"\n"))
590 self.machine.write_info(self.logger)
592 self.logger.write(src.printcolors.printcInfo("out : \n"))
594 self.logger.write("Unable to get output\n")
596 self.logger.write(self.out + "\n")
597 self.logger.write(src.printcolors.printcInfo("err : \n"))
598 self.logger.write(self.err + "\n")
600 def get_status(self):
601 """Get the status of the job (used by the Gui for xml display)
603 :return: The current status of the job
606 if not self.machine.successfully_connected(self.logger):
607 return "SSH connection KO"
608 if not self.has_begun():
609 return "Not launched"
612 if self.is_running():
613 return "running since " + time.strftime('%Y-%m-%d %H:%M:%S',
614 time.localtime(self._T0))
615 if self.has_finished():
616 if self.is_timeout():
617 return "Timeout since " + time.strftime('%Y-%m-%d %H:%M:%S',
618 time.localtime(self._Tf))
619 return "Finished since " + time.strftime('%Y-%m-%d %H:%M:%S',
620 time.localtime(self._Tf))
623 '''Class to manage the jobs to be run
630 lenght_columns = 20):
631 # The jobs configuration
632 self.cfg_jobs = config_jobs
633 self.job_file_path = job_file_path
634 # The machine that will be used today
636 # The list of machine (hosts, port) that will be used today
637 # (a same host can have several machine instances since there
638 # can be several ssh parameters)
640 # The jobs to be launched today
642 # The jobs that will not be launched today
643 self.ljobs_not_today = []
646 self.len_columns = lenght_columns
648 # the list of jobs that have not been run yet
649 self._l_jobs_not_started = []
650 # the list of jobs that have already ran
651 self._l_jobs_finished = []
652 # the list of jobs that are running
653 self._l_jobs_running = []
655 self.determine_jobs_and_machines()
657 def define_job(self, job_def, machine):
658 '''Takes a pyconf job definition and a machine (from class machine)
659 and returns the job instance corresponding to the definition.
661 :param job_def src.config.Mapping: a job definition
662 :param machine machine: the machine on which the job will run
663 :return: The corresponding job in a job class instance
667 cmmnds = job_def.commands
668 if not "timeout" in job_def:
669 timeout = 4*60*60 # default timeout = 4h
671 timeout = job_def.timeout
673 if 'after' in job_def:
674 after = job_def.after
676 if 'application' in job_def:
677 application = job_def.application
679 if 'board' in job_def:
680 board = job_def.board
682 if "prefix" in job_def:
683 prefix = job_def.prefix
697 def determine_jobs_and_machines(self):
698 '''Function that reads the pyconf jobs definition and instantiates all
699 the machines and jobs to be done today.
704 today = datetime.date.weekday(datetime.date.today())
707 for job_def in self.cfg_jobs.jobs :
709 if not "machine" in job_def:
710 msg = _('WARNING: The job "%s" do not have the key '
711 '"machine", this job is ignored.\n\n' % job_def.name)
712 self.logger.write(src.printcolors.printcWarning(msg))
714 name_machine = job_def.machine
717 for mach in self.lmachines:
718 if mach.name == name_machine:
722 if a_machine == None:
723 for machine_def in self.cfg_jobs.machines:
724 if machine_def.name == name_machine:
725 if 'host' not in machine_def:
726 host = self.runner.cfg.VARS.hostname
728 host = machine_def.host
730 if 'user' not in machine_def:
731 user = self.runner.cfg.VARS.user
733 user = machine_def.user
735 if 'port' not in machine_def:
738 port = machine_def.port
740 if 'password' not in machine_def:
743 passwd = machine_def.password
745 if 'sat_path' not in machine_def:
746 sat_path = "salomeTools"
748 sat_path = machine_def.sat_path
759 self.lmachines.append(a_machine)
760 if (host, port) not in host_list:
761 host_list.append((host, port))
763 if a_machine == None:
764 msg = _("WARNING: The job \"%(job_name)s\" requires the "
765 "machine \"%(machine_name)s\" but this machine "
766 "is not defined in the configuration file.\n"
767 "The job will not be launched")
768 self.logger.write(src.printcolors.printcWarning(msg))
770 a_job = self.define_job(job_def, a_machine)
772 if today in job_def.when:
773 self.ljobs.append(a_job)
774 else: # today in job_def.when
775 self.ljobs_not_today.append(a_job)
777 self.lhosts = host_list
779 def ssh_connection_all_machines(self, pad=50):
780 '''Function that do the ssh connection to every machine
786 self.logger.write(src.printcolors.printcInfo((
787 "Establishing connection with all the machines :\n")))
788 for machine in self.lmachines:
789 # little algorithm in order to display traces
790 begin_line = (_("Connection to %s: " % machine.name))
791 if pad - len(begin_line) < 0:
794 endline = (pad - len(begin_line)) * "." + " "
796 step = "SSH connection"
797 self.logger.write( begin_line + endline + step)
799 # the call to the method that initiate the ssh connection
800 msg = machine.connect(self.logger)
802 # Copy salomeTools to the remote machine
803 if machine.successfully_connected(self.logger):
804 step = _("Remove SAT")
805 self.logger.write('\r%s%s%s' % (begin_line, endline, 20 * " "),3)
806 self.logger.write('\r%s%s%s' % (begin_line, endline, step), 3)
807 (__, out_dist, __) = machine.exec_command(
808 "rm -rf %s" % machine.sat_path,
814 self.logger.write('\r%s%s%s' % (begin_line, endline, 20 * " "),3)
815 self.logger.write('\r%s%s%s' % (begin_line, endline, step), 3)
817 res_copy = machine.copy_sat(self.runner.cfg.VARS.salometoolsway,
819 # get the remote machine distribution using a sat command
820 (__, out_dist, __) = machine.exec_command(
821 os.path.join(machine.sat_path,
822 "sat config --value VARS.dist --no_label"),
824 machine.distribution = out_dist.read().decode().replace("\n",
826 # Print the status of the copy
828 self.logger.write('\r%s' %
829 ((len(begin_line)+len(endline)+20) * " "), 3)
830 self.logger.write('\r%s%s%s' %
833 src.printcolors.printc(src.OK_STATUS)), 3)
835 self.logger.write('\r%s' %
836 ((len(begin_line)+len(endline)+20) * " "), 3)
837 self.logger.write('\r%s%s%s %s' %
840 src.printcolors.printc(src.KO_STATUS),
841 _("Copy of SAT failed: %s" % res_copy)), 3)
843 self.logger.write('\r%s' %
844 ((len(begin_line)+len(endline)+20) * " "), 3)
845 self.logger.write('\r%s%s%s %s' %
848 src.printcolors.printc(src.KO_STATUS),
850 self.logger.write("\n", 3)
852 self.logger.write("\n")
855 def is_occupied(self, hostname):
856 '''Function that returns True if a job is running on
857 the machine defined by its host and its port.
859 :param hostname (str, int): the pair (host, port)
860 :return: the job that is running on the host,
861 or false if there is no job running on the host.
866 for jb in self.ljobs:
867 if jb.machine.host == host and jb.machine.port == port:
872 def update_jobs_states_list(self):
873 '''Function that updates the lists that store the currently
874 running jobs and the jobs that have already finished.
879 jobs_finished_list = []
880 jobs_running_list = []
881 for jb in self.ljobs:
883 jobs_running_list.append(jb)
885 if jb.has_finished():
886 jobs_finished_list.append(jb)
888 nb_job_finished_before = len(self._l_jobs_finished)
889 self._l_jobs_finished = jobs_finished_list
890 self._l_jobs_running = jobs_running_list
892 nb_job_finished_now = len(self._l_jobs_finished)
894 return nb_job_finished_now > nb_job_finished_before
896 def cancel_dependencies_of_failing_jobs(self):
897 '''Function that cancels all the jobs that depend on a failing one.
903 for job in self.ljobs:
904 if job.after is None:
906 father_job = self.find_job_that_has_name(job.after)
907 if father_job is not None and father_job.has_failed():
910 def find_job_that_has_name(self, name):
911 '''Returns the job by its name.
913 :param name str: a job name
914 :return: the job that has the name.
917 for jb in self.ljobs:
920 # the following is executed only if the job was not found
923 def str_of_length(self, text, length):
924 '''Takes a string text of any length and returns
925 the most close string of length "length".
927 :param text str: any string
928 :param length int: a length for the returned string
929 :return: the most close string of length "length"
932 if len(text) > length:
933 text_out = text[:length-3] + '...'
935 diff = length - len(text)
936 before = " " * (diff//2)
937 after = " " * (diff//2 + diff%2)
938 text_out = before + text + after
942 def display_status(self, len_col):
943 '''Takes a lenght and construct the display of the current status
944 of the jobs in an array that has a column for each host.
945 It displays the job that is currently running on the host
948 :param len_col int: the size of the column
954 for host_port in self.lhosts:
955 jb = self.is_occupied(host_port)
956 if not jb: # nothing running on the host
957 empty = self.str_of_length("empty", len_col)
958 display_line += "|" + empty
960 display_line += "|" + src.printcolors.printcInfo(
961 self.str_of_length(jb.name, len_col))
963 self.logger.write("\r" + display_line + "|")
968 '''The main method. Runs all the jobs on every host.
969 For each host, at a given time, only one job can be running.
970 The jobs that have the field after (that contain the job that has
971 to be run before it) are run after the previous job.
972 This method stops when all the jobs are finished.
979 self.logger.write(src.printcolors.printcInfo(
980 _('Executing the jobs :\n')))
982 for host_port in self.lhosts:
985 if port == 22: # default value
986 text_line += "|" + self.str_of_length(host, self.len_columns)
988 text_line += "|" + self.str_of_length(
989 "("+host+", "+str(port)+")", self.len_columns)
991 tiret_line = " " + "-"*(len(text_line)-1) + "\n"
992 self.logger.write(tiret_line)
993 self.logger.write(text_line + "|\n")
994 self.logger.write(tiret_line)
997 # The infinite loop that runs the jobs
998 l_jobs_not_started = src.deepcopy_list(self.ljobs)
999 while len(self._l_jobs_finished) != len(self.ljobs):
1000 new_job_start = False
1001 for host_port in self.lhosts:
1003 if self.is_occupied(host_port):
1006 for jb in l_jobs_not_started:
1007 if (jb.machine.host, jb.machine.port) != host_port:
1009 if jb.after == None:
1011 l_jobs_not_started.remove(jb)
1012 new_job_start = True
1015 jb_before = self.find_job_that_has_name(jb.after)
1016 if jb_before is None:
1018 msg = _("This job was not launched because its "
1019 "father is not in the jobs list.")
1023 if jb_before.has_finished():
1025 l_jobs_not_started.remove(jb)
1026 new_job_start = True
1028 self.cancel_dependencies_of_failing_jobs()
1029 new_job_finished = self.update_jobs_states_list()
1031 if new_job_start or new_job_finished:
1033 self.gui.update_xml_files(self.ljobs)
1034 # Display the current status
1035 self.display_status(self.len_columns)
1037 # Make sure that the proc is not entirely busy
1040 self.logger.write("\n")
1041 self.logger.write(tiret_line)
1042 self.logger.write("\n\n")
1045 self.gui.update_xml_files(self.ljobs)
1046 self.gui.last_update()
1048 def write_all_results(self):
1049 '''Display all the jobs outputs.
1055 for jb in self.ljobs:
1056 self.logger.write(src.printcolors.printcLabel(
1057 "#------- Results for job %s -------#\n" % jb.name))
1059 self.logger.write("\n\n")
1062 '''Class to manage the the xml data that can be displayed in a browser to
1075 :param xml_dir_path str: The path to the directory where to put
1076 the xml resulting files
1077 :param l_jobs List: the list of jobs that run today
1078 :param l_jobs_not_today List: the list of jobs that do not run today
1079 :param file_boards str: the file path from which to read the
1082 # The logging instance
1083 self.logger = logger
1085 # The prefix to add to the xml files : date_hour
1086 self.prefix = prefix
1088 # The path of the csv files to read to fill the expected boards
1089 self.file_boards = file_boards
1091 if file_boards != "":
1092 today = datetime.date.weekday(datetime.date.today())
1093 self.parse_csv_boards(today)
1095 self.d_input_boards = {}
1097 # The path of the global xml file
1098 self.xml_dir_path = xml_dir_path
1099 # Initialize the xml files
1100 self.global_name = "global_report"
1101 xml_global_path = os.path.join(self.xml_dir_path,
1102 self.global_name + ".xml")
1103 self.xml_global_file = src.xmlManager.XmlLogFile(xml_global_path,
1106 # Find history for each job
1108 self.find_history(l_jobs, l_jobs_not_today)
1110 # The xml files that corresponds to the boards.
1111 # {name_board : xml_object}}
1112 self.d_xml_board_files = {}
1114 # Create the lines and columns
1115 self.initialize_boards(l_jobs, l_jobs_not_today)
1117 # Write the xml file
1118 self.update_xml_files(l_jobs)
1120 def add_xml_board(self, name):
1121 '''Add a board to the board list
1122 :param name str: the board name
1124 xml_board_path = os.path.join(self.xml_dir_path, name + ".xml")
1125 self.d_xml_board_files[name] = src.xmlManager.XmlLogFile(
1128 self.d_xml_board_files[name].add_simple_node("distributions")
1129 self.d_xml_board_files[name].add_simple_node("applications")
1130 self.d_xml_board_files[name].add_simple_node("board", text=name)
1132 def initialize_boards(self, l_jobs, l_jobs_not_today):
1133 '''Get all the first information needed for each file and write the
1134 first version of the files
1135 :param l_jobs List: the list of jobs that run today
1136 :param l_jobs_not_today List: the list of jobs that do not run today
1138 # Get the boards to fill and put it in a dictionary
1139 # {board_name : xml instance corresponding to the board}
1140 for job in l_jobs + l_jobs_not_today:
1142 if (board is not None and
1143 board not in self.d_xml_board_files.keys()):
1144 self.add_xml_board(board)
1146 # Verify that the boards given as input are done
1147 for board in list(self.d_input_boards.keys()):
1148 if board not in self.d_xml_board_files:
1149 self.add_xml_board(board)
1150 root_node = self.d_xml_board_files[board].xmlroot
1151 src.xmlManager.append_node_attrib(root_node,
1152 {"input_file" : self.file_boards})
1154 # Loop over all jobs in order to get the lines and columns for each
1158 for board in self.d_xml_board_files:
1160 d_application[board] = []
1164 for job in l_jobs + l_jobs_not_today:
1166 if (job.machine.host, job.machine.port) not in l_hosts_ports:
1167 l_hosts_ports.append((job.machine.host, job.machine.port))
1169 distrib = job.machine.distribution
1170 application = job.application
1172 board_job = job.board
1175 for board in self.d_xml_board_files:
1176 if board_job == board:
1177 if distrib is not None and distrib not in d_dist[board]:
1178 d_dist[board].append(distrib)
1179 src.xmlManager.add_simple_node(
1180 self.d_xml_board_files[board].xmlroot.find(
1183 attrib={"name" : distrib})
1185 if board_job == board:
1186 if (application is not None and
1187 application not in d_application[board]):
1188 d_application[board].append(application)
1189 src.xmlManager.add_simple_node(
1190 self.d_xml_board_files[board].xmlroot.find(
1194 "name" : application})
1196 # Verify that there are no missing application or distribution in the
1197 # xml board files (regarding the input boards)
1198 for board in self.d_xml_board_files:
1199 l_dist = d_dist[board]
1200 if board not in self.d_input_boards.keys():
1202 for dist in self.d_input_boards[board]["rows"]:
1203 if dist not in l_dist:
1204 src.xmlManager.add_simple_node(
1205 self.d_xml_board_files[board].xmlroot.find(
1208 attrib={"name" : dist})
1209 l_appli = d_application[board]
1210 for appli in self.d_input_boards[board]["columns"]:
1211 if appli not in l_appli:
1212 src.xmlManager.add_simple_node(
1213 self.d_xml_board_files[board].xmlroot.find(
1216 attrib={"name" : appli})
1218 # Initialize the hosts_ports node for the global file
1219 self.xmlhosts_ports = self.xml_global_file.add_simple_node(
1221 for host, port in l_hosts_ports:
1222 host_port = "%s:%i" % (host, port)
1223 src.xmlManager.add_simple_node(self.xmlhosts_ports,
1225 attrib={"name" : host_port})
1227 # Initialize the jobs node in all files
1228 for xml_file in [self.xml_global_file] + list(
1229 self.d_xml_board_files.values()):
1230 xml_jobs = xml_file.add_simple_node("jobs")
1231 # Get the jobs present in the config file but
1232 # that will not be launched today
1233 self.put_jobs_not_today(l_jobs_not_today, xml_jobs)
1235 # add also the infos node
1236 xml_file.add_simple_node("infos",
1237 attrib={"name" : "last update",
1238 "JobsCommandStatus" : "running"})
1240 # and put the history node
1241 history_node = xml_file.add_simple_node("history")
1242 name_board = os.path.basename(xml_file.logFile)[:-len(".xml")]
1243 # serach for board files
1244 expression = "^[0-9]{8}_+[0-9]{6}_" + name_board + ".xml$"
1245 oExpr = re.compile(expression)
1246 # Get the list of xml borad files that are in the log directory
1247 for file_name in os.listdir(self.xml_dir_path):
1248 if oExpr.search(file_name):
1249 date = os.path.basename(file_name).split("_")[0]
1250 file_path = os.path.join(self.xml_dir_path, file_name)
1251 src.xmlManager.add_simple_node(history_node,
1254 attrib={"date" : date})
1257 # Find in each board the squares that needs to be filled regarding the
1258 # input csv files but that are not covered by a today job
1259 for board in self.d_input_boards.keys():
1260 xml_root_board = self.d_xml_board_files[board].xmlroot
1261 # Find the missing jobs for today
1262 xml_missing = src.xmlManager.add_simple_node(xml_root_board,
1264 for row, column in self.d_input_boards[board]["jobs"]:
1267 if (job.application == column and
1268 job.machine.distribution == row):
1272 src.xmlManager.add_simple_node(xml_missing,
1274 attrib={"distribution" : row,
1275 "application" : column })
1276 # Find the missing jobs not today
1277 xml_missing_not_today = src.xmlManager.add_simple_node(
1279 "missing_jobs_not_today")
1280 for row, column in self.d_input_boards[board]["jobs_not_today"]:
1282 for job in l_jobs_not_today:
1283 if (job.application == column and
1284 job.machine.distribution == row):
1288 src.xmlManager.add_simple_node(xml_missing_not_today,
1290 attrib={"distribution" : row,
1291 "application" : column })
1293 def find_history(self, l_jobs, l_jobs_not_today):
1294 """find, for each job, in the existent xml boards the results for the
1295 job. Store the results in the dictionnary self.history = {name_job :
1296 list of (date, status, list links)}
1298 :param l_jobs List: the list of jobs to run today
1299 :param l_jobs_not_today List: the list of jobs that do not run today
1301 # load the all the history
1302 expression = "^[0-9]{8}_+[0-9]{6}_" + self.global_name + ".xml$"
1303 oExpr = re.compile(expression)
1304 # Get the list of global xml that are in the log directory
1306 for file_name in os.listdir(self.xml_dir_path):
1307 if oExpr.search(file_name):
1308 file_path = os.path.join(self.xml_dir_path, file_name)
1310 global_xml = src.xmlManager.ReadXmlFile(file_path)
1311 l_globalxml.append(global_xml)
1312 except Exception as e:
1313 msg = _("\nWARNING: the file %s can not be read, it will be "
1314 "ignored\n%s" % (file_path, e))
1315 self.logger.write("%s\n" % src.printcolors.printcWarning(
1318 # Construct the dictionnary self.history
1319 for job in l_jobs + l_jobs_not_today:
1321 for global_xml in l_globalxml:
1322 date = os.path.basename(global_xml.filePath).split("_")[0]
1323 global_root_node = global_xml.xmlroot.find("jobs")
1324 job_node = src.xmlManager.find_node_by_attrib(
1330 if job_node.find("remote_log_file_path") is not None:
1331 link = job_node.find("remote_log_file_path").text
1332 res_job = job_node.find("res").text
1333 if link != "nothing":
1334 l_links.append((date, res_job, link))
1335 l_links = sorted(l_links, reverse=True)
1336 self.history[job.name] = l_links
1338 def put_jobs_not_today(self, l_jobs_not_today, xml_node_jobs):
1339 '''Get all the first information needed for each file and write the
1340 first version of the files
1342 :param xml_node_jobs etree.Element: the node corresponding to a job
1343 :param l_jobs_not_today List: the list of jobs that do not run today
1345 for job in l_jobs_not_today:
1346 xmlj = src.xmlManager.add_simple_node(xml_node_jobs,
1348 attrib={"name" : job.name})
1349 src.xmlManager.add_simple_node(xmlj, "application", job.application)
1350 src.xmlManager.add_simple_node(xmlj,
1352 job.machine.distribution)
1353 src.xmlManager.add_simple_node(xmlj, "board", job.board)
1354 src.xmlManager.add_simple_node(xmlj,
1355 "commands", " ; ".join(job.commands))
1356 src.xmlManager.add_simple_node(xmlj, "state", "Not today")
1357 src.xmlManager.add_simple_node(xmlj, "machine", job.machine.name)
1358 src.xmlManager.add_simple_node(xmlj, "host", job.machine.host)
1359 src.xmlManager.add_simple_node(xmlj, "port", str(job.machine.port))
1360 src.xmlManager.add_simple_node(xmlj, "user", job.machine.user)
1361 src.xmlManager.add_simple_node(xmlj, "sat_path",
1362 job.machine.sat_path)
1363 xml_history = src.xmlManager.add_simple_node(xmlj, "history")
1364 for i, (date, res_job, link) in enumerate(self.history[job.name]):
1366 # tag the first one (the last one)
1367 src.xmlManager.add_simple_node(xml_history,
1370 attrib={"date" : date,
1374 src.xmlManager.add_simple_node(xml_history,
1377 attrib={"date" : date,
1381 def parse_csv_boards(self, today):
1382 """ Parse the csv file that describes the boards to produce and fill
1383 the dict d_input_boards that contain the csv file contain
1385 :param today int: the current day of the week
1387 # open the csv file and read its content
1389 with open(self.file_boards, 'r') as f:
1390 reader = csv.reader(f,delimiter=CSV_DELIMITER)
1393 # get the delimiter for the boards (empty line)
1394 boards_delimiter = [''] * len(l_read[0])
1395 # Make the list of boards, by splitting with the delimiter
1396 l_boards = [list(y) for x, y in itertools.groupby(l_read,
1397 lambda z: z == boards_delimiter) if not x]
1399 # loop over the csv lists of lines and get the rows, columns and jobs
1401 for input_board in l_boards:
1403 board_name = input_board[0][0]
1406 columns = input_board[0][1:]
1411 for line in input_board[1:]:
1414 for i, square in enumerate(line[1:]):
1417 days = square.split(DAYS_SEPARATOR)
1418 days = [int(day) for day in days]
1419 job = (row, columns[i])
1423 jobs_not_today.append(job)
1425 d_boards[board_name] = {"rows" : rows,
1426 "columns" : columns,
1428 "jobs_not_today" : jobs_not_today}
1430 self.d_input_boards = d_boards
1432 def update_xml_files(self, l_jobs):
1433 '''Write all the xml files with updated information about the jobs
1435 :param l_jobs List: the list of jobs that run today
1437 for xml_file in [self.xml_global_file] + list(
1438 self.d_xml_board_files.values()):
1439 self.update_xml_file(l_jobs, xml_file)
1442 self.write_xml_files()
1444 def update_xml_file(self, l_jobs, xml_file):
1445 '''update information about the jobs for the file xml_file
1447 :param l_jobs List: the list of jobs that run today
1448 :param xml_file xmlManager.XmlLogFile: the xml instance to update
1451 xml_node_jobs = xml_file.xmlroot.find('jobs')
1452 # Update the job names and status node
1454 # Find the node corresponding to the job and delete it
1455 # in order to recreate it
1456 for xmljob in xml_node_jobs.findall('job'):
1457 if xmljob.attrib['name'] == job.name:
1458 xml_node_jobs.remove(xmljob)
1462 T0 = time.strftime('%Y-%m-%d %H:%M:%S',
1463 time.localtime(job._T0))
1466 Tf = time.strftime('%Y-%m-%d %H:%M:%S',
1467 time.localtime(job._Tf))
1469 # recreate the job node
1470 xmlj = src.xmlManager.add_simple_node(xml_node_jobs,
1472 attrib={"name" : job.name})
1473 src.xmlManager.add_simple_node(xmlj, "machine", job.machine.name)
1474 src.xmlManager.add_simple_node(xmlj, "host", job.machine.host)
1475 src.xmlManager.add_simple_node(xmlj, "port", str(job.machine.port))
1476 src.xmlManager.add_simple_node(xmlj, "user", job.machine.user)
1477 xml_history = src.xmlManager.add_simple_node(xmlj, "history")
1478 for date, res_job, link in self.history[job.name]:
1479 src.xmlManager.add_simple_node(xml_history,
1482 attrib={"date" : date,
1485 src.xmlManager.add_simple_node(xmlj, "sat_path",
1486 job.machine.sat_path)
1487 src.xmlManager.add_simple_node(xmlj, "application", job.application)
1488 src.xmlManager.add_simple_node(xmlj, "distribution",
1489 job.machine.distribution)
1490 src.xmlManager.add_simple_node(xmlj, "board", job.board)
1491 src.xmlManager.add_simple_node(xmlj, "timeout", str(job.timeout))
1492 src.xmlManager.add_simple_node(xmlj, "commands",
1493 " ; ".join(job.commands))
1494 src.xmlManager.add_simple_node(xmlj, "state", job.get_status())
1495 src.xmlManager.add_simple_node(xmlj, "begin", T0)
1496 src.xmlManager.add_simple_node(xmlj, "end", Tf)
1497 src.xmlManager.add_simple_node(xmlj, "out",
1498 src.printcolors.cleancolor(job.out))
1499 src.xmlManager.add_simple_node(xmlj, "err",
1500 src.printcolors.cleancolor(job.err))
1501 src.xmlManager.add_simple_node(xmlj, "res", str(job.res_job))
1502 if len(job.remote_log_files) > 0:
1503 src.xmlManager.add_simple_node(xmlj,
1504 "remote_log_file_path",
1505 job.remote_log_files[0])
1507 src.xmlManager.add_simple_node(xmlj,
1508 "remote_log_file_path",
1510 # Search for the test log if there is any
1511 l_test_log_files = self.find_test_log(job.remote_log_files)
1512 xml_test = src.xmlManager.add_simple_node(xmlj,
1513 "test_log_file_path")
1514 for test_log_path, res_test, nb_fails in l_test_log_files:
1515 test_path_node = src.xmlManager.add_simple_node(xml_test,
1518 test_path_node.attrib["res"] = res_test
1519 test_path_node.attrib["nb_fails"] = nb_fails
1521 xmlafter = src.xmlManager.add_simple_node(xmlj, "after", job.after)
1522 # get the job father
1523 if job.after is not None:
1526 if jb.name == job.after:
1529 if (job_father is not None and
1530 len(job_father.remote_log_files) > 0):
1531 link = job_father.remote_log_files[0]
1534 src.xmlManager.append_node_attrib(xmlafter, {"link" : link})
1536 # Verify that the job is to be done today regarding the input csv
1538 if job.board and job.board in self.d_input_boards.keys():
1540 for dist, appli in self.d_input_boards[job.board]["jobs"]:
1541 if (job.machine.distribution == dist
1542 and job.application == appli):
1544 src.xmlManager.add_simple_node(xmlj,
1549 src.xmlManager.add_simple_node(xmlj,
1555 xml_node_infos = xml_file.xmlroot.find('infos')
1556 src.xmlManager.append_node_attrib(xml_node_infos,
1558 datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")})
1561 def find_test_log(self, l_remote_log_files):
1562 '''Find if there is a test log (board) in the remote log files and
1563 the path to it. There can be several test command, so the result is
1566 :param l_remote_log_files List: the list of all remote log files
1567 :return: the list of (test log files path, res of the command)
1571 for file_path in l_remote_log_files:
1572 dirname = os.path.basename(os.path.dirname(file_path))
1573 file_name = os.path.basename(file_path)
1574 regex = src.logger.log_all_command_file_expression
1575 oExpr = re.compile(regex)
1576 if dirname == "TEST" and oExpr.search(file_name):
1577 # find the res of the command
1578 prod_node = etree.parse(file_path).getroot().find("product")
1579 res_test = prod_node.attrib["global_res"]
1580 # find the number of fails
1581 testbase_node = prod_node.find("tests").find("testbase")
1582 nb_fails = int(testbase_node.attrib["failed"])
1583 # put the file path, the res of the test command and the number
1584 # of fails in the output
1585 res.append((file_path, res_test, nb_fails))
1589 def last_update(self, finish_status = "finished"):
1590 '''update information about the jobs for the file xml_file
1592 :param l_jobs List: the list of jobs that run today
1593 :param xml_file xmlManager.XmlLogFile: the xml instance to update
1595 for xml_file in [self.xml_global_file] + list(self.d_xml_board_files.values()):
1596 xml_node_infos = xml_file.xmlroot.find('infos')
1597 src.xmlManager.append_node_attrib(xml_node_infos,
1598 attrib={"JobsCommandStatus" : finish_status})
1600 self.write_xml_files()
1602 def write_xml_file(self, xml_file, stylesheet):
1603 ''' Write one xml file and the same file with prefix
1605 xml_file.write_tree(stylesheet)
1606 file_path = xml_file.logFile
1607 file_dir = os.path.dirname(file_path)
1608 file_name = os.path.basename(file_path)
1609 file_name_with_prefix = self.prefix + "_" + file_name
1610 xml_file.write_tree(stylesheet, os.path.join(file_dir,
1611 file_name_with_prefix))
1613 def write_xml_files(self):
1614 ''' Write the xml files
1616 self.write_xml_file(self.xml_global_file, STYLESHEET_GLOBAL)
1617 for xml_file in self.d_xml_board_files.values():
1618 self.write_xml_file(xml_file, STYLESHEET_BOARD)
def get_config_file_path(job_config_name, l_cfg_dir):
    '''Find the path to the job configuration file: either the name is
    already an existing .pyconf path, or the file is searched in the
    configuration directories (appending ".pyconf" when missing).

    :param job_config_name str: the name or path of the configuration file
    :param l_cfg_dir List: the directories in which to search
    :return: (found, path) — found is True when the file exists; path is
             the matching path, or the last candidate tried when not found
    :rtype: (bool, str)
    '''
    found = False
    file_jobs_cfg = None
    if os.path.exists(job_config_name) and job_config_name.endswith(".pyconf"):
        # a direct path to an existing .pyconf file was given
        found = True
        file_jobs_cfg = job_config_name
    else:
        for cfg_dir in l_cfg_dir:
            file_jobs_cfg = os.path.join(cfg_dir, job_config_name)
            if not file_jobs_cfg.endswith('.pyconf'):
                file_jobs_cfg += '.pyconf'
            if not os.path.exists(file_jobs_cfg):
                continue
            found = True
            break
    return found, file_jobs_cfg
# Describes the command
def description():
    '''Method called when salomeTools is invoked with the --help option.

    :return: the text to display for the jobs command description
    :rtype: str
    '''
    return _("The jobs command launches maintenances that are described"
             " in the dedicated jobs configuration file.\n\nexample:\nsat "
             "jobs --name my_jobs --publish")
1648 def run(args, runner, logger):
1650 (options, args) = parser.parse_args(args)
1652 l_cfg_dir = runner.cfg.PATHS.JOBPATH
1654 # list option : display all the available config files
1656 for cfg_dir in l_cfg_dir:
1657 if not options.no_label:
1658 logger.write("------ %s\n" %
1659 src.printcolors.printcHeader(cfg_dir))
1660 if not os.path.exists(cfg_dir):
1662 for f in sorted(os.listdir(cfg_dir)):
1663 if not f.endswith('.pyconf'):
1666 logger.write("%s\n" % cfilename)
1669 # Make sure the jobs_config option has been called
1670 if not options.jobs_cfg:
1671 message = _("The option --jobs_config is required\n")
1672 src.printcolors.printcError(message)
1675 # Find the file in the directories, unless it is a full path
1676 # merge all in a config
1677 merger = src.pyconf.ConfigMerger()
1678 config_jobs = src.pyconf.Config()
1679 l_conf_files_path = []
1680 for config_file in options.jobs_cfg:
1681 found, file_jobs_cfg = get_config_file_path(config_file, l_cfg_dir)
1683 msg = _("The file configuration %s was not found."
1684 "\nUse the --list option to get the "
1685 "possible files." % config_file)
1686 logger.write("%s\n" % src.printcolors.printcError(msg), 1)
1688 l_conf_files_path.append(file_jobs_cfg)
1689 # Read the config that is in the file
1690 one_config_jobs = src.read_config_from_a_file(file_jobs_cfg)
1691 merger.merge(config_jobs, one_config_jobs)
1694 (_("Platform"), runner.cfg.VARS.dist),
1695 (_("Files containing the jobs configuration"), l_conf_files_path)
1697 src.print_info(logger, info)
1699 if options.only_jobs:
1700 l_jb = src.pyconf.Sequence()
1701 for jb in config_jobs.jobs:
1702 if jb.name in options.only_jobs:
1704 "Job that was given in only_jobs option parameters\n")
1705 config_jobs.jobs = l_jb
1707 # Make a unique file that contain all the jobs in order to use it
1709 name_pyconf = "_".join([os.path.basename(path)[:-len('.pyconf')]
1710 for path in l_conf_files_path]) + ".pyconf"
1711 path_pyconf = src.get_tmp_filename(runner.cfg, name_pyconf)
1713 f = file( path_pyconf , 'w')
1714 config_jobs.__save__(f)
1717 today_jobs = Jobs(runner,
1721 # SSH connection to all machines
1722 today_jobs.ssh_connection_all_machines()
1723 if options.test_connection:
1728 logger.write(src.printcolors.printcInfo(
1729 _("Initialize the xml boards : ")), 5)
1732 # Copy the stylesheets in the log directory
1733 log_dir = runner.cfg.USER.log_dir
1734 xsl_dir = os.path.join(runner.cfg.VARS.srcDir, 'xsl')
1736 files_to_copy.append(os.path.join(xsl_dir, STYLESHEET_GLOBAL))
1737 files_to_copy.append(os.path.join(xsl_dir, STYLESHEET_BOARD))
1738 files_to_copy.append(os.path.join(xsl_dir, "running.gif"))
1739 for file_path in files_to_copy:
1740 shutil.copy2(file_path, log_dir)
1742 # Instanciate the Gui in order to produce the xml files that contain all
1744 gui = Gui(runner.cfg.USER.log_dir,
1746 today_jobs.ljobs_not_today,
1747 runner.cfg.VARS.datehour,
1749 file_boards = options.input_boards)
1751 logger.write(src.printcolors.printcSuccess("OK"), 5)
1752 logger.write("\n\n", 5)
1755 # Display the list of the xml files
1756 logger.write(src.printcolors.printcInfo(("Here is the list of published"
1758 logger.write("%s\n" % gui.xml_global_file.logFile, 4)
1759 for board in gui.d_xml_board_files.keys():
1760 file_path = gui.d_xml_board_files[board].logFile
1761 file_name = os.path.basename(file_path)
1762 logger.write("%s\n" % file_path, 4)
1763 logger.add_link(file_name, "board", 0, board)
1765 logger.write("\n", 4)
1767 today_jobs.gui = gui
1771 # Run all the jobs contained in config_jobs
1772 today_jobs.run_jobs()
1773 except KeyboardInterrupt:
1775 logger.write("\n\n%s\n\n" %
1776 (src.printcolors.printcWarning(_("Forced interruption"))), 1)
1777 except Exception as e:
1778 msg = _("CRITICAL ERROR: The jobs loop has been interrupted\n")
1779 logger.write("\n\n%s\n" % src.printcolors.printcError(msg) )
1780 logger.write("%s\n" % str(e))
1782 __, __, exc_traceback = sys.exc_info()
1783 fp = tempfile.TemporaryFile()
1784 traceback.print_tb(exc_traceback, file=fp)
1787 logger.write("\nTRACEBACK: %s\n" % stack.replace('"',"'"), 1)
1793 msg = _("Killing the running jobs and trying"
1794 " to get the corresponding logs\n")
1795 logger.write(src.printcolors.printcWarning(msg))
1797 # find the potential not finished jobs and kill them
1798 for jb in today_jobs.ljobs:
1799 if not jb.has_finished():
1802 jb.kill_remote_process()
1803 except Exception as e:
1804 msg = _("Failed to kill job %s: %s\n" % (jb.name, e))
1805 logger.write(src.printcolors.printcWarning(msg))
1806 if jb.res_job != "0":
1810 today_jobs.gui.last_update(_("Forced interruption"))
1813 today_jobs.gui.last_update()
1814 # Output the results
1815 today_jobs.write_all_results()
1816 # Remove the temporary pyconf file
1817 if os.path.exists(path_pyconf):
1818 os.remove(path_pyconf)