3 # Copyright (C) 2010-2013 CEA/DEN
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 # Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
32 import src.ElementTree as etree
34 STYLESHEET_GLOBAL = "jobs_global_report.xsl"
35 STYLESHEET_BOARD = "jobs_board_report.xsl"
# Command-line option declarations for the "jobs" command.
# NOTE(review): this dump is missing source lines (the leading original
# line numbers jump); e.g. the default-value argument and closing paren
# of the -t and -p add_option calls sit on lines absent from this view.
40 parser = src.options.Options()
42 parser.add_option('n', 'name', 'list2', 'jobs_cfg',
43 _('Mandatory: The name of the config file that contains'
44 ' the jobs configuration. Can be a list.'))
45 parser.add_option('o', 'only_jobs', 'list2', 'only_jobs',
46 _('Optional: the list of jobs to launch, by their name. '))
47 parser.add_option('l', 'list', 'boolean', 'list',
48 _('Optional: list all available config files.'))
49 parser.add_option('t', 'test_connection', 'boolean', 'test_connection',
50 _("Optional: try to connect to the machines. "
51 "Not executing the jobs."),
53 parser.add_option('p', 'publish', 'boolean', 'publish',
54 _("Optional: generate an xml file that can be read in a "
55 "browser to display the jobs status."),
57 parser.add_option('i', 'input_boards', 'string', 'input_boards', _("Optional: "
58 "the path to csv file that contain "
59 "the expected boards."),"")
60 parser.add_option('', 'completion', 'boolean', 'no_label',
61 _("Optional (internal use): do not print labels, Works only "
# Wrapper around one paramiko SSH connection to a remote machine.
65 class Machine(object):
66 '''Class to manage a ssh connection on a machine
# NOTE(review): the __init__ signature lines are missing from this view;
# only its trailing default parameter and part of the body are visible.
74 sat_path="salomeTools"):
78 self.distribution = None # Will be filled after copying SAT on the machine
80 self.password = passwd
81 self.sat_path = sat_path
# One SSHClient per Machine instance; connection is deferred to connect().
82 self.ssh = paramiko.SSHClient()
# Tri-state flag: None = connect() never called, then True/False.
83 self._connection_successful = None
85 def connect(self, logger):
86 '''Initiate the ssh connection to the remote machine
88 :param logger src.logger.Logger: The logger instance
# Pessimistic default; flipped to True only if ssh.connect() raised nothing.
93 self._connection_successful = False
94 self.ssh.load_system_host_keys()
# Auto-accept unknown host keys (no interactive prompt on first connect).
95 self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
# NOTE(review): the try: line and the port/username arguments of
# ssh.connect are on lines missing from this view.
97 self.ssh.connect(self.host,
100 password = self.password)
101 except paramiko.AuthenticationException:
102 message = src.KO_STATUS + _("Authentication failed")
103 except paramiko.BadHostKeyException:
104 message = (src.KO_STATUS +
105 _("The server's host key could not be verified"))
106 except paramiko.SSHException:
107 message = ( _("SSHException error connecting or "
108 "establishing an SSH session"))
# Catch-all handler for any other connection error (its except line is
# missing from this view).
110 message = ( _("Error connecting or establishing an SSH session"))
112 self._connection_successful = True
def successfully_connected(self, logger):
    '''Verify if the connection to the remote machine has succeed

    :param logger src.logger.Logger: The logger instance
    :return: True if the connection has succeed, False if not
    :rtype: bool
    '''
    # None means connect() was never called: warn, then fall through and
    # return None (falsy), preserving the historical behaviour.
    # Fix: compare to None with "is" (identity), not "==" (PEP 8).
    if self._connection_successful is None:
        message = _("Warning : trying to ask if the connection to "
            "(name: %s host: %s, port: %s, user: %s) is OK whereas there were"
            " no connection request" %
            (self.name, self.host, self.port, self.user))
        logger.write( src.printcolors.printcWarning(message))
    return self._connection_successful
131 def copy_sat(self, sat_local_path, job_file):
132 '''Copy salomeTools to the remote machine in self.sat_path
# NOTE(review): the try: line wrapping this body is on a line missing
# from this view (the matching except is visible below).
136 # open a sftp connection
137 self.sftp = self.ssh.open_sftp()
138 # Create the sat directory on remote machine if it is not existing
139 self.mkdir(self.sat_path, ignore_existing=True)
# Recursive upload of the whole SAT tree, skipping '.git'.
141 self.put_dir(sat_local_path, self.sat_path, filters = ['.git'])
142 # put the job configuration file in order to make it reachable
143 # on the remote machine
# Leading dot hides the uploaded config file on the remote side.
144 remote_job_file_name = ".%s" % os.path.basename(job_file)
145 self.sftp.put(job_file, os.path.join(self.sat_path,
146 remote_job_file_name))
147 except Exception as e:
# Any copy failure marks the whole connection as unusable.
149 self._connection_successful = False
153 def put_dir(self, source, target, filters = []):
154 ''' Uploads the contents of the source directory to the target path. The
155 target directory needs to exists. All sub-directories in source are
156 created under target.
# NOTE(review): a mutable default ([]) for "filters" is risky in general,
# but the body never mutates it in the visible lines — left as is.
158 for item in os.listdir(source):
# The filter check on "item" is presumably on the missing lines here
# (159-160) — TODO confirm against the full source.
161 source_path = os.path.join(source, item)
162 destination_path = os.path.join(target, item)
# Symlinks are re-created remotely rather than copied as files.
163 if os.path.islink(source_path):
164 linkto = os.readlink(source_path)
165 self.sftp.symlink(linkto, destination_path)
# Mirror the local permission bits on the remote entry.
167 self.sftp.chmod(destination_path,
168 os.stat(source_path).st_mode)
172 if os.path.isfile(source_path):
173 self.sftp.put(source_path, destination_path)
174 self.sftp.chmod(destination_path,
175 os.stat(source_path).st_mode)
# Directory case: create it remotely then recurse into it (the
# else: introducing this branch is on a missing line).
177 self.mkdir(destination_path, ignore_existing=True)
178 self.put_dir(source_path, destination_path)
# NOTE(review): mode=511 is decimal for 0o777 (paramiko's own default).
# The except-handling that implements ignore_existing is on lines
# missing from this view.
180 def mkdir(self, path, mode=511, ignore_existing=False):
181 ''' Augments mkdir by adding an option to not fail
185 self.sftp.mkdir(path, mode)
192 def exec_command(self, command, logger):
193 '''Execute the command on the remote machine
195 :param command str: The command to be run
196 :param logger src.logger.Logger: The logger instance
197 :return: the stdin, stdout, and stderr of the executing command,
199 :rtype: (paramiko.channel.ChannelFile, paramiko.channel.ChannelFile,
200 paramiko.channel.ChannelFile)
203 # Does not wait the end of the command
# The try: line wrapping this call is on a line missing from this view.
204 (stdin, stdout, stderr) = self.ssh.exec_command(command)
205 except paramiko.SSHException:
206 message = src.KO_STATUS + _(
207 ": the server failed to execute the command\n")
208 logger.write( src.printcolors.printcError(message))
# On failure callers receive (None, None, None) instead of an exception.
209 return (None, None, None)
# Second handler (its except line is missing from this view).
211 logger.write( src.printcolors.printcError(src.KO_STATUS + '\n'))
212 return (None, None, None)
214 return (stdin, stdout, stderr)
# NOTE(review): the "def close" line belonging to this docstring is on a
# line missing from this view.
217 '''Close the ssh connection
223 def write_info(self, logger):
224 '''Prints the informations relative to the machine in the logger
225 (terminal traces and log file)
227 :param logger src.logger.Logger: The logger instance
231 logger.write("host : " + self.host + "\n")
232 logger.write("port : " + str(self.port) + "\n")
233 logger.write("user : " + str(self.user) + "\n")
234 if self.successfully_connected(logger):
235 status = src.OK_STATUS
# else-branch (its else: line is missing from this view).
237 status = src.KO_STATUS
238 logger.write("Connection : " + status + "\n\n")
# NOTE(review): the "class Job" header and the __init__ signature are on
# lines missing from this view; only part of the constructor body shows.
242 '''Class to manage one job
258 self.machine = machine
260 self.timeout = timeout
261 self.application = application
265 # The list of log files to download from the remote machine
266 self.remote_log_files = []
268 # The remote command status
269 # -1 means that it has not been launched,
270 # 0 means success and 1 means fail
272 self.cancelled = False
# Internal lifecycle flags, driven by run()/has_finished()/check_time().
276 self._has_begun = False
277 self._has_finished = False
278 self._has_timouted = False
279 self._stdin = None # Store the command inputs field
280 self._stdout = None # Store the command outputs field
281 self._stderr = None # Store the command errors field
# Leading dot matches the hidden copy uploaded by Machine.copy_sat.
286 self.name_remote_jobs_pyconf = ".%s" % os.path.basename(job_file_path)
287 self.commands = commands
# Full remote "sat job" command line (middle pieces are on missing lines).
288 self.command = (os.path.join(self.machine.sat_path, "sat") +
290 os.path.join(self.machine.sat_path,
291 "list_log_files.txt") +
292 " job --jobs_config " +
293 os.path.join(self.machine.sat_path,
294 self.name_remote_jobs_pyconf) +
# Optional wrapper: prefix + ' "command"' (guard line is missing here).
298 self.command = prefix + ' "' + self.command +'"'
# NOTE(review): the "def get_pids" line is on a line missing from this view.
301 """ Get the pid(s) corresponding to the command that have been launched
302 On the remote machine
304 :return: The list of integers corresponding to the found pids
# Greps the exact command string out of ps; the grep process itself will
# also match, which callers presumably tolerate — TODO confirm.
308 cmd_pid = 'ps aux | grep "' + self.command + '" | awk \'{print $2}\''
309 (_, out_pid, _) = self.machine.exec_command(cmd_pid, self.logger)
310 pids_cmd = out_pid.readlines()
311 pids_cmd = [str(src.only_numbers(pid)) for pid in pids_cmd]
315 def kill_remote_process(self, wait=1):
316 '''Kills the process on the remote machine.
318 :return: (the output of the kill, the error of the kill)
322 pids = self.get_pids()
# Early-out when no pid was found (its guard line is missing here).
324 return ("Unable to get the pid of the command.", "")
# kill -2 sends SIGINT so the remote sat run can shut down cleanly.
326 cmd_kill = " ; ".join([("kill -2 " + pid) for pid in pids])
327 (_, out_kill, err_kill) = self.machine.exec_command(cmd_kill,
330 return (out_kill.read().decode(), err_kill.read().decode())
# NOTE(review): the "def has_begun" line is on a line missing from this view.
333 '''Returns True if the job has already begun
335 :return: True if the job has already begun
338 return self._has_begun
340 def has_finished(self):
341 '''Returns True if the job has already finished
342 (i.e. all the commands have been executed)
343 If it is finished, the outputs are stored in the fields out and err.
345 :return: True if the job has already finished
349 # If the method has already been called and returned True
350 if self._has_finished:
353 # If the job has not begun yet
354 if not self.has_begun():
# A closed stdout channel is the signal that the remote command ended.
357 if self._stdout.channel.closed:
358 self._has_finished = True
359 # Store the result outputs
360 self.out += self._stdout.read().decode()
361 self.err += self._stderr.read().decode()
# Record the end time exactly once, when completion is first observed.
363 self._Tf = time.time()
364 # And get the remote command status and log files
# The try:/get_log_files() call is on lines missing from this view.
367 except Exception as e:
368 self.err += _("Unable to get remote log files: %s" % e)
370 return self._has_finished
372 def get_log_files(self):
373 """Get the log files produced by the command launched
374 on the remote machine, and put it in the log directory of the user,
375 so they can be accessible from
377 # Do not get the files if the command is not finished
378 if not self.has_finished():
379 msg = _("Trying to get log files whereas the job is not finished.")
380 self.logger.write(src.printcolors.printcWarning(msg))
383 # First get the file that contains the list of log files to get
384 tmp_file_path = src.get_tmp_filename(self.config, "list_log_files.txt")
385 remote_path = os.path.join(self.machine.sat_path, "list_log_files.txt")
386 self.machine.sftp.get(
390 # Read the file and get the result of the command and all the log files
# NOTE(review): fstream_tmp is never closed in the visible lines — a
# "with open(...)" would be safer; cannot fix here without the missing lines.
392 fstream_tmp = open(tmp_file_path, "r")
393 file_lines = fstream_tmp.readlines()
394 file_lines = [line.replace("\n", "") for line in file_lines]
396 os.remove(tmp_file_path)
399 # The first line is the result of the command (0 success or 1 fail)
# The try: wrapping this indexing is on a missing line.
400 self.res_job = file_lines[0]
401 except Exception as e:
402 self.err += _("Unable to get status from remote file %s: %s" %
403 (remote_path, str(e)))
405 for i, job_path_remote in enumerate(file_lines[1:]):
407 # For each command, there is two files to get :
408 # 1- The xml file describing the command and giving the
410 # 2- The txt file containing the system command traces (like
411 # traces produced by the "make" command)
412 # 3- In case of the test command, there is another file to get :
413 # the xml board that contain the test results
# Parent directory name discriminates command xml / OUT txt / TEST board.
414 dirname = os.path.basename(os.path.dirname(job_path_remote))
415 if dirname != 'OUT' and dirname != 'TEST':
417 local_path = os.path.join(os.path.dirname(
418 self.logger.logFilePath),
419 os.path.basename(job_path_remote))
420 if i==0: # The first is the job command
421 self.logger.add_link(os.path.basename(job_path_remote),
425 elif dirname == 'OUT':
427 local_path = os.path.join(os.path.dirname(
428 self.logger.logFilePath),
430 os.path.basename(job_path_remote))
431 elif dirname == 'TEST':
433 local_path = os.path.join(os.path.dirname(
434 self.logger.logFilePath),
436 os.path.basename(job_path_remote))
# Download only once; remember the local copy for later publication.
439 if not os.path.exists(local_path):
440 self.machine.sftp.get(job_path_remote, local_path)
441 self.remote_log_files.append(local_path)
442 except Exception as e:
443 self.err += _("Unable to get %s log file from remote: %s" %
444 (str(job_path_remote),
447 def has_failed(self):
448 '''Returns True if the job has failed.
449 A job is considered as failed if the machine could not be reached,
450 if the remote command failed,
451 or if the job finished with a time out.
453 :return: True if the job has failed
# NOTE(review): the return statements of each branch below are on lines
# missing from this view.
456 if not self.has_finished():
458 if not self.machine.successfully_connected(self.logger):
460 if self.is_timeout():
# res_job is the string read from the remote status file ("1" = fail).
462 if self.res_job == "1":
# NOTE(review): the "def cancel" line is on a line missing from this view.
467 """In case of a failing job, one has to cancel every job that depend
468 on it. This method put the job as failed and will not be executed.
# Mark the job as begun AND finished so the scheduler skips it entirely.
472 self._has_begun = True
473 self._has_finished = True
474 self.cancelled = True
475 self.out += _("This job was not launched because its father has failed.")
476 self.err += _("This job was not launched because its father has failed.")
def is_running(self):
    '''Tell whether the job is currently executing.

    A job is running when it has been launched but has not yet
    completed all its commands.

    :return: True if the job is running
    :rtype: bool
    '''
    # Guard-clause form of: has_begun() and not has_finished()
    if not self.has_begun():
        return False
    return not self.has_finished()
def is_timeout(self):
    '''Tell whether the job ended because it exceeded its timeout.

    :return: True if the job has finished with timeout
    :rtype: bool
    '''
    # Flag is set by check_time() when the elapsed time passes the limit.
    timed_out = self._has_timouted
    return timed_out
494 def time_elapsed(self):
495 """Get the time elapsed since the job launching
497 :return: The number of seconds
# Not-begun early return and the "T_now = time.time()" assignment are on
# lines missing from this view.
500 if not self.has_begun():
503 return T_now - self._T0
505 def check_time(self):
506 """Verify that the job has not exceeded its timeout.
507 If it has, kill the remote command and consider the job as finished.
509 if not self.has_begun():
511 if self.time_elapsed() > self.timeout:
# Force the finished/timeout state before killing the remote process.
512 self._has_finished = True
513 self._has_timouted = True
514 self._Tf = time.time()
515 (out_kill, __) = self.kill_remote_process()
516 self.out += "TIMEOUT \n" + out_kill
517 self.err += "TIMEOUT : %s seconds elapsed\n" % str(self.timeout)
# Best-effort log retrieval follows (its try:/call are on missing lines).
520 except Exception as e:
521 # The 2 following lines must be suppressed after the bug is fixed
522 print("The error type: ")
526 print("Local scope:")
528 print("The str type: ")
532 self.err += _("Unable to get remote log files!")
def total_duration(self):
    """Compute how long the job ran, from launch to completion.

    :return: the total duration of the job in seconds
    :rtype: float
    """
    # _T0 is set by run(), _Tf when completion or timeout is observed.
    elapsed = self._Tf - self._T0
    return elapsed
# NOTE(review): the def line of this method (presumably "def run(self)")
# is on a line missing from this view.
543 """Launch the job by executing the remote command.
546 # Prevent multiple run
# The has_begun() guard introducing this warning is on a missing line.
548 msg = _("Warning: A job can only be launched one time")
549 msg2 = _("Trying to launch the job \"%s\" whereas it has "
550 "already been launched." % self.name)
551 self.logger.write(src.printcolors.printcWarning("%s\n%s\n" % (msg,
555 # Do not execute the command if the machine could not be reached
556 if not self.machine.successfully_connected(self.logger):
557 self._has_finished = True
559 self.err += ("Connection to machine (name : %s, host: %s, port:"
560 " %s, user: %s) has failed\nUse the log command "
561 "to get more information."
562 % (self.machine.name,
567 # Usual case : Launch the command on remote machine
568 self._T0 = time.time()
569 self._stdin, self._stdout, self._stderr = self.machine.exec_command(
572 # If the results are not initialized, finish the job
# exec_command returns (None, None, None) on SSH failure.
573 if (self._stdin, self._stdout, self._stderr) == (None, None, None):
574 self._has_finished = True
575 self._Tf = time.time()
577 self.err += "The server failed to execute the command"
579 # Put the beginning flag to true.
580 self._has_begun = True
582 def write_results(self):
583 """Display on the terminal all the job's information
585 self.logger.write("name : " + self.name + "\n")
587 self.logger.write("after : %s\n" % self.after)
588 self.logger.write("Time elapsed : %4imin %2is \n" %
589 (self.total_duration()//60 , self.total_duration()%60))
# Begin/End times only make sense once _T0/_Tf are set; the guards
# around these writes are presumably on missing lines — TODO confirm.
591 self.logger.write("Begin time : %s\n" %
592 time.strftime('%Y-%m-%d %H:%M:%S',
593 time.localtime(self._T0)) )
595 self.logger.write("End time : %s\n\n" %
596 time.strftime('%Y-%m-%d %H:%M:%S',
597 time.localtime(self._Tf)) )
599 machine_head = "Informations about connection :\n"
600 underline = (len(machine_head) - 2) * "-"
601 self.logger.write(src.printcolors.printcInfo(
602 machine_head+underline+"\n"))
603 self.machine.write_info(self.logger)
605 self.logger.write(src.printcolors.printcInfo("out : \n"))
# Fallback branch when self.out is empty (its guard is on a missing line).
607 self.logger.write("Unable to get output\n")
609 self.logger.write(self.out + "\n")
610 self.logger.write(src.printcolors.printcInfo("err : \n"))
611 self.logger.write(self.err + "\n")
613 def get_status(self):
614 """Get the status of the job (used by the Gui for xml display)
616 :return: The current status of the job
# Status precedence: connection KO > not launched > (missing branch,
# presumably "Cancelled") > running > finished/timeout.
619 if not self.machine.successfully_connected(self.logger):
620 return "SSH connection KO"
621 if not self.has_begun():
622 return "Not launched"
625 if self.is_running():
626 return "running since " + time.strftime('%Y-%m-%d %H:%M:%S',
627 time.localtime(self._T0))
628 if self.has_finished():
629 if self.is_timeout():
630 return "Timeout since " + time.strftime('%Y-%m-%d %H:%M:%S',
631 time.localtime(self._Tf))
632 return "Finished since " + time.strftime('%Y-%m-%d %H:%M:%S',
633 time.localtime(self._Tf))
# NOTE(review): the "class Jobs" header and the __init__ signature are
# on lines missing from this view.
636 '''Class to manage the jobs to be run
643 lenght_columns = 20):
644 # The jobs configuration
645 self.cfg_jobs = config_jobs
646 self.job_file_path = job_file_path
647 # The machine that will be used today
649 # The list of machine (hosts, port) that will be used today
650 # (a same host can have several machine instances since there
651 # can be several ssh parameters)
653 # The jobs to be launched today
655 # The jobs that will not be launched today
656 self.ljobs_not_today = []
# Column width used by the status-table display.
659 self.len_columns = lenght_columns
661 # the list of jobs that have not been run yet
662 self._l_jobs_not_started = []
663 # the list of jobs that have already ran
664 self._l_jobs_finished = []
665 # the list of jobs that are running
666 self._l_jobs_running = []
# Populate self.lmachines / self.ljobs / self.lhosts from the config.
668 self.determine_jobs_and_machines()
670 def define_job(self, job_def, machine):
671 '''Takes a pyconf job definition and a machine (from class machine)
672 and returns the job instance corresponding to the definition.
674 :param job_def src.config.Mapping: a job definition
675 :param machine machine: the machine on which the job will run
676 :return: The corresponding job in a job class instance
# Optional keys default when absent; the else: lines and the final Job(...)
# construction/return are on lines missing from this view.
680 cmmnds = job_def.commands
681 if not "timeout" in job_def:
682 timeout = 4*60*60 # default timeout = 4h
684 timeout = job_def.timeout
686 if 'after' in job_def:
687 after = job_def.after
689 if 'application' in job_def:
690 application = job_def.application
692 if 'board' in job_def:
693 board = job_def.board
695 if "prefix" in job_def:
696 prefix = job_def.prefix
710 def determine_jobs_and_machines(self):
711 '''Function that reads the pyconf jobs definition and instantiates all
712 the machines and jobs to be done today.
# Weekday index (Monday == 0), compared against each job's "when" list.
717 today = datetime.date.weekday(datetime.date.today())
720 for job_def in self.cfg_jobs.jobs :
722 if not "machine" in job_def:
723 msg = _('WARNING: The job "%s" do not have the key '
724 '"machine", this job is ignored.\n\n' % job_def.name)
725 self.logger.write(src.printcolors.printcWarning(msg))
727 name_machine = job_def.machine
# Reuse an already-instantiated Machine when the name matches.
730 for mach in self.lmachines:
731 if mach.name == name_machine:
735 if a_machine == None:
736 for machine_def in self.cfg_jobs.machines:
737 if machine_def.name == name_machine:
# Each connection parameter falls back to a local default
# when absent from the machine definition.
738 if 'host' not in machine_def:
739 host = self.runner.cfg.VARS.hostname
741 host = machine_def.host
743 if 'user' not in machine_def:
744 user = self.runner.cfg.VARS.user
746 user = machine_def.user
748 if 'port' not in machine_def:
751 port = machine_def.port
753 if 'password' not in machine_def:
756 passwd = machine_def.password
758 if 'sat_path' not in machine_def:
759 sat_path = "salomeTools"
761 sat_path = machine_def.sat_path
# The Machine(...) construction is on lines missing from this view.
772 self.lmachines.append(a_machine)
773 if (host, port) not in host_list:
774 host_list.append((host, port))
776 if a_machine == None:
777 msg = _("WARNING: The job \"%(job_name)s\" requires the "
778 "machine \"%(machine_name)s\" but this machine "
779 "is not defined in the configuration file.\n"
780 "The job will not be launched\n")
781 self.logger.write(src.printcolors.printcWarning(
782 msg % {"job_name" : job_def.name,
783 "machine_name" : name_machine}))
786 a_job = self.define_job(job_def, a_machine)
788 if today in job_def.when:
789 self.ljobs.append(a_job)
790 else: # today in job_def.when
791 self.ljobs_not_today.append(a_job)
793 self.lhosts = host_list
795 def ssh_connection_all_machines(self, pad=50):
796 '''Function that do the ssh connection to every machine
# pad is the target column where status text starts on each trace line.
802 self.logger.write(src.printcolors.printcInfo((
803 "Establishing connection with all the machines :\n")))
804 for machine in self.lmachines:
805 # little algorithm in order to display traces
806 begin_line = (_("Connection to %s: " % machine.name))
807 if pad - len(begin_line) < 0:
810 endline = (pad - len(begin_line)) * "." + " "
812 step = "SSH connection"
813 self.logger.write( begin_line + endline + step)
815 # the call to the method that initiate the ssh connection
816 msg = machine.connect(self.logger)
818 # Copy salomeTools to the remote machine
819 if machine.successfully_connected(self.logger):
# Wipe any previous SAT install before copying the fresh one.
820 step = _("Remove SAT")
821 self.logger.write('\r%s%s%s' % (begin_line, endline, 20 * " "),3)
822 self.logger.write('\r%s%s%s' % (begin_line, endline, step), 3)
823 (__, out_dist, __) = machine.exec_command(
824 "rm -rf %s" % machine.sat_path,
830 self.logger.write('\r%s%s%s' % (begin_line, endline, 20 * " "),3)
831 self.logger.write('\r%s%s%s' % (begin_line, endline, step), 3)
833 res_copy = machine.copy_sat(self.runner.cfg.VARS.salometoolsway,
836 # set the local settings of sat on the remote machine using
838 (__, out_dist, __) = machine.exec_command(
839 os.path.join(machine.sat_path,
840 "sat init --base unknown --workdir"
841 " unknown --log_dir unknown"),
845 # get the remote machine distribution using a sat command
846 (__, out_dist, __) = machine.exec_command(
847 os.path.join(machine.sat_path,
848 "sat config --value VARS.dist --no_label"),
850 machine.distribution = out_dist.read().decode().replace("\n",
853 # Print the status of the copy
# Three outcomes below: copy OK, copy failed, connection failed
# (the branch conditions sit on lines missing from this view).
855 self.logger.write('\r%s' %
856 ((len(begin_line)+len(endline)+20) * " "), 3)
857 self.logger.write('\r%s%s%s' %
860 src.printcolors.printc(src.OK_STATUS)), 3)
862 self.logger.write('\r%s' %
863 ((len(begin_line)+len(endline)+20) * " "), 3)
864 self.logger.write('\r%s%s%s %s' %
867 src.printcolors.printc(src.KO_STATUS),
868 _("Copy of SAT failed: %s" % res_copy)), 3)
870 self.logger.write('\r%s' %
871 ((len(begin_line)+len(endline)+20) * " "), 3)
872 self.logger.write('\r%s%s%s %s' %
875 src.printcolors.printc(src.KO_STATUS),
877 self.logger.write("\n", 3)
879 self.logger.write("\n")
882 def is_occupied(self, hostname):
883 '''Function that returns True if a job is running on
884 the machine defined by its host and its port.
886 :param hostname (str, int): the pair (host, port)
887 :return: the job that is running on the host,
888 or false if there is no job running on the host.
# The (host, port) unpacking and the return statements are on lines
# missing from this view.
893 for jb in self.ljobs:
894 if jb.machine.host == host and jb.machine.port == port:
899 def update_jobs_states_list(self):
900 '''Function that updates the lists that store the currently
901 running jobs and the jobs that have already finished.
906 jobs_finished_list = []
907 jobs_running_list = []
908 for jb in self.ljobs:
# The is_running() condition guarding this append is on a missing line.
910 jobs_running_list.append(jb)
912 if jb.has_finished():
913 jobs_finished_list.append(jb)
# Compare counts before/after to detect newly-finished jobs.
915 nb_job_finished_before = len(self._l_jobs_finished)
916 self._l_jobs_finished = jobs_finished_list
917 self._l_jobs_running = jobs_running_list
919 nb_job_finished_now = len(self._l_jobs_finished)
# True when at least one job finished since the previous call.
921 return nb_job_finished_now > nb_job_finished_before
923 def cancel_dependencies_of_failing_jobs(self):
924 '''Function that cancels all the jobs that depend on a failing one.
930 for job in self.ljobs:
# Jobs with no father are never cancelled here (continue on missing line).
931 if job.after is None:
933 father_job = self.find_job_that_has_name(job.after)
# The job.cancel() call is on a line missing from this view.
934 if father_job is not None and father_job.has_failed():
937 def find_job_that_has_name(self, name):
938 '''Returns the job by its name.
940 :param name str: a job name
941 :return: the job that has the name.
# The name comparison / return jb lines are missing from this view.
944 for jb in self.ljobs:
947 # the following is executed only if the job was not found
950 def str_of_length(self, text, length):
951 '''Takes a string text of any length and returns
952 the most close string of length "length".
954 :param text str: any string
955 :param length int: a length for the returned string
956 :return: the most close string of length "length"
# Long text is truncated with an ellipsis to fit in "length" columns.
959 if len(text) > length:
960 text_out = text[:length-3] + '...'
# Short text is centered by padding (the else: and the final return
# are on lines missing from this view).
962 diff = length - len(text)
963 before = " " * (diff//2)
964 after = " " * (diff//2 + diff%2)
965 text_out = before + text + after
969 def display_status(self, len_col):
970 '''Takes a lenght and construct the display of the current status
971 of the jobs in an array that has a column for each host.
972 It displays the job that is currently running on the host
975 :param len_col int: the size of the column
# display_line initialization is on a line missing from this view.
981 for host_port in self.lhosts:
982 jb = self.is_occupied(host_port)
983 if not jb: # nothing running on the host
984 empty = self.str_of_length("empty", len_col)
985 display_line += "|" + empty
# else-branch: show the running job's name (else: on missing line).
987 display_line += "|" + src.printcolors.printcInfo(
988 self.str_of_length(jb.name, len_col))
# \r rewrites the same terminal line on every refresh.
990 self.logger.write("\r" + display_line + "|")
# NOTE(review): the def line of this method (presumably "def run_jobs")
# is on a line missing from this view.
995 '''The main method. Runs all the jobs on every host.
996 For each host, at a given time, only one job can be running.
997 The jobs that have the field after (that contain the job that has
998 to be run before it) are run after the previous job.
999 This method stops when all the jobs are finished.
1006 self.logger.write(src.printcolors.printcInfo(
1007 _('Executing the jobs :\n')))
# Build the header line of the status table, one column per host.
1009 for host_port in self.lhosts:
1012 if port == 22: # default value
1013 text_line += "|" + self.str_of_length(host, self.len_columns)
1015 text_line += "|" + self.str_of_length(
1016 "("+host+", "+str(port)+")", self.len_columns)
1018 tiret_line = " " + "-"*(len(text_line)-1) + "\n"
1019 self.logger.write(tiret_line)
1020 self.logger.write(text_line + "|\n")
1021 self.logger.write(tiret_line)
1024 # The infinite loop that runs the jobs
1025 l_jobs_not_started = src.deepcopy_list(self.ljobs)
1026 while len(self._l_jobs_finished) != len(self.ljobs):
1027 new_job_start = False
1028 for host_port in self.lhosts:
# One job max per (host, port): skip busy hosts.
1030 if self.is_occupied(host_port):
1033 for jb in l_jobs_not_started:
1034 if (jb.machine.host, jb.machine.port) != host_port:
# No dependency: launch immediately (jb.run() on a missing line).
1036 if jb.after == None:
1038 l_jobs_not_started.remove(jb)
1039 new_job_start = True
1042 jb_before = self.find_job_that_has_name(jb.after)
1043 if jb_before is None:
1045 msg = _("This job was not launched because its "
1046 "father is not in the jobs list.")
# Dependency satisfied: the father finished, so launch.
1050 if jb_before.has_finished():
1052 l_jobs_not_started.remove(jb)
1053 new_job_start = True
1055 self.cancel_dependencies_of_failing_jobs()
1056 new_job_finished = self.update_jobs_states_list()
# Refresh the xml files only when the state actually changed.
1058 if new_job_start or new_job_finished:
1060 self.gui.update_xml_files(self.ljobs)
1061 # Display the current status
1062 self.display_status(self.len_columns)
1064 # Make sure that the proc is not entirely busy
1067 self.logger.write("\n")
1068 self.logger.write(tiret_line)
1069 self.logger.write("\n\n")
# Final xml refresh once everything is finished.
1072 self.gui.update_xml_files(self.ljobs)
1073 self.gui.last_update()
1075 def write_all_results(self):
1076 '''Display all the jobs outputs.
1082 for jb in self.ljobs:
1083 self.logger.write(src.printcolors.printcLabel(
1084 "#------- Results for job %s -------#\n" % jb.name))
# Per-job detail (jb.write_results()) is on a line missing from this view.
1086 self.logger.write("\n\n")
# NOTE(review): the "class Gui" header and the __init__ signature are on
# lines missing from this view.
1089 '''Class to manage the the xml data that can be displayed in a browser to
1102 :param xml_dir_path str: The path to the directory where to put
1103 the xml resulting files
1104 :param l_jobs List: the list of jobs that run today
1105 :param l_jobs_not_today List: the list of jobs that do not run today
1106 :param file_boards str: the file path from which to read the
1109 # The logging instance
1110 self.logger = logger
1112 # The prefix to add to the xml files : date_hour
1113 self.prefix = prefix
1115 # The path of the csv files to read to fill the expected boards
1116 self.file_boards = file_boards
# An explicit boards csv is parsed for today's weekday; otherwise the
# expected-boards dict stays empty.
1118 if file_boards != "":
1119 today = datetime.date.weekday(datetime.date.today())
1120 self.parse_csv_boards(today)
1122 self.d_input_boards = {}
1124 # The path of the global xml file
1125 self.xml_dir_path = xml_dir_path
1126 # Initialize the xml files
1127 self.global_name = "global_report"
1128 xml_global_path = os.path.join(self.xml_dir_path,
1129 self.global_name + ".xml")
1130 self.xml_global_file = src.xmlManager.XmlLogFile(xml_global_path,
1133 # Find history for each job
1135 self.find_history(l_jobs, l_jobs_not_today)
1137 # The xml files that corresponds to the boards.
1138 # {name_board : xml_object}}
1139 self.d_xml_board_files = {}
1141 # Create the lines and columns
1142 self.initialize_boards(l_jobs, l_jobs_not_today)
1144 # Write the xml file
1145 self.update_xml_files(l_jobs)
1147 def add_xml_board(self, name):
1148 '''Add a board to the board list
1149 :param name str: the board name
1151 xml_board_path = os.path.join(self.xml_dir_path, name + ".xml")
# XmlLogFile constructor arguments continue on lines missing here.
1152 self.d_xml_board_files[name] = src.xmlManager.XmlLogFile(
# Skeleton nodes every board xml starts with.
1155 self.d_xml_board_files[name].add_simple_node("distributions")
1156 self.d_xml_board_files[name].add_simple_node("applications")
1157 self.d_xml_board_files[name].add_simple_node("board", text=name)
1159 def initialize_boards(self, l_jobs, l_jobs_not_today):
1160 '''Get all the first information needed for each file and write the
1161 first version of the files
1162 :param l_jobs List: the list of jobs that run today
1163 :param l_jobs_not_today List: the list of jobs that do not run today
1165 # Get the boards to fill and put it in a dictionary
1166 # {board_name : xml instance corresponding to the board}
1167 for job in l_jobs + l_jobs_not_today:
1169 if (board is not None and
1170 board not in self.d_xml_board_files.keys()):
1171 self.add_xml_board(board)
1173 # Verify that the boards given as input are done
1174 for board in list(self.d_input_boards.keys()):
1175 if board not in self.d_xml_board_files:
1176 self.add_xml_board(board)
1177 root_node = self.d_xml_board_files[board].xmlroot
1178 src.xmlManager.append_node_attrib(root_node,
1179 {"input_file" : self.file_boards})
1181 # Loop over all jobs in order to get the lines and columns for each
1185 for board in self.d_xml_board_files:
1187 d_application[board] = []
1191 for job in l_jobs + l_jobs_not_today:
1193 if (job.machine.host, job.machine.port) not in l_hosts_ports:
1194 l_hosts_ports.append((job.machine.host, job.machine.port))
1196 distrib = job.machine.distribution
1197 application = job.application
1199 board_job = job.board
1202 for board in self.d_xml_board_files:
1203 if board_job == board:
1204 if (distrib not in [None, ''] and
1205 distrib not in d_dist[board]):
1206 d_dist[board].append(distrib)
1207 src.xmlManager.add_simple_node(
1208 self.d_xml_board_files[board].xmlroot.find(
1211 attrib={"name" : distrib})
1213 if board_job == board:
1214 if (application not in [None, ''] and
1215 application not in d_application[board]):
1216 d_application[board].append(application)
1217 src.xmlManager.add_simple_node(
1218 self.d_xml_board_files[board].xmlroot.find(
1222 "name" : application})
1224 # Verify that there are no missing application or distribution in the
1225 # xml board files (regarding the input boards)
1226 for board in self.d_xml_board_files:
1227 l_dist = d_dist[board]
1228 if board not in self.d_input_boards.keys():
1230 for dist in self.d_input_boards[board]["rows"]:
1231 if dist not in l_dist:
1232 src.xmlManager.add_simple_node(
1233 self.d_xml_board_files[board].xmlroot.find(
1236 attrib={"name" : dist})
1237 l_appli = d_application[board]
1238 for appli in self.d_input_boards[board]["columns"]:
1239 if appli not in l_appli:
1240 src.xmlManager.add_simple_node(
1241 self.d_xml_board_files[board].xmlroot.find(
1244 attrib={"name" : appli})
1246 # Initialize the hosts_ports node for the global file
1247 self.xmlhosts_ports = self.xml_global_file.add_simple_node(
1249 for host, port in l_hosts_ports:
1250 host_port = "%s:%i" % (host, port)
1251 src.xmlManager.add_simple_node(self.xmlhosts_ports,
1253 attrib={"name" : host_port})
1255 # Initialize the jobs node in all files
1256 for xml_file in [self.xml_global_file] + list(
1257 self.d_xml_board_files.values()):
1258 xml_jobs = xml_file.add_simple_node("jobs")
1259 # Get the jobs present in the config file but
1260 # that will not be launched today
1261 self.put_jobs_not_today(l_jobs_not_today, xml_jobs)
1263 # add also the infos node
1264 xml_file.add_simple_node("infos",
1265 attrib={"name" : "last update",
1266 "JobsCommandStatus" : "running"})
1268 # and put the history node
1269 history_node = xml_file.add_simple_node("history")
1270 name_board = os.path.basename(xml_file.logFile)[:-len(".xml")]
1271 # serach for board files
1272 expression = "^[0-9]{8}_+[0-9]{6}_" + name_board + ".xml$"
1273 oExpr = re.compile(expression)
1274 # Get the list of xml borad files that are in the log directory
1275 for file_name in os.listdir(self.xml_dir_path):
1276 if oExpr.search(file_name):
1277 date = os.path.basename(file_name).split("_")[0]
1278 file_path = os.path.join(self.xml_dir_path, file_name)
1279 src.xmlManager.add_simple_node(history_node,
1282 attrib={"date" : date})
1285 # Find in each board the squares that needs to be filled regarding the
1286 # input csv files but that are not covered by a today job
1287 for board in self.d_input_boards.keys():
1288 xml_root_board = self.d_xml_board_files[board].xmlroot
1289 # Find the missing jobs for today
1290 xml_missing = src.xmlManager.add_simple_node(xml_root_board,
1292 for row, column in self.d_input_boards[board]["jobs"]:
1295 if (job.application == column and
1296 job.machine.distribution == row):
1300 src.xmlManager.add_simple_node(xml_missing,
1302 attrib={"distribution" : row,
1303 "application" : column })
1304 # Find the missing jobs not today
1305 xml_missing_not_today = src.xmlManager.add_simple_node(
1307 "missing_jobs_not_today")
1308 for row, column in self.d_input_boards[board]["jobs_not_today"]:
1310 for job in l_jobs_not_today:
1311 if (job.application == column and
1312 job.machine.distribution == row):
1316 src.xmlManager.add_simple_node(xml_missing_not_today,
1318 attrib={"distribution" : row,
1319 "application" : column })
1321 def find_history(self, l_jobs, l_jobs_not_today):
1322 """find, for each job, in the existent xml boards the results for the
1323 job. Store the results in the dictionnary self.history = {name_job :
1324 list of (date, status, list links)}
1326 :param l_jobs List: the list of jobs to run today
1327 :param l_jobs_not_today List: the list of jobs that do not run today
1329 # load the all the history
1330 expression = "^[0-9]{8}_+[0-9]{6}_" + self.global_name + ".xml$"
1331 oExpr = re.compile(expression)
1332 # Get the list of global xml that are in the log directory
1334 for file_name in os.listdir(self.xml_dir_path):
1335 if oExpr.search(file_name):
1336 file_path = os.path.join(self.xml_dir_path, file_name)
1338 global_xml = src.xmlManager.ReadXmlFile(file_path)
1339 l_globalxml.append(global_xml)
1340 except Exception as e:
1341 msg = _("\nWARNING: the file %s can not be read, it will be "
1342 "ignored\n%s" % (file_path, e))
1343 self.logger.write("%s\n" % src.printcolors.printcWarning(
1346 # Construct the dictionnary self.history
1347 for job in l_jobs + l_jobs_not_today:
1349 for global_xml in l_globalxml:
1350 date = os.path.basename(global_xml.filePath).split("_")[0]
1351 global_root_node = global_xml.xmlroot.find("jobs")
1352 job_node = src.xmlManager.find_node_by_attrib(
1358 if job_node.find("remote_log_file_path") is not None:
1359 link = job_node.find("remote_log_file_path").text
1360 res_job = job_node.find("res").text
1361 if link != "nothing":
1362 l_links.append((date, res_job, link))
1363 l_links = sorted(l_links, reverse=True)
1364 self.history[job.name] = l_links
1366 def put_jobs_not_today(self, l_jobs_not_today, xml_node_jobs):
1367 '''Get all the first information needed for each file and write the
1368 first version of the files
1370 :param xml_node_jobs etree.Element: the node corresponding to a job
1371 :param l_jobs_not_today List: the list of jobs that do not run today
1373 for job in l_jobs_not_today:
1374 xmlj = src.xmlManager.add_simple_node(xml_node_jobs,
1376 attrib={"name" : job.name})
1377 src.xmlManager.add_simple_node(xmlj, "application", job.application)
1378 src.xmlManager.add_simple_node(xmlj,
1380 job.machine.distribution)
1381 src.xmlManager.add_simple_node(xmlj, "board", job.board)
1382 src.xmlManager.add_simple_node(xmlj,
1383 "commands", " ; ".join(job.commands))
1384 src.xmlManager.add_simple_node(xmlj, "state", "Not today")
1385 src.xmlManager.add_simple_node(xmlj, "machine", job.machine.name)
1386 src.xmlManager.add_simple_node(xmlj, "host", job.machine.host)
1387 src.xmlManager.add_simple_node(xmlj, "port", str(job.machine.port))
1388 src.xmlManager.add_simple_node(xmlj, "user", job.machine.user)
1389 src.xmlManager.add_simple_node(xmlj, "sat_path",
1390 job.machine.sat_path)
1391 xml_history = src.xmlManager.add_simple_node(xmlj, "history")
1392 for i, (date, res_job, link) in enumerate(self.history[job.name]):
1394 # tag the first one (the last one)
1395 src.xmlManager.add_simple_node(xml_history,
1398 attrib={"date" : date,
1402 src.xmlManager.add_simple_node(xml_history,
1405 attrib={"date" : date,
1409 def parse_csv_boards(self, today):
1410 """ Parse the csv file that describes the boards to produce and fill
1411 the dict d_input_boards that contain the csv file contain
1413 :param today int: the current day of the week
1415 # open the csv file and read its content
1417 with open(self.file_boards, 'r') as f:
1418 reader = csv.reader(f,delimiter=CSV_DELIMITER)
1421 # get the delimiter for the boards (empty line)
1422 boards_delimiter = [''] * len(l_read[0])
1423 # Make the list of boards, by splitting with the delimiter
1424 l_boards = [list(y) for x, y in itertools.groupby(l_read,
1425 lambda z: z == boards_delimiter) if not x]
1427 # loop over the csv lists of lines and get the rows, columns and jobs
1429 for input_board in l_boards:
1431 board_name = input_board[0][0]
1434 columns = input_board[0][1:]
1439 for line in input_board[1:]:
1442 for i, square in enumerate(line[1:]):
1445 days = square.split(DAYS_SEPARATOR)
1446 days = [int(day) for day in days]
1447 job = (row, columns[i])
1451 jobs_not_today.append(job)
1453 d_boards[board_name] = {"rows" : rows,
1454 "columns" : columns,
1456 "jobs_not_today" : jobs_not_today}
1458 self.d_input_boards = d_boards
1460 def update_xml_files(self, l_jobs):
1461 '''Write all the xml files with updated information about the jobs
1463 :param l_jobs List: the list of jobs that run today
1465 for xml_file in [self.xml_global_file] + list(
1466 self.d_xml_board_files.values()):
1467 self.update_xml_file(l_jobs, xml_file)
1470 self.write_xml_files()
1472 def update_xml_file(self, l_jobs, xml_file):
1473 '''update information about the jobs for the file xml_file
1475 :param l_jobs List: the list of jobs that run today
1476 :param xml_file xmlManager.XmlLogFile: the xml instance to update
1479 xml_node_jobs = xml_file.xmlroot.find('jobs')
1480 # Update the job names and status node
1482 # Find the node corresponding to the job and delete it
1483 # in order to recreate it
1484 for xmljob in xml_node_jobs.findall('job'):
1485 if xmljob.attrib['name'] == job.name:
1486 xml_node_jobs.remove(xmljob)
1490 T0 = time.strftime('%Y-%m-%d %H:%M:%S',
1491 time.localtime(job._T0))
1494 Tf = time.strftime('%Y-%m-%d %H:%M:%S',
1495 time.localtime(job._Tf))
1497 # recreate the job node
1498 xmlj = src.xmlManager.add_simple_node(xml_node_jobs,
1500 attrib={"name" : job.name})
1501 src.xmlManager.add_simple_node(xmlj, "machine", job.machine.name)
1502 src.xmlManager.add_simple_node(xmlj, "host", job.machine.host)
1503 src.xmlManager.add_simple_node(xmlj, "port", str(job.machine.port))
1504 src.xmlManager.add_simple_node(xmlj, "user", job.machine.user)
1505 xml_history = src.xmlManager.add_simple_node(xmlj, "history")
1506 for date, res_job, link in self.history[job.name]:
1507 src.xmlManager.add_simple_node(xml_history,
1510 attrib={"date" : date,
1513 src.xmlManager.add_simple_node(xmlj, "sat_path",
1514 job.machine.sat_path)
1515 src.xmlManager.add_simple_node(xmlj, "application", job.application)
1516 src.xmlManager.add_simple_node(xmlj, "distribution",
1517 job.machine.distribution)
1518 src.xmlManager.add_simple_node(xmlj, "board", job.board)
1519 src.xmlManager.add_simple_node(xmlj, "timeout", str(job.timeout))
1520 src.xmlManager.add_simple_node(xmlj, "commands",
1521 " ; ".join(job.commands))
1522 src.xmlManager.add_simple_node(xmlj, "state", job.get_status())
1523 src.xmlManager.add_simple_node(xmlj, "begin", T0)
1524 src.xmlManager.add_simple_node(xmlj, "end", Tf)
1525 src.xmlManager.add_simple_node(xmlj, "out",
1526 src.printcolors.cleancolor(job.out))
1527 src.xmlManager.add_simple_node(xmlj, "err",
1528 src.printcolors.cleancolor(job.err))
1529 src.xmlManager.add_simple_node(xmlj, "res", str(job.res_job))
1530 if len(job.remote_log_files) > 0:
1531 src.xmlManager.add_simple_node(xmlj,
1532 "remote_log_file_path",
1533 job.remote_log_files[0])
1535 src.xmlManager.add_simple_node(xmlj,
1536 "remote_log_file_path",
1538 # Search for the test log if there is any
1539 l_test_log_files = self.find_test_log(job.remote_log_files)
1540 xml_test = src.xmlManager.add_simple_node(xmlj,
1541 "test_log_file_path")
1542 for test_log_path, res_test, nb_fails in l_test_log_files:
1543 test_path_node = src.xmlManager.add_simple_node(xml_test,
1546 test_path_node.attrib["res"] = res_test
1547 test_path_node.attrib["nb_fails"] = nb_fails
1549 xmlafter = src.xmlManager.add_simple_node(xmlj, "after", job.after)
1550 # get the job father
1551 if job.after is not None:
1554 if jb.name == job.after:
1557 if (job_father is not None and
1558 len(job_father.remote_log_files) > 0):
1559 link = job_father.remote_log_files[0]
1562 src.xmlManager.append_node_attrib(xmlafter, {"link" : link})
1564 # Verify that the job is to be done today regarding the input csv
1566 if job.board and job.board in self.d_input_boards.keys():
1568 for dist, appli in self.d_input_boards[job.board]["jobs"]:
1569 if (job.machine.distribution == dist
1570 and job.application == appli):
1572 src.xmlManager.add_simple_node(xmlj,
1577 src.xmlManager.add_simple_node(xmlj,
1583 xml_node_infos = xml_file.xmlroot.find('infos')
1584 src.xmlManager.append_node_attrib(xml_node_infos,
1586 datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")})
1589 def find_test_log(self, l_remote_log_files):
1590 '''Find if there is a test log (board) in the remote log files and
1591 the path to it. There can be several test command, so the result is
1594 :param l_remote_log_files List: the list of all remote log files
1595 :return: the list of (test log files path, res of the command)
1599 for file_path in l_remote_log_files:
1600 dirname = os.path.basename(os.path.dirname(file_path))
1601 file_name = os.path.basename(file_path)
1602 regex = src.logger.log_all_command_file_expression
1603 oExpr = re.compile(regex)
1604 if dirname == "TEST" and oExpr.search(file_name):
1605 # find the res of the command
1606 prod_node = etree.parse(file_path).getroot().find("product")
1607 res_test = prod_node.attrib["global_res"]
1608 # find the number of fails
1609 testbase_node = prod_node.find("tests").find("testbase")
1610 nb_fails = int(testbase_node.attrib["failed"])
1611 # put the file path, the res of the test command and the number
1612 # of fails in the output
1613 res.append((file_path, res_test, nb_fails))
1617 def last_update(self, finish_status = "finished"):
1618 '''update information about the jobs for the file xml_file
1620 :param l_jobs List: the list of jobs that run today
1621 :param xml_file xmlManager.XmlLogFile: the xml instance to update
1623 for xml_file in [self.xml_global_file] + list(self.d_xml_board_files.values()):
1624 xml_node_infos = xml_file.xmlroot.find('infos')
1625 src.xmlManager.append_node_attrib(xml_node_infos,
1626 attrib={"JobsCommandStatus" : finish_status})
1628 self.write_xml_files()
1630 def write_xml_file(self, xml_file, stylesheet):
1631 ''' Write one xml file and the same file with prefix
1633 xml_file.write_tree(stylesheet)
1634 file_path = xml_file.logFile
1635 file_dir = os.path.dirname(file_path)
1636 file_name = os.path.basename(file_path)
1637 file_name_with_prefix = self.prefix + "_" + file_name
1638 xml_file.write_tree(stylesheet, os.path.join(file_dir,
1639 file_name_with_prefix))
1641 def write_xml_files(self):
1642 ''' Write the xml files
1644 self.write_xml_file(self.xml_global_file, STYLESHEET_GLOBAL)
1645 for xml_file in self.d_xml_board_files.values():
1646 self.write_xml_file(xml_file, STYLESHEET_BOARD)
def get_config_file_path(job_config_name, l_cfg_dir):
    '''Find the path to the jobs configuration file.

    :param job_config_name str: the name given in the --name option; either a
                                full path to a .pyconf file or a bare name to
                                search for in l_cfg_dir
    :param l_cfg_dir List: the directories to search (PATHS.JOBPATH)
    :return: (found, path) where found is a boolean and path the candidate
             file path (possibly non-existent when found is False)
    :rtype: (bool, str)
    '''
    found = False
    file_jobs_cfg = None
    if os.path.exists(job_config_name) and job_config_name.endswith(".pyconf"):
        # the option already is a full path to an existing .pyconf
        found = True
        file_jobs_cfg = job_config_name
    else:
        # search the name (with .pyconf appended if needed) in every directory
        for cfg_dir in l_cfg_dir:
            file_jobs_cfg = os.path.join(cfg_dir, job_config_name)
            if not file_jobs_cfg.endswith('.pyconf'):
                file_jobs_cfg += '.pyconf'
            if not os.path.exists(file_jobs_cfg):
                continue
            else:
                found = True
                break
    return found, file_jobs_cfg
# Describes the command
def description():
    '''method that is called when salomeTools is called with --help option.

    :return: The text to display for the jobs command description.
    :rtype: str
    '''
    return _("The jobs command launches maintenances that are described"
             " in the dedicated jobs configuration file.\n\nexample:\nsat "
             "jobs --name my_jobs --publish")
def run(args, runner, logger):
    '''method that is called when salomeTools is called with jobs parameter.

    :param args List: the command-line arguments after "jobs"
    :param runner: the salomeTools runner instance (holds the config)
    :param logger: the salomeTools logger
    :return: 0 on success, 1 on failure
    :rtype: int
    '''

    (options, args) = parser.parse_args(args)

    l_cfg_dir = runner.cfg.PATHS.JOBPATH

    # list option : display all the available config files
    if options.list:
        for cfg_dir in l_cfg_dir:
            if not options.no_label:
                logger.write("------ %s\n" %
                                 src.printcolors.printcHeader(cfg_dir))
            if not os.path.exists(cfg_dir):
                continue
            for f in sorted(os.listdir(cfg_dir)):
                # only the .pyconf files are job configurations
                if not f.endswith('.pyconf'):
                    continue
                cfilename = f[:-len('.pyconf')]
                logger.write("%s\n" % cfilename)
        return 0

    # Make sure the jobs_config option has been called
    if not options.jobs_cfg:
        message = _("The option --jobs_config is required\n")
        src.printcolors.printcError(message)
        return 1

    # Find the file in the directories, unless it is a full path
    # merge all in a config
    merger = src.pyconf.ConfigMerger()
    config_jobs = src.pyconf.Config()
    l_conf_files_path = []
    for config_file in options.jobs_cfg:
        found, file_jobs_cfg = get_config_file_path(config_file, l_cfg_dir)
        if not found:
            msg = _("The file configuration %s was not found."
                    "\nUse the --list option to get the "
                    "possible files." % config_file)
            logger.write("%s\n" % src.printcolors.printcError(msg), 1)
            return 1
        l_conf_files_path.append(file_jobs_cfg)
        # Read the config that is in the file
        one_config_jobs = src.read_config_from_a_file(file_jobs_cfg)
        merger.merge(config_jobs, one_config_jobs)

    info = [
        (_("Platform"), runner.cfg.VARS.dist),
        (_("Files containing the jobs configuration"), l_conf_files_path)
    ]
    src.print_info(logger, info)

    if options.only_jobs:
        # keep only the jobs requested through --only_jobs
        l_jb = src.pyconf.Sequence()
        for jb in config_jobs.jobs:
            if jb.name in options.only_jobs:
                l_jb.append(jb,
                "Job that was given in only_jobs option parameters\n")
        config_jobs.jobs = l_jb

    # Make a unique file that contain all the jobs in order to use it
    # on every machine
    name_pyconf = "_".join([os.path.basename(path)[:-len('.pyconf')]
                            for path in l_conf_files_path]) + ".pyconf"
    path_pyconf = src.get_tmp_filename(runner.cfg, name_pyconf)
    # save the merged config (open() instead of the Python-2-only file())
    with open(path_pyconf, 'w') as f:
        config_jobs.__save__(f)

    # log the paramiko problems
    log_dir = src.get_log_path(runner.cfg)
    paramiko_log_dir_path = os.path.join(log_dir, "JOBS")
    src.ensure_path_exists(paramiko_log_dir_path)
    paramiko.util.log_to_file(os.path.join(paramiko_log_dir_path,
                                           logger.txtFileName))

    # Initialization
    today_jobs = Jobs(runner,
                      logger,
                      path_pyconf,
                      config_jobs)

    # SSH connection to all machines
    today_jobs.ssh_connection_all_machines()
    if options.test_connection:
        return 0

    gui = None
    if options.publish:
        logger.write(src.printcolors.printcInfo(
                                        _("Initialize the xml boards : ")), 5)

        # Copy the stylesheets in the log directory
        xsl_dir = os.path.join(runner.cfg.VARS.srcDir, 'xsl')
        files_to_copy = []
        files_to_copy.append(os.path.join(xsl_dir, STYLESHEET_GLOBAL))
        files_to_copy.append(os.path.join(xsl_dir, STYLESHEET_BOARD))
        files_to_copy.append(os.path.join(xsl_dir, "running.gif"))
        for file_path in files_to_copy:
            shutil.copy2(file_path, log_dir)

        # Instanciate the Gui in order to produce the xml files that contain all
        # the boards
        gui = Gui(log_dir,
                  today_jobs.ljobs,
                  today_jobs.ljobs_not_today,
                  runner.cfg.VARS.datehour,
                  logger,
                  file_boards = options.input_boards)

        logger.write(src.printcolors.printcSuccess("OK"), 5)
        logger.write("\n\n", 5)

        # Display the list of the xml files
        logger.write(src.printcolors.printcInfo(("Here is the list of published"
                                                 " files :\n")), 4)
        logger.write("%s\n" % gui.xml_global_file.logFile, 4)
        for board in gui.d_xml_board_files.keys():
            file_path = gui.d_xml_board_files[board].logFile
            file_name = os.path.basename(file_path)
            logger.write("%s\n" % file_path, 4)
            logger.add_link(file_name, "board", 0, board)

        logger.write("\n", 4)

    today_jobs.gui = gui

    interruped = False
    try:
        # Run all the jobs contained in config_jobs
        today_jobs.run_jobs()
    except KeyboardInterrupt:
        interruped = True
        logger.write("\n\n%s\n\n" %
                (src.printcolors.printcWarning(_("Forced interruption"))), 1)
    except Exception as e:
        msg = _("CRITICAL ERROR: The jobs loop has been interrupted\n")
        logger.write("\n\n%s\n" % src.printcolors.printcError(msg) )
        logger.write("%s\n" % str(e))
        # get the traceback through a temporary file
        __, __, exc_traceback = sys.exc_info()
        fp = tempfile.TemporaryFile()
        traceback.print_tb(exc_traceback, file=fp)
        fp.seek(0)
        stack = fp.read()
        logger.write("\nTRACEBACK: %s\n" % stack.replace('"',"'"), 1)
    finally:
        # always kill leftover jobs, flush the boards and clean up,
        # whatever happened in the jobs loop
        res = 0
        if interruped:
            res = 1
            msg = _("Killing the running jobs and trying"
                    " to get the corresponding logs\n")
            logger.write(src.printcolors.printcWarning(msg))

        # find the potential not finished jobs and kill them
        for jb in today_jobs.ljobs:
            if not jb.has_finished():
                res = 1
                try:
                    jb.kill_remote_process()
                except Exception as e:
                    msg = _("Failed to kill job %s: %s\n" % (jb.name, e))
                    logger.write(src.printcolors.printcWarning(msg))
            if jb.res_job != "0":
                res = 1
        if interruped:
            if today_jobs.gui:
                today_jobs.gui.last_update(_("Forced interruption"))
        else:
            if today_jobs.gui:
                today_jobs.gui.last_update()
        # Output the results
        today_jobs.write_all_results()
        # Remove the temporary pyconf file
        if os.path.exists(path_pyconf):
            os.remove(path_pyconf)
        return res