3 # Copyright (C) 2010-2013 CEA/DEN
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 # Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
32 import src.ElementTree as etree
# XSL stylesheets attached to the generated XML reports so a browser can
# render them (one for the global view, one for the per-board view).
34 STYLESHEET_GLOBAL = "jobs_global_report.xsl"
35 STYLESHEET_BOARD = "jobs_board_report.xsl"
# Command-line options of the "jobs" command.
# NOTE(review): this listing is elided (embedded line numbers jump), so some
# add_option calls below are missing their trailing arguments — confirm
# against the complete file before editing.
40 parser = src.options.Options()
42 parser.add_option('n', 'name', 'list2', 'jobs_cfg',
43 _('Mandatory: The name of the config file that contains'
44 ' the jobs configuration. Can be a list.'))
45 parser.add_option('o', 'only_jobs', 'list2', 'only_jobs',
46 _('Optional: the list of jobs to launch, by their name. '))
47 parser.add_option('l', 'list', 'boolean', 'list',
48 _('Optional: list all available config files.'))
49 parser.add_option('t', 'test_connection', 'boolean', 'test_connection',
50 _("Optional: try to connect to the machines. "
51 "Not executing the jobs."),
53 parser.add_option('p', 'publish', 'boolean', 'publish',
54 _("Optional: generate an xml file that can be read in a "
55 "browser to display the jobs status."),
57 parser.add_option('i', 'input_boards', 'string', 'input_boards', _("Optional: "
58 "the path to csv file that contain "
59 "the expected boards."),"")
60 parser.add_option('', 'completion', 'boolean', 'no_label',
61 _("Optional (internal use): do not print labels, Works only "
# NOTE(review): elided listing — the embedded line numbers jump inside every
# method, so `try:`/`else:`/`return` lines and parts of signatures are
# missing here; compare with the complete file before relying on the
# control flow shown.
65 class Machine(object):
66 '''Class to manage a ssh connection on a machine
# __init__ fragment: the signature before line 74 is elided; the visible
# default shows SAT is installed under "salomeTools" on the remote side.
74 sat_path="salomeTools"):
78 self.distribution = None # Will be filled after copying SAT on the machine
80 self.password = passwd
81 self.sat_path = sat_path
82 self.ssh = paramiko.SSHClient()
# None = no connection attempted yet; connect() sets it to False then True.
83 self._connection_successful = None
85 def connect(self, logger):
86 '''Initiate the ssh connection to the remote machine
88 :param logger src.logger.Logger: The logger instance
93 self._connection_successful = False
94 self.ssh.load_system_host_keys()
# Auto-accept unknown host keys (no interactive prompt on first connect).
95 self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
# presumably inside a try: (line 96 is elided from this listing)
97 self.ssh.connect(self.host,
100 password = self.password)
101 except paramiko.AuthenticationException:
102 message = src.KO_STATUS + _("Authentication failed")
103 except paramiko.BadHostKeyException:
104 message = (src.KO_STATUS +
105 _("The server's host key could not be verified"))
106 except paramiko.SSHException:
107 message = ( _("SSHException error connecting or "
108 "establishing an SSH session"))
110 message = ( _("Error connecting or establishing an SSH session"))
112 self._connection_successful = True
116 def successfully_connected(self, logger):
117 '''Verify if the connection to the remote machine has succeed
119 :param logger src.logger.Logger: The logger instance
120 :return: True if the connection has succeed, False if not
# Warn when queried before any connect() call (flag still None).
123 if self._connection_successful == None:
124 message = _("Warning : trying to ask if the connection to "
125 "(name: %s host: %s, port: %s, user: %s) is OK whereas there were"
126 " no connection request" %
127 (self.name, self.host, self.port, self.user))
128 logger.write( src.printcolors.printcWarning(message))
129 return self._connection_successful
131 def copy_sat(self, sat_local_path, job_file):
132 '''Copy salomeTools to the remote machine in self.sat_path
136 # open a sftp connection
137 self.sftp = self.ssh.open_sftp()
138 # Create the sat directory on remote machine if it is not existing
139 self.mkdir(self.sat_path, ignore_existing=True)
# Upload the SAT tree, excluding the ".git" directory.
141 self.put_dir(sat_local_path, self.sat_path, filters = ['.git'])
142 # put the job configuration file in order to make it reachable
143 # on the remote machine
# Remote copy is hidden (dot-prefixed basename).
144 remote_job_file_name = ".%s" % os.path.basename(job_file)
145 self.sftp.put(job_file, os.path.join(self.sat_path,
146 remote_job_file_name))
147 except Exception as e:
# Any failure during the copy invalidates the connection flag.
149 self._connection_successful = False
153 def put_dir(self, source, target, filters = []):
154 ''' Uploads the contents of the source directory to the target path. The
155 target directory needs to exists. All sub-directories in source are
156 created under target.
158 for item in os.listdir(source):
161 source_path = os.path.join(source, item)
162 destination_path = os.path.join(target, item)
# Symlinks are re-created remotely and given the target file's mode.
163 if os.path.islink(source_path):
164 linkto = os.readlink(source_path)
166 self.sftp.symlink(linkto, destination_path)
167 self.sftp.chmod(destination_path,
168 os.stat(source_path).st_mode)
# Regular files are uploaded with their local permission bits preserved.
172 if os.path.isfile(source_path):
173 self.sftp.put(source_path, destination_path)
174 self.sftp.chmod(destination_path,
175 os.stat(source_path).st_mode)
# Directories: create remotely (ignoring "exists") then recurse.
177 self.mkdir(destination_path, ignore_existing=True)
178 self.put_dir(source_path, destination_path)
# mode=511 is decimal for octal 0777.
180 def mkdir(self, path, mode=511, ignore_existing=False):
181 ''' Augments mkdir by adding an option to not fail
185 self.sftp.mkdir(path, mode)
192 def exec_command(self, command, logger):
193 '''Execute the command on the remote machine
195 :param command str: The command to be run
196 :param logger src.logger.Logger: The logger instance
197 :return: the stdin, stdout, and stderr of the executing command,
199 :rtype: (paramiko.channel.ChannelFile, paramiko.channel.ChannelFile,
200 paramiko.channel.ChannelFile)
203 # Does not wait the end of the command
204 (stdin, stdout, stderr) = self.ssh.exec_command(command)
205 except paramiko.SSHException:
206 message = src.KO_STATUS + _(
207 ": the server failed to execute the command\n")
208 logger.write( src.printcolors.printcError(message))
# (None, None, None) signals failure to the caller (see Job.run).
209 return (None, None, None)
211 logger.write( src.printcolors.printcError(src.KO_STATUS + '\n'))
212 return (None, None, None)
214 return (stdin, stdout, stderr)
# close() fragment: only the docstring opening line is visible here.
217 '''Close the ssh connection
223 def write_info(self, logger):
224 '''Prints the informations relative to the machine in the logger
225 (terminal traces and log file)
227 :param logger src.logger.Logger: The logger instance
231 logger.write("host : " + self.host + "\n")
232 logger.write("port : " + str(self.port) + "\n")
233 logger.write("user : " + str(self.user) + "\n")
234 if self.successfully_connected(logger):
235 status = src.OK_STATUS
237 status = src.KO_STATUS
238 logger.write("Connection : " + status + "\n\n")
# NOTE(review): interior of class Job — the `class Job` statement itself
# (original line ~241) and many body lines are elided from this listing.
242 '''Class to manage one job
# __init__ fragment: the signature (original lines 243-257) is elided.
258 self.machine = machine
260 self.timeout = timeout
261 self.application = application
265 # The list of log files to download from the remote machine
266 self.remote_log_files = []
268 # The remote command status
269 # -1 means that it has not been launched,
270 # 0 means success and 1 means fail
272 self.cancelled = False
# Lifecycle flags driven by run()/has_finished()/check_time().
276 self._has_begun = False
277 self._has_finished = False
278 self._has_timouted = False
279 self._stdin = None # Store the command inputs field
280 self._stdout = None # Store the command outputs field
281 self._stderr = None # Store the command errors field
# The job pyconf is uploaded hidden (dot-prefixed) by Machine.copy_sat.
286 self.name_remote_jobs_pyconf = ".%s" % os.path.basename(job_file_path)
287 self.commands = commands
# Build the remote "sat ... job" command line; the produced log-file list
# is written to list_log_files.txt in the remote sat_path.
288 self.command = (os.path.join(self.machine.sat_path, "sat") +
290 os.path.join(self.machine.sat_path,
291 "list_log_files.txt") +
292 " job --jobs_config " +
293 os.path.join(self.machine.sat_path,
294 self.name_remote_jobs_pyconf) +
295 " --name " + self.name)
# Optional prefix wraps the whole command in double quotes.
297 self.command = prefix + ' "' + self.command +'"'
# get_pids() fragment (def line elided): grep the remote process table.
300 """ Get the pid(s) corresponding to the command that have been launched
301 On the remote machine
303 :return: The list of integers corresponding to the found pids
307 cmd_pid = 'ps aux | grep "' + self.command + '" | awk \'{print $2}\''
308 (_, out_pid, _) = self.machine.exec_command(cmd_pid, self.logger)
309 pids_cmd = out_pid.readlines()
310 pids_cmd = [str(src.only_numbers(pid)) for pid in pids_cmd]
314 def kill_remote_process(self, wait=1):
315 '''Kills the process on the remote machine.
317 :return: (the output of the kill, the error of the kill)
321 pids = self.get_pids()
323 return ("Unable to get the pid of the command.", "")
# kill -2 sends SIGINT to every found pid.
325 cmd_kill = " ; ".join([("kill -2 " + pid) for pid in pids])
326 (_, out_kill, err_kill) = self.machine.exec_command(cmd_kill,
329 return (out_kill.read().decode(), err_kill.read().decode())
# has_begun() fragment (def line elided).
332 '''Returns True if the job has already begun
334 :return: True if the job has already begun
337 return self._has_begun
339 def has_finished(self):
340 '''Returns True if the job has already finished
341 (i.e. all the commands have been executed)
342 If it is finished, the outputs are stored in the fields out and err.
344 :return: True if the job has already finished
348 # If the method has already been called and returned True
349 if self._has_finished:
352 # If the job has not begun yet
353 if not self.has_begun():
# A closed stdout channel means the remote command terminated.
356 if self._stdout.channel.closed:
357 self._has_finished = True
358 # Store the result outputs
359 self.out += self._stdout.read().decode()
360 self.err += self._stderr.read().decode()
362 self._Tf = time.time()
363 # And get the remote command status and log files
366 except Exception as e:
367 self.err += _("Unable to get remote log files: %s" % e)
369 return self._has_finished
371 def get_log_files(self):
372 """Get the log files produced by the command launched
373 on the remote machine, and put it in the log directory of the user,
374 so they can be accessible from
376 # Do not get the files if the command is not finished
377 if not self.has_finished():
378 msg = _("Trying to get log files whereas the job is not finished.")
379 self.logger.write(src.printcolors.printcWarning(msg))
382 # First get the file that contains the list of log files to get
383 tmp_file_path = src.get_tmp_filename(self.config, "list_log_files.txt")
384 remote_path = os.path.join(self.machine.sat_path, "list_log_files.txt")
385 self.machine.sftp.get(
389 # Read the file and get the result of the command and all the log files
391 fstream_tmp = open(tmp_file_path, "r")
392 file_lines = fstream_tmp.readlines()
393 file_lines = [line.replace("\n", "") for line in file_lines]
395 os.remove(tmp_file_path)
398 # The first line is the result of the command (0 success or 1 fail)
399 self.res_job = file_lines[0]
400 except Exception as e:
401 self.err += _("Unable to get status from remote file %s: %s" %
402 (remote_path, str(e)))
# Remaining lines are remote log file paths, dispatched by parent dir.
404 for i, job_path_remote in enumerate(file_lines[1:]):
406 # For each command, there is two files to get :
407 # 1- The xml file describing the command and giving the
409 # 2- The txt file containing the system command traces (like
410 # traces produced by the "make" command)
411 # 3- In case of the test command, there is another file to get :
412 # the xml board that contain the test results
413 dirname = os.path.basename(os.path.dirname(job_path_remote))
414 if dirname != 'OUT' and dirname != 'TEST':
416 local_path = os.path.join(os.path.dirname(
417 self.logger.logFilePath),
418 os.path.basename(job_path_remote))
419 if i==0: # The first is the job command
420 self.logger.add_link(os.path.basename(job_path_remote),
424 elif dirname == 'OUT':
426 local_path = os.path.join(os.path.dirname(
427 self.logger.logFilePath),
429 os.path.basename(job_path_remote))
430 elif dirname == 'TEST':
432 local_path = os.path.join(os.path.dirname(
433 self.logger.logFilePath),
435 os.path.basename(job_path_remote))
# Only download files not already present locally.
438 if not os.path.exists(local_path):
439 self.machine.sftp.get(job_path_remote, local_path)
440 self.remote_log_files.append(local_path)
441 except Exception as e:
442 self.err += _("Unable to get %s log file from remote: %s" %
443 (str(job_path_remote),
446 def has_failed(self):
447 '''Returns True if the job has failed.
448 A job is considered as failed if the machine could not be reached,
449 if the remote command failed,
450 or if the job finished with a time out.
452 :return: True if the job has failed
455 if not self.has_finished():
457 if not self.machine.successfully_connected(self.logger):
459 if self.is_timeout():
# res_job "1" is the remote failure status (see get_log_files).
461 if self.res_job == "1":
# cancel() fragment (def line elided): mark a job as failed because its
# dependency ("father") failed; it will never be launched.
466 """In case of a failing job, one has to cancel every job that depend
467 on it. This method put the job as failed and will not be executed.
471 self._has_begun = True
472 self._has_finished = True
473 self.cancelled = True
474 self.out += _("This job was not launched because its father has failed.")
475 self.err += _("This job was not launched because its father has failed.")
477 def is_running(self):
478 '''Returns True if the job commands are running
480 :return: True if the job is running
483 return self.has_begun() and not self.has_finished()
485 def is_timeout(self):
486 '''Returns True if the job commands has finished with timeout
488 :return: True if the job has finished with timeout
491 return self._has_timouted
493 def time_elapsed(self):
494 """Get the time elapsed since the job launching
496 :return: The number of seconds
499 if not self.has_begun():
502 return T_now - self._T0
504 def check_time(self):
505 """Verify that the job has not exceeded its timeout.
506 If it has, kill the remote command and consider the job as finished.
508 if not self.has_begun():
510 if self.time_elapsed() > self.timeout:
511 self._has_finished = True
512 self._has_timouted = True
513 self._Tf = time.time()
514 (out_kill, __) = self.kill_remote_process()
515 self.out += "TIMEOUT \n" + out_kill
516 self.err += "TIMEOUT : %s seconds elapsed\n" % str(self.timeout)
519 except Exception as e:
520 self.err += _("Unable to get remote log files!\n%s\n" % str(e))
522 def total_duration(self):
523 """Give the total duration of the job
525 :return: the total duration of the job in seconds
528 return self._Tf - self._T0
# run() fragment (def line elided): launch the remote command once.
531 """Launch the job by executing the remote command.
534 # Prevent multiple run
536 msg = _("Warning: A job can only be launched one time")
537 msg2 = _("Trying to launch the job \"%s\" whereas it has "
538 "already been launched." % self.name)
539 self.logger.write(src.printcolors.printcWarning("%s\n%s\n" % (msg,
543 # Do not execute the command if the machine could not be reached
544 if not self.machine.successfully_connected(self.logger):
545 self._has_finished = True
547 self.err += ("Connection to machine (name : %s, host: %s, port:"
548 " %s, user: %s) has failed\nUse the log command "
549 "to get more information."
550 % (self.machine.name,
555 # Usual case : Launch the command on remote machine
556 self._T0 = time.time()
557 self._stdin, self._stdout, self._stderr = self.machine.exec_command(
560 # If the results are not initialized, finish the job
561 if (self._stdin, self._stdout, self._stderr) == (None, None, None):
562 self._has_finished = True
563 self._Tf = time.time()
565 self.err += "The server failed to execute the command"
567 # Put the beginning flag to true.
568 self._has_begun = True
570 def write_results(self):
571 """Display on the terminal all the job's information
573 self.logger.write("name : " + self.name + "\n")
575 self.logger.write("after : %s\n" % self.after)
576 self.logger.write("Time elapsed : %4imin %2is \n" %
577 (self.total_duration()//60 , self.total_duration()%60))
579 self.logger.write("Begin time : %s\n" %
580 time.strftime('%Y-%m-%d %H:%M:%S',
581 time.localtime(self._T0)) )
583 self.logger.write("End time : %s\n\n" %
584 time.strftime('%Y-%m-%d %H:%M:%S',
585 time.localtime(self._Tf)) )
587 machine_head = "Informations about connection :\n"
588 underline = (len(machine_head) - 2) * "-"
589 self.logger.write(src.printcolors.printcInfo(
590 machine_head+underline+"\n"))
591 self.machine.write_info(self.logger)
593 self.logger.write(src.printcolors.printcInfo("out : \n"))
595 self.logger.write("Unable to get output\n")
597 self.logger.write(self.out + "\n")
598 self.logger.write(src.printcolors.printcInfo("err : \n"))
599 self.logger.write(self.err + "\n")
601 def get_status(self):
602 """Get the status of the job (used by the Gui for xml display)
604 :return: The current status of the job
607 if not self.machine.successfully_connected(self.logger):
608 return "SSH connection KO"
609 if not self.has_begun():
610 return "Not launched"
613 if self.is_running():
614 return "running since " + time.strftime('%Y-%m-%d %H:%M:%S',
615 time.localtime(self._T0))
616 if self.has_finished():
617 if self.is_timeout():
618 return "Timeout since " + time.strftime('%Y-%m-%d %H:%M:%S',
619 time.localtime(self._Tf))
620 return "Finished since " + time.strftime('%Y-%m-%d %H:%M:%S',
621 time.localtime(self._Tf))
# NOTE(review): interior of class Jobs — the class statement and many body
# lines are elided from this listing.
624 '''Class to manage the jobs to be run
# __init__ fragment: the signature before line 631 is elided.
631 lenght_columns = 20):
632 # The jobs configuration
633 self.cfg_jobs = config_jobs
634 self.job_file_path = job_file_path
635 # The machine that will be used today
637 # The list of machine (hosts, port) that will be used today
638 # (a same host can have several machine instances since there
639 # can be several ssh parameters)
641 # The jobs to be launched today
643 # The jobs that will not be launched today
644 self.ljobs_not_today = []
# Column width used by the status display (note: "lenght" sic in source).
647 self.len_columns = lenght_columns
649 # the list of jobs that have not been run yet
650 self._l_jobs_not_started = []
651 # the list of jobs that have already ran
652 self._l_jobs_finished = []
653 # the list of jobs that are running
654 self._l_jobs_running = []
656 self.determine_jobs_and_machines()
658 def define_job(self, job_def, machine):
659 '''Takes a pyconf job definition and a machine (from class machine)
660 and returns the job instance corresponding to the definition.
662 :param job_def src.config.Mapping: a job definition
663 :param machine machine: the machine on which the job will run
664 :return: The corresponding job in a job class instance
668 cmmnds = job_def.commands
669 if not "timeout" in job_def:
670 timeout = 4*60*60 # default timeout = 4h
672 timeout = job_def.timeout
# Optional pyconf keys below override their defaults when present.
674 if 'after' in job_def:
675 after = job_def.after
677 if 'application' in job_def:
678 application = job_def.application
680 if 'board' in job_def:
681 board = job_def.board
683 if "prefix" in job_def:
684 prefix = job_def.prefix
698 def determine_jobs_and_machines(self):
699 '''Function that reads the pyconf jobs definition and instantiates all
700 the machines and jobs to be done today.
# weekday(): Monday == 0 ... Sunday == 6.
705 today = datetime.date.weekday(datetime.date.today())
708 for job_def in self.cfg_jobs.jobs :
710 if not "machine" in job_def:
711 msg = _('WARNING: The job "%s" do not have the key '
712 '"machine", this job is ignored.\n\n' % job_def.name)
713 self.logger.write(src.printcolors.printcWarning(msg))
715 name_machine = job_def.machine
# First, reuse an already-instantiated machine with the same name.
718 for mach in self.lmachines:
719 if mach.name == name_machine:
# Otherwise build it from the pyconf machine definitions, with
# per-field fallbacks to the local VARS configuration.
723 if a_machine == None:
724 for machine_def in self.cfg_jobs.machines:
725 if machine_def.name == name_machine:
726 if 'host' not in machine_def:
727 host = self.runner.cfg.VARS.hostname
729 host = machine_def.host
731 if 'user' not in machine_def:
732 user = self.runner.cfg.VARS.user
734 user = machine_def.user
736 if 'port' not in machine_def:
739 port = machine_def.port
741 if 'password' not in machine_def:
744 passwd = machine_def.password
746 if 'sat_path' not in machine_def:
747 sat_path = "salomeTools"
749 sat_path = machine_def.sat_path
760 self.lmachines.append(a_machine)
761 if (host, port) not in host_list:
762 host_list.append((host, port))
764 if a_machine == None:
765 msg = _("WARNING: The job \"%(job_name)s\" requires the "
766 "machine \"%(machine_name)s\" but this machine "
767 "is not defined in the configuration file.\n"
768 "The job will not be launched\n")
769 self.logger.write(src.printcolors.printcWarning(
770 msg % {"job_name" : job_def.name,
771 "machine_name" : name_machine}))
774 a_job = self.define_job(job_def, a_machine)
# Split the jobs between those scheduled today and the others.
776 if today in job_def.when:
777 self.ljobs.append(a_job)
778 else: # today in job_def.when
779 self.ljobs_not_today.append(a_job)
781 self.lhosts = host_list
783 def ssh_connection_all_machines(self, pad=50):
784 '''Function that do the ssh connection to every machine
790 self.logger.write(src.printcolors.printcInfo((
791 "Establishing connection with all the machines :\n")))
792 for machine in self.lmachines:
793 # little algorithm in order to display traces
794 begin_line = (_("Connection to %s: " % machine.name))
795 if pad - len(begin_line) < 0:
798 endline = (pad - len(begin_line)) * "." + " "
800 step = "SSH connection"
801 self.logger.write( begin_line + endline + step)
803 # the call to the method that initiate the ssh connection
804 msg = machine.connect(self.logger)
806 # Copy salomeTools to the remote machine
807 if machine.successfully_connected(self.logger):
808 step = _("Remove SAT")
809 self.logger.write('\r%s%s%s' % (begin_line, endline, 20 * " "),3)
810 self.logger.write('\r%s%s%s' % (begin_line, endline, step), 3)
# Wipe any previous SAT installation before copying the new one.
811 (__, out_dist, __) = machine.exec_command(
812 "rm -rf %s" % machine.sat_path,
818 self.logger.write('\r%s%s%s' % (begin_line, endline, 20 * " "),3)
819 self.logger.write('\r%s%s%s' % (begin_line, endline, step), 3)
821 res_copy = machine.copy_sat(self.runner.cfg.VARS.salometoolsway,
824 # set the local settings of sat on the remote machine using
826 (__, out_dist, __) = machine.exec_command(
827 os.path.join(machine.sat_path,
828 "sat init --base unknown --workdir"
829 " unknown --log_dir unknown"),
833 # get the remote machine distribution using a sat command
834 (__, out_dist, __) = machine.exec_command(
835 os.path.join(machine.sat_path,
836 "sat config --value VARS.dist --no_label"),
838 machine.distribution = out_dist.read().decode().replace("\n",
841 # Print the status of the copy
843 self.logger.write('\r%s' %
844 ((len(begin_line)+len(endline)+20) * " "), 3)
845 self.logger.write('\r%s%s%s' %
848 src.printcolors.printc(src.OK_STATUS)), 3)
850 self.logger.write('\r%s' %
851 ((len(begin_line)+len(endline)+20) * " "), 3)
852 self.logger.write('\r%s%s%s %s' %
855 src.printcolors.printc(src.KO_STATUS),
856 _("Copy of SAT failed: %s" % res_copy)), 3)
858 self.logger.write('\r%s' %
859 ((len(begin_line)+len(endline)+20) * " "), 3)
860 self.logger.write('\r%s%s%s %s' %
863 src.printcolors.printc(src.KO_STATUS),
865 self.logger.write("\n", 3)
867 self.logger.write("\n")
870 def is_occupied(self, hostname):
871 '''Function that returns True if a job is running on
872 the machine defined by its host and its port.
874 :param hostname (str, int): the pair (host, port)
875 :return: the job that is running on the host,
876 or false if there is no job running on the host.
881 for jb in self.ljobs:
882 if jb.machine.host == host and jb.machine.port == port:
887 def update_jobs_states_list(self):
888 '''Function that updates the lists that store the currently
889 running jobs and the jobs that have already finished.
894 jobs_finished_list = []
895 jobs_running_list = []
896 for jb in self.ljobs:
898 jobs_running_list.append(jb)
900 if jb.has_finished():
901 jobs_finished_list.append(jb)
903 nb_job_finished_before = len(self._l_jobs_finished)
904 self._l_jobs_finished = jobs_finished_list
905 self._l_jobs_running = jobs_running_list
907 nb_job_finished_now = len(self._l_jobs_finished)
# True when at least one job finished since the previous update.
909 return nb_job_finished_now > nb_job_finished_before
911 def cancel_dependencies_of_failing_jobs(self):
912 '''Function that cancels all the jobs that depend on a failing one.
918 for job in self.ljobs:
919 if job.after is None:
921 father_job = self.find_job_that_has_name(job.after)
922 if father_job is not None and father_job.has_failed():
925 def find_job_that_has_name(self, name):
926 '''Returns the job by its name.
928 :param name str: a job name
929 :return: the job that has the name.
932 for jb in self.ljobs:
935 # the following is executed only if the job was not found
938 def str_of_length(self, text, length):
939 '''Takes a string text of any length and returns
940 the most close string of length "length".
942 :param text str: any string
943 :param length int: a length for the returned string
944 :return: the most close string of length "length"
# Too long: truncate with an ellipsis; too short: center-pad with spaces.
947 if len(text) > length:
948 text_out = text[:length-3] + '...'
950 diff = length - len(text)
951 before = " " * (diff//2)
952 after = " " * (diff//2 + diff%2)
953 text_out = before + text + after
957 def display_status(self, len_col):
958 '''Takes a lenght and construct the display of the current status
959 of the jobs in an array that has a column for each host.
960 It displays the job that is currently running on the host
963 :param len_col int: the size of the column
969 for host_port in self.lhosts:
970 jb = self.is_occupied(host_port)
971 if not jb: # nothing running on the host
972 empty = self.str_of_length("empty", len_col)
973 display_line += "|" + empty
975 display_line += "|" + src.printcolors.printcInfo(
976 self.str_of_length(jb.name, len_col))
# \r rewrites the status line in place on the terminal.
978 self.logger.write("\r" + display_line + "|")
# run_jobs() fragment (def line elided): the scheduler main loop.
983 '''The main method. Runs all the jobs on every host.
984 For each host, at a given time, only one job can be running.
985 The jobs that have the field after (that contain the job that has
986 to be run before it) are run after the previous job.
987 This method stops when all the jobs are finished.
994 self.logger.write(src.printcolors.printcInfo(
995 _('Executing the jobs :\n')))
# Build the table header, one column per (host, port).
997 for host_port in self.lhosts:
1000 if port == 22: # default value
1001 text_line += "|" + self.str_of_length(host, self.len_columns)
1003 text_line += "|" + self.str_of_length(
1004 "("+host+", "+str(port)+")", self.len_columns)
1006 tiret_line = " " + "-"*(len(text_line)-1) + "\n"
1007 self.logger.write(tiret_line)
1008 self.logger.write(text_line + "|\n")
1009 self.logger.write(tiret_line)
1012 # The infinite loop that runs the jobs
1013 l_jobs_not_started = src.deepcopy_list(self.ljobs)
1014 while len(self._l_jobs_finished) != len(self.ljobs):
1015 new_job_start = False
1016 for host_port in self.lhosts:
# Skip hosts that already have a running job.
1018 if self.is_occupied(host_port):
1021 for jb in l_jobs_not_started:
1022 if (jb.machine.host, jb.machine.port) != host_port:
# Jobs without an "after" dependency start immediately.
1024 if jb.after == None:
1026 l_jobs_not_started.remove(jb)
1027 new_job_start = True
1030 jb_before = self.find_job_that_has_name(jb.after)
1031 if jb_before is None:
1033 msg = _("This job was not launched because its "
1034 "father is not in the jobs list.")
# Dependent jobs start only once their father has finished.
1038 if jb_before.has_finished():
1040 l_jobs_not_started.remove(jb)
1041 new_job_start = True
1043 self.cancel_dependencies_of_failing_jobs()
1044 new_job_finished = self.update_jobs_states_list()
# Refresh the xml boards only when something actually changed.
1046 if new_job_start or new_job_finished:
1048 self.gui.update_xml_files(self.ljobs)
1049 # Display the current status
1050 self.display_status(self.len_columns)
1052 # Make sure that the proc is not entirely busy
1055 self.logger.write("\n")
1056 self.logger.write(tiret_line)
1057 self.logger.write("\n\n")
1060 self.gui.update_xml_files(self.ljobs)
1061 self.gui.last_update()
1063 def write_all_results(self):
1064 '''Display all the jobs outputs.
1070 for jb in self.ljobs:
1071 self.logger.write(src.printcolors.printcLabel(
1072 "#------- Results for job %s -------#\n" % jb.name))
1074 self.logger.write("\n\n")
# NOTE(review): interior of class Gui — the class statement and several body
# lines are elided from this listing.
1077 '''Class to manage the the xml data that can be displayed in a browser to
# __init__ fragment: the signature before line 1090 is elided.
1090 :param xml_dir_path str: The path to the directory where to put
1091 the xml resulting files
1092 :param l_jobs List: the list of jobs that run today
1093 :param l_jobs_not_today List: the list of jobs that do not run today
1094 :param file_boards str: the file path from which to read the
1097 # The logging instance
1098 self.logger = logger
1100 # The prefix to add to the xml files : date_hour
1101 self.prefix = prefix
1103 # The path of the csv files to read to fill the expected boards
1104 self.file_boards = file_boards
# weekday(): Monday == 0 ... Sunday == 6.
1106 if file_boards != "":
1107 today = datetime.date.weekday(datetime.date.today())
1108 self.parse_csv_boards(today)
1110 self.d_input_boards = {}
1112 # The path of the global xml file
1113 self.xml_dir_path = xml_dir_path
1114 # Initialize the xml files
1115 self.global_name = "global_report"
1116 xml_global_path = os.path.join(self.xml_dir_path,
1117 self.global_name + ".xml")
1118 self.xml_global_file = src.xmlManager.XmlLogFile(xml_global_path,
1121 # Find history for each job
1123 self.find_history(l_jobs, l_jobs_not_today)
1125 # The xml files that corresponds to the boards.
1126 # {name_board : xml_object}}
1127 self.d_xml_board_files = {}
1129 # Create the lines and columns
1130 self.initialize_boards(l_jobs, l_jobs_not_today)
1132 # Write the xml file
1133 self.update_xml_files(l_jobs)
1135 def add_xml_board(self, name):
1136 '''Add a board to the board list
1137 :param name str: the board name
1139 xml_board_path = os.path.join(self.xml_dir_path, name + ".xml")
1140 self.d_xml_board_files[name] = src.xmlManager.XmlLogFile(
# Each board xml gets three top-level nodes: rows, columns, title.
1143 self.d_xml_board_files[name].add_simple_node("distributions")
1144 self.d_xml_board_files[name].add_simple_node("applications")
1145 self.d_xml_board_files[name].add_simple_node("board", text=name)
1147 def initialize_boards(self, l_jobs, l_jobs_not_today):
1148 '''Get all the first information needed for each file and write the
1149 first version of the files
1150 :param l_jobs List: the list of jobs that run today
1151 :param l_jobs_not_today List: the list of jobs that do not run today
1153 # Get the boards to fill and put it in a dictionary
1154 # {board_name : xml instance corresponding to the board}
1155 for job in l_jobs + l_jobs_not_today:
1157 if (board is not None and
1158 board not in self.d_xml_board_files.keys()):
1159 self.add_xml_board(board)
1161 # Verify that the boards given as input are done
1162 for board in list(self.d_input_boards.keys()):
1163 if board not in self.d_xml_board_files:
1164 self.add_xml_board(board)
1165 root_node = self.d_xml_board_files[board].xmlroot
1166 src.xmlManager.append_node_attrib(root_node,
1167 {"input_file" : self.file_boards})
1169 # Loop over all jobs in order to get the lines and columns for each
# Per-board accumulators of distributions (rows) and applications
# (columns) already registered in the xml.
1173 for board in self.d_xml_board_files:
1175 d_application[board] = []
1179 for job in l_jobs + l_jobs_not_today:
1181 if (job.machine.host, job.machine.port) not in l_hosts_ports:
1182 l_hosts_ports.append((job.machine.host, job.machine.port))
1184 distrib = job.machine.distribution
1185 application = job.application
1187 board_job = job.board
1190 for board in self.d_xml_board_files:
1191 if board_job == board:
1192 if (distrib not in [None, ''] and
1193 distrib not in d_dist[board]):
1194 d_dist[board].append(distrib)
1195 src.xmlManager.add_simple_node(
1196 self.d_xml_board_files[board].xmlroot.find(
1199 attrib={"name" : distrib})
1201 if board_job == board:
1202 if (application not in [None, ''] and
1203 application not in d_application[board]):
1204 d_application[board].append(application)
1205 src.xmlManager.add_simple_node(
1206 self.d_xml_board_files[board].xmlroot.find(
1210 "name" : application})
1212 # Verify that there are no missing application or distribution in the
1213 # xml board files (regarding the input boards)
1214 for board in self.d_xml_board_files:
1215 l_dist = d_dist[board]
1216 if board not in self.d_input_boards.keys():
1218 for dist in self.d_input_boards[board]["rows"]:
1219 if dist not in l_dist:
1220 src.xmlManager.add_simple_node(
1221 self.d_xml_board_files[board].xmlroot.find(
1224 attrib={"name" : dist})
1225 l_appli = d_application[board]
1226 for appli in self.d_input_boards[board]["columns"]:
1227 if appli not in l_appli:
1228 src.xmlManager.add_simple_node(
1229 self.d_xml_board_files[board].xmlroot.find(
1232 attrib={"name" : appli})
1234 # Initialize the hosts_ports node for the global file
1235 self.xmlhosts_ports = self.xml_global_file.add_simple_node(
1237 for host, port in l_hosts_ports:
1238 host_port = "%s:%i" % (host, port)
1239 src.xmlManager.add_simple_node(self.xmlhosts_ports,
1241 attrib={"name" : host_port})
1243 # Initialize the jobs node in all files
1244 for xml_file in [self.xml_global_file] + list(
1245 self.d_xml_board_files.values()):
1246 xml_jobs = xml_file.add_simple_node("jobs")
1247 # Get the jobs present in the config file but
1248 # that will not be launched today
1249 self.put_jobs_not_today(l_jobs_not_today, xml_jobs)
1251 # add also the infos node
1252 xml_file.add_simple_node("infos",
1253 attrib={"name" : "last update",
1254 "JobsCommandStatus" : "running"})
1256 # and put the history node
1257 history_node = xml_file.add_simple_node("history")
1258 name_board = os.path.basename(xml_file.logFile)[:-len(".xml")]
1259 # serach for board files
# Match timestamped board files: YYYYMMDD_HHMMSS_<board>.xml
1260 expression = "^[0-9]{8}_+[0-9]{6}_" + name_board + ".xml$"
1261 oExpr = re.compile(expression)
1262 # Get the list of xml borad files that are in the log directory
1263 for file_name in os.listdir(self.xml_dir_path):
1264 if oExpr.search(file_name):
1265 date = os.path.basename(file_name).split("_")[0]
1266 file_path = os.path.join(self.xml_dir_path, file_name)
1267 src.xmlManager.add_simple_node(history_node,
1270 attrib={"date" : date})
1273 # Find in each board the squares that needs to be filled regarding the
1274 # input csv files but that are not covered by a today job
1275 for board in self.d_input_boards.keys():
1276 xml_root_board = self.d_xml_board_files[board].xmlroot
1277 # Find the missing jobs for today
1278 xml_missing = src.xmlManager.add_simple_node(xml_root_board,
1280 for row, column in self.d_input_boards[board]["jobs"]:
1283 if (job.application == column and
1284 job.machine.distribution == row):
1288 src.xmlManager.add_simple_node(xml_missing,
1290 attrib={"distribution" : row,
1291 "application" : column })
1292 # Find the missing jobs not today
1293 xml_missing_not_today = src.xmlManager.add_simple_node(
1295 "missing_jobs_not_today")
1296 for row, column in self.d_input_boards[board]["jobs_not_today"]:
1298 for job in l_jobs_not_today:
1299 if (job.application == column and
1300 job.machine.distribution == row):
1304 src.xmlManager.add_simple_node(xml_missing_not_today,
1306 attrib={"distribution" : row,
1307 "application" : column })
    def find_history(self, l_jobs, l_jobs_not_today):
        """find, for each job, in the existent xml boards the results for the
        job. Store the results in the dictionnary self.history = {name_job :
        list of (date, status, list links)}

        :param l_jobs List: the list of jobs to run today
        :param l_jobs_not_today List: the list of jobs that do not run today
        """
        # load the all the history
        # Archived global reports are named "<YYYYMMDD>_<HHMMSS>_<name>.xml"
        expression = "^[0-9]{8}_+[0-9]{6}_" + self.global_name + ".xml$"
        oExpr = re.compile(expression)
        # Get the list of global xml that are in the log directory
        for file_name in os.listdir(self.xml_dir_path):
            if oExpr.search(file_name):
                file_path = os.path.join(self.xml_dir_path, file_name)
                    global_xml = src.xmlManager.ReadXmlFile(file_path)
                    l_globalxml.append(global_xml)
                except Exception as e:
                    # A report that cannot be parsed is skipped with a warning
                    # instead of aborting the whole history scan.
                    msg = _("\nWARNING: the file %s can not be read, it will be "
                            "ignored\n%s" % (file_path, e))
                    self.logger.write("%s\n" % src.printcolors.printcWarning(
        # Construct the dictionnary self.history
        for job in l_jobs + l_jobs_not_today:
            for global_xml in l_globalxml:
                # The date is the first "_"-separated field of the file name
                date = os.path.basename(global_xml.filePath).split("_")[0]
                global_root_node = global_xml.xmlroot.find("jobs")
                job_node = src.xmlManager.find_node_by_attrib(
                    if job_node.find("remote_log_file_path") is not None:
                        link = job_node.find("remote_log_file_path").text
                        res_job = job_node.find("res").text
                        # "nothing" is the placeholder used when no remote log
                        # file exists for that run
                        if link != "nothing":
                            l_links.append((date, res_job, link))
            # Sort the (date, status, link) tuples, most recent date first
            l_links = sorted(l_links, reverse=True)
            self.history[job.name] = l_links
    def put_jobs_not_today(self, l_jobs_not_today, xml_node_jobs):
        '''Get all the first information needed for each file and write the
        first version of the files

        :param xml_node_jobs etree.Element: the node corresponding to a job
        :param l_jobs_not_today List: the list of jobs that do not run today
        '''
        for job in l_jobs_not_today:
            # One <job name="..."> node per job that is not scheduled today
            xmlj = src.xmlManager.add_simple_node(xml_node_jobs,
                                                  attrib={"name" : job.name})
            src.xmlManager.add_simple_node(xmlj, "application", job.application)
            src.xmlManager.add_simple_node(xmlj,
                                           job.machine.distribution)
            src.xmlManager.add_simple_node(xmlj, "board", job.board)
            src.xmlManager.add_simple_node(xmlj,
                                   "commands", " ; ".join(job.commands))
            # State is hard-coded: these jobs do not run today by definition
            src.xmlManager.add_simple_node(xmlj, "state", "Not today")
            src.xmlManager.add_simple_node(xmlj, "machine", job.machine.name)
            src.xmlManager.add_simple_node(xmlj, "host", job.machine.host)
            src.xmlManager.add_simple_node(xmlj, "port", str(job.machine.port))
            src.xmlManager.add_simple_node(xmlj, "user", job.machine.user)
            src.xmlManager.add_simple_node(xmlj, "sat_path",
                                            job.machine.sat_path)
            # Attach the past results (collected by find_history) of this job
            xml_history = src.xmlManager.add_simple_node(xmlj, "history")
            for i, (date, res_job, link) in enumerate(self.history[job.name]):
                    # tag the first one (the last one)
                    src.xmlManager.add_simple_node(xml_history,
                                                   attrib={"date" : date,
                    src.xmlManager.add_simple_node(xml_history,
                                                   attrib={"date" : date,
    def parse_csv_boards(self, today):
        """ Parse the csv file that describes the boards to produce and fill
        the dict d_input_boards that contain the csv file contain

        :param today int: the current day of the week
        """
        # open the csv file and read its content
        with open(self.file_boards, 'r') as f:
            reader = csv.reader(f,delimiter=CSV_DELIMITER)
        # get the delimiter for the boards (empty line)
        boards_delimiter = [''] * len(l_read[0])
        # Make the list of boards, by splitting with the delimiter
        l_boards = [list(y) for x, y in itertools.groupby(l_read,
                                    lambda z: z == boards_delimiter) if not x]

        # loop over the csv lists of lines and get the rows, columns and jobs
        for input_board in l_boards:
            # The first cell of the first line is the board name
            board_name = input_board[0][0]

            # The rest of the first line lists the applications (columns)
            columns = input_board[0][1:]

            for line in input_board[1:]:
                for i, square in enumerate(line[1:]):
                    # A square lists the week-day numbers (separated by
                    # DAYS_SEPARATOR) on which the (row, column) job must run
                    days = square.split(DAYS_SEPARATOR)
                    days = [int(day) for day in days]
                    job = (row, columns[i])
                        jobs_not_today.append(job)

            d_boards[board_name] = {"rows" : rows,
                                    "columns" : columns,
                                    "jobs_not_today" : jobs_not_today}
        self.d_input_boards = d_boards
1448 def update_xml_files(self, l_jobs):
1449 '''Write all the xml files with updated information about the jobs
1451 :param l_jobs List: the list of jobs that run today
1453 for xml_file in [self.xml_global_file] + list(
1454 self.d_xml_board_files.values()):
1455 self.update_xml_file(l_jobs, xml_file)
1458 self.write_xml_files()
    def update_xml_file(self, l_jobs, xml_file):
        '''update information about the jobs for the file xml_file

        :param l_jobs List: the list of jobs that run today
        :param xml_file xmlManager.XmlLogFile: the xml instance to update
        '''
        xml_node_jobs = xml_file.xmlroot.find('jobs')
        # Update the job names and status node
            # Find the node corresponding to the job and delete it
            # in order to recreate it
            for xmljob in xml_node_jobs.findall('job'):
                if xmljob.attrib['name'] == job.name:
                    xml_node_jobs.remove(xmljob)

                # Format begin/end timestamps from the job's raw epoch times
                T0 = time.strftime('%Y-%m-%d %H:%M:%S',
                                   time.localtime(job._T0))
                Tf = time.strftime('%Y-%m-%d %H:%M:%S',
                                   time.localtime(job._Tf))

            # recreate the job node
            xmlj = src.xmlManager.add_simple_node(xml_node_jobs,
                                                  attrib={"name" : job.name})
            src.xmlManager.add_simple_node(xmlj, "machine", job.machine.name)
            src.xmlManager.add_simple_node(xmlj, "host", job.machine.host)
            src.xmlManager.add_simple_node(xmlj, "port", str(job.machine.port))
            src.xmlManager.add_simple_node(xmlj, "user", job.machine.user)
            # Past results of this job (collected by find_history)
            xml_history = src.xmlManager.add_simple_node(xmlj, "history")
            for date, res_job, link in self.history[job.name]:
                src.xmlManager.add_simple_node(xml_history,
                                               attrib={"date" : date,
            src.xmlManager.add_simple_node(xmlj, "sat_path",
                                           job.machine.sat_path)
            src.xmlManager.add_simple_node(xmlj, "application", job.application)
            src.xmlManager.add_simple_node(xmlj, "distribution",
                                           job.machine.distribution)
            src.xmlManager.add_simple_node(xmlj, "board", job.board)
            src.xmlManager.add_simple_node(xmlj, "timeout", str(job.timeout))
            src.xmlManager.add_simple_node(xmlj, "commands",
                                           " ; ".join(job.commands))
            src.xmlManager.add_simple_node(xmlj, "state", job.get_status())
            src.xmlManager.add_simple_node(xmlj, "begin", T0)
            src.xmlManager.add_simple_node(xmlj, "end", Tf)
            # Strip the terminal color escape codes before storing in xml
            src.xmlManager.add_simple_node(xmlj, "out",
                                           src.printcolors.cleancolor(job.out))
            src.xmlManager.add_simple_node(xmlj, "err",
                                           src.printcolors.cleancolor(job.err))
            src.xmlManager.add_simple_node(xmlj, "res", str(job.res_job))
            if len(job.remote_log_files) > 0:
                # Only the first remote log file is published in the report
                src.xmlManager.add_simple_node(xmlj,
                                               "remote_log_file_path",
                                               job.remote_log_files[0])
                src.xmlManager.add_simple_node(xmlj,
                                               "remote_log_file_path",
            # Search for the test log if there is any
            l_test_log_files = self.find_test_log(job.remote_log_files)
            xml_test = src.xmlManager.add_simple_node(xmlj,
                                                      "test_log_file_path")
            for test_log_path, res_test, nb_fails in l_test_log_files:
                test_path_node = src.xmlManager.add_simple_node(xml_test,
                test_path_node.attrib["res"] = res_test
                test_path_node.attrib["nb_fails"] = nb_fails

            xmlafter = src.xmlManager.add_simple_node(xmlj, "after", job.after)
            # get the job father
            if job.after is not None:
                    if jb.name == job.after:
                # Publish a link to the father job's remote log, if any
                if (job_father is not None and
                    len(job_father.remote_log_files) > 0):
                    link = job_father.remote_log_files[0]
                    src.xmlManager.append_node_attrib(xmlafter, {"link" : link})

            # Verify that the job is to be done today regarding the input csv
            if job.board and job.board in self.d_input_boards.keys():
                for dist, appli in self.d_input_boards[job.board]["jobs"]:
                    if (job.machine.distribution == dist
                        and job.application == appli):
                        src.xmlManager.add_simple_node(xmlj,
                    src.xmlManager.add_simple_node(xmlj,

        # Stamp the report with the time of this update
        xml_node_infos = xml_file.xmlroot.find('infos')
        src.xmlManager.append_node_attrib(xml_node_infos,
                       datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")})
    def find_test_log(self, l_remote_log_files):
        '''Find if there is a test log (board) in the remote log files and
        the path to it. There can be several test command, so the result is
        a list.

        :param l_remote_log_files List: the list of all remote log files
        :return: the list of (test log files path, res of the command)
        '''
        for file_path in l_remote_log_files:
            dirname = os.path.basename(os.path.dirname(file_path))
            file_name = os.path.basename(file_path)
            # Log files of test commands match this project-wide pattern
            regex = src.logger.log_all_command_file_expression
            oExpr = re.compile(regex)
            if dirname == "TEST" and oExpr.search(file_name):
                # find the res of the command
                prod_node = etree.parse(file_path).getroot().find("product")
                res_test = prod_node.attrib["global_res"]
                # find the number of fails
                testbase_node = prod_node.find("tests").find("testbase")
                nb_fails = int(testbase_node.attrib["failed"])
                # put the file path, the res of the test command and the number
                # of fails in the output
                res.append((file_path, res_test, nb_fails))
1605 def last_update(self, finish_status = "finished"):
1606 '''update information about the jobs for the file xml_file
1608 :param l_jobs List: the list of jobs that run today
1609 :param xml_file xmlManager.XmlLogFile: the xml instance to update
1611 for xml_file in [self.xml_global_file] + list(self.d_xml_board_files.values()):
1612 xml_node_infos = xml_file.xmlroot.find('infos')
1613 src.xmlManager.append_node_attrib(xml_node_infos,
1614 attrib={"JobsCommandStatus" : finish_status})
1616 self.write_xml_files()
1618 def write_xml_file(self, xml_file, stylesheet):
1619 ''' Write one xml file and the same file with prefix
1621 xml_file.write_tree(stylesheet)
1622 file_path = xml_file.logFile
1623 file_dir = os.path.dirname(file_path)
1624 file_name = os.path.basename(file_path)
1625 file_name_with_prefix = self.prefix + "_" + file_name
1626 xml_file.write_tree(stylesheet, os.path.join(file_dir,
1627 file_name_with_prefix))
1629 def write_xml_files(self):
1630 ''' Write the xml files
1632 self.write_xml_file(self.xml_global_file, STYLESHEET_GLOBAL)
1633 for xml_file in self.d_xml_board_files.values():
1634 self.write_xml_file(xml_file, STYLESHEET_BOARD)
def get_config_file_path(job_config_name, l_cfg_dir):
    '''Find the .pyconf configuration file corresponding to job_config_name,
    either taken as a direct path or searched in the directories of
    l_cfg_dir (appending the ".pyconf" extension if needed).

    :param job_config_name str: the name or path of the jobs configuration
    :param l_cfg_dir List: the directories in which to search for the file
    :return: (found, file_jobs_cfg) the search result and the candidate path
    '''
    # NOTE(review): `found` is assigned in branches of this function —
    # presumably True when the file exists; verify before relying on it.
    file_jobs_cfg = None
    if os.path.exists(job_config_name) and job_config_name.endswith(".pyconf"):
        file_jobs_cfg = job_config_name
    for cfg_dir in l_cfg_dir:
        file_jobs_cfg = os.path.join(cfg_dir, job_config_name)
        # The .pyconf extension may be omitted by the caller
        if not file_jobs_cfg.endswith('.pyconf'):
            file_jobs_cfg += '.pyconf'
        if not os.path.exists(file_jobs_cfg):
    return found, file_jobs_cfg
def develop_factorized_jobs(config_jobs):
    '''Expand the factorized job descriptions: a job whose "machine" field
    is a list is duplicated into one job per machine entry.

    :param config_jobs Config: the config corresponding to the jobs
                               description
    '''
    developed_jobs_list = []
    for jb in config_jobs.jobs:
        # case where the jobs are not developed
        if type(jb.machine) == type(""):
            developed_jobs_list.append(jb)
        # Case where the jobs must be developed
        # machine : ["CO7.2 physique", ["CO6.4 physique", $MONDAY, $TUESDAY ], "FD22"]
        for machine in jb.machine:
            # Each machine entry yields an independent copy of the job
            new_job = src.pyconf.deepCopyMapping(jb)
            # case where there is a jobs on the machine corresponding to all
            # days in when variable.
            if type(machine) == type(""):
                new_job.machine = machine
                new_job.name = name_job + " / " + machine
                # case the days are re defined
                new_job.machine = machine[0]
                new_job.name = name_job + " / " + machine[0]
                new_job.when = machine[1:]
            developed_jobs_list.append(new_job)
    # Replace the factorized list by the fully developed one
    config_jobs.jobs = developed_jobs_list
1688 # Describes the command
1690 return _("The jobs command launches maintenances that are described"
1691 " in the dedicated jobs configuration file.\n\nexample:\nsat "
1692 "jobs --name my_jobs --publish")
def run(args, runner, logger):
    '''Entry point of the "sat jobs" command.

    :param args List: the command-line arguments of the jobs command
    :param runner: the salomeTools runner giving access to the config
    :param logger: the logger instance used to write messages and links
    '''
    (options, args) = parser.parse_args(args)

    # Directories in which jobs configuration files are searched
    l_cfg_dir = runner.cfg.PATHS.JOBPATH

    # list option : display all the available config files
        for cfg_dir in l_cfg_dir:
            if not options.no_label:
                logger.write("------ %s\n" %
                                src.printcolors.printcHeader(cfg_dir))
            if not os.path.exists(cfg_dir):
            for f in sorted(os.listdir(cfg_dir)):
                # Only .pyconf files are jobs configurations
                if not f.endswith('.pyconf'):
                logger.write("%s\n" % cfilename)

    # Make sure the jobs_config option has been called
    if not options.jobs_cfg:
        message = _("The option --jobs_config is required\n")
        src.printcolors.printcError(message)

    # Find the file in the directories, unless it is a full path
    # merge all in a config
    merger = src.pyconf.ConfigMerger()
    config_jobs = src.pyconf.Config()
    l_conf_files_path = []
    for config_file in options.jobs_cfg:
        found, file_jobs_cfg = get_config_file_path(config_file, l_cfg_dir)
            msg = _("The file configuration %s was not found."
                    "\nUse the --list option to get the "
                    "possible files." % config_file)
            logger.write("%s\n" % src.printcolors.printcError(msg), 1)
        l_conf_files_path.append(file_jobs_cfg)
        # Read the config that is in the file
        one_config_jobs = src.read_config_from_a_file(file_jobs_cfg)
        merger.merge(config_jobs, one_config_jobs)

            (_("Platform"), runner.cfg.VARS.dist),
            (_("Files containing the jobs configuration"), l_conf_files_path)
    src.print_info(logger, info)

    # Restrict the jobs to the ones given with --only_jobs, if any
    if options.only_jobs:
        l_jb = src.pyconf.Sequence()
        for jb in config_jobs.jobs:
            if jb.name in options.only_jobs:
                        "Job that was given in only_jobs option parameters\n")
        config_jobs.jobs = l_jb

    # Parse the config jobs in order to develop all the factorized jobs
    develop_factorized_jobs(config_jobs)

    # Make a unique file that contain all the jobs in order to use it
    name_pyconf = "_".join([os.path.basename(path)[:-len('.pyconf')]
                            for path in l_conf_files_path]) + ".pyconf"
    path_pyconf = src.get_tmp_filename(runner.cfg, name_pyconf)
    # NOTE(review): 'file' is the Python 2 builtin; use open() when porting
    # to Python 3.
    f = file( path_pyconf , 'w')
    config_jobs.__save__(f)

    # log the paramiko problems
    log_dir = src.get_log_path(runner.cfg)
    paramiko_log_dir_path = os.path.join(log_dir, "JOBS")
    src.ensure_path_exists(paramiko_log_dir_path)
    paramiko.util.log_to_file(os.path.join(paramiko_log_dir_path,
                                           logger.txtFileName))

    today_jobs = Jobs(runner,
    # SSH connection to all machines
    today_jobs.ssh_connection_all_machines()
    if options.test_connection:
        logger.write(src.printcolors.printcInfo(
                                        _("Initialize the xml boards : ")), 5)

        # Copy the stylesheets in the log directory
        xsl_dir = os.path.join(runner.cfg.VARS.srcDir, 'xsl')
        files_to_copy.append(os.path.join(xsl_dir, STYLESHEET_GLOBAL))
        files_to_copy.append(os.path.join(xsl_dir, STYLESHEET_BOARD))
        files_to_copy.append(os.path.join(xsl_dir, "command.xsl"))
        files_to_copy.append(os.path.join(xsl_dir, "running.gif"))
        for file_path in files_to_copy:
            shutil.copy2(file_path, log_dir)

        # Instanciate the Gui in order to produce the xml files that contain all
                  today_jobs.ljobs_not_today,
                  runner.cfg.VARS.datehour,
                  file_boards = options.input_boards)

        logger.write(src.printcolors.printcSuccess("OK"), 5)
        logger.write("\n\n", 5)

        # Display the list of the xml files
        logger.write(src.printcolors.printcInfo(("Here is the list of published"
        logger.write("%s\n" % gui.xml_global_file.logFile, 4)
        for board in gui.d_xml_board_files.keys():
            file_path = gui.d_xml_board_files[board].logFile
            file_name = os.path.basename(file_path)
            logger.write("%s\n" % file_path, 4)
            # Register a clickable link to the board in the command log
            logger.add_link(file_name, "board", 0, board)
        logger.write("\n", 4)

        today_jobs.gui = gui

        # Run all the jobs contained in config_jobs
        today_jobs.run_jobs()
    except KeyboardInterrupt:
        logger.write("\n\n%s\n\n" %
                (src.printcolors.printcWarning(_("Forced interruption"))), 1)
    except Exception as e:
        msg = _("CRITICAL ERROR: The jobs loop has been interrupted\n")
        logger.write("\n\n%s\n" % src.printcolors.printcError(msg) )
        logger.write("%s\n" % str(e))
        # Dump the traceback into a temporary file to log it
        __, __, exc_traceback = sys.exc_info()
        fp = tempfile.TemporaryFile()
        traceback.print_tb(exc_traceback, file=fp)
        logger.write("\nTRACEBACK: %s\n" % stack.replace('"',"'"), 1)
        msg = _("Killing the running jobs and trying"
                " to get the corresponding logs\n")
        logger.write(src.printcolors.printcWarning(msg))

        # find the potential not finished jobs and kill them
        for jb in today_jobs.ljobs:
            if not jb.has_finished():
                    jb.kill_remote_process()
                except Exception as e:
                    msg = _("Failed to kill job %s: %s\n" % (jb.name, e))
                    logger.write(src.printcolors.printcWarning(msg))
            if jb.res_job != "0":
            today_jobs.gui.last_update(_("Forced interruption"))
            today_jobs.gui.last_update()
        # Output the results
        today_jobs.write_all_results()
        # Remove the temporary pyconf file
        if os.path.exists(path_pyconf):
            os.remove(path_pyconf)