3 # Copyright (C) 2010-2013 CEA/DEN
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 # Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
32 import src.ElementTree as etree
34 STYLESHEET_GLOBAL = "jobs_global_report.xsl"
35 STYLESHEET_BOARD = "jobs_board_report.xsl"
40 parser = src.options.Options()
42 parser.add_option('n', 'name', 'list2', 'jobs_cfg',
43 _('Mandatory: The name of the config file that contains'
44 ' the jobs configuration. Can be a list.'))
45 parser.add_option('o', 'only_jobs', 'list2', 'only_jobs',
46 _('Optional: the list of jobs to launch, by their name. '))
47 parser.add_option('l', 'list', 'boolean', 'list',
48 _('Optional: list all available config files.'))
49 parser.add_option('t', 'test_connection', 'boolean', 'test_connection',
50 _("Optional: try to connect to the machines. "
51 "Not executing the jobs."),
53 parser.add_option('p', 'publish', 'boolean', 'publish',
54 _("Optional: generate an xml file that can be read in a "
55 "browser to display the jobs status."),
57 parser.add_option('i', 'input_boards', 'string', 'input_boards', _("Optional: "
58 "the path to csv file that contain "
59 "the expected boards."),"")
60 parser.add_option('', 'completion', 'boolean', 'no_label',
61 _("Optional (internal use): do not print labels, Works only "
65 class Machine(object):
66 '''Class to manage a ssh connection on a machine
74 sat_path="salomeTools"):
78 self.distribution = None # Will be filled after copying SAT on the machine
80 self.password = passwd
81 self.sat_path = sat_path
82 self.ssh = paramiko.SSHClient()
83 self._connection_successful = None
85 def connect(self, logger):
86 '''Initiate the ssh connection to the remote machine
88 :param logger src.logger.Logger: The logger instance
93 self._connection_successful = False
94 self.ssh.load_system_host_keys()
95 self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
97 self.ssh.connect(self.host,
100 password = self.password)
101 except paramiko.AuthenticationException:
102 message = src.KO_STATUS + _("Authentication failed")
103 except paramiko.BadHostKeyException:
104 message = (src.KO_STATUS +
105 _("The server's host key could not be verified"))
106 except paramiko.SSHException:
107 message = ( _("SSHException error connecting or "
108 "establishing an SSH session"))
110 message = ( _("Error connecting or establishing an SSH session"))
112 self._connection_successful = True
116 def successfully_connected(self, logger):
117 '''Verify if the connection to the remote machine has succeed
119 :param logger src.logger.Logger: The logger instance
120 :return: True if the connection has succeed, False if not
123 if self._connection_successful == None:
124 message = _("Warning : trying to ask if the connection to "
125 "(name: %s host: %s, port: %s, user: %s) is OK whereas there were"
126 " no connection request" %
127 (self.name, self.host, self.port, self.user))
128 logger.write( src.printcolors.printcWarning(message))
129 return self._connection_successful
131 def copy_sat(self, sat_local_path, job_file):
132 '''Copy salomeTools to the remote machine in self.sat_path
136 # open a sftp connection
137 self.sftp = self.ssh.open_sftp()
138 # Create the sat directory on remote machine if it is not existing
139 self.mkdir(self.sat_path, ignore_existing=True)
141 self.put_dir(sat_local_path, self.sat_path, filters = ['.git'])
142 # put the job configuration file in order to make it reachable
143 # on the remote machine
144 remote_job_file_name = ".%s" % os.path.basename(job_file)
145 self.sftp.put(job_file, os.path.join(self.sat_path,
146 remote_job_file_name))
147 except Exception as e:
149 self._connection_successful = False
153 def put_dir(self, source, target, filters = []):
154 ''' Uploads the contents of the source directory to the target path. The
155 target directory needs to exists. All sub-directories in source are
156 created under target.
158 for item in os.listdir(source):
161 source_path = os.path.join(source, item)
162 destination_path = os.path.join(target, item)
163 if os.path.islink(source_path):
164 linkto = os.readlink(source_path)
166 self.sftp.symlink(linkto, destination_path)
167 self.sftp.chmod(destination_path,
168 os.stat(source_path).st_mode)
172 if os.path.isfile(source_path):
173 self.sftp.put(source_path, destination_path)
174 self.sftp.chmod(destination_path,
175 os.stat(source_path).st_mode)
177 self.mkdir(destination_path, ignore_existing=True)
178 self.put_dir(source_path, destination_path)
180 def mkdir(self, path, mode=511, ignore_existing=False):
181 ''' Augments mkdir by adding an option to not fail
185 self.sftp.mkdir(path, mode)
192 def exec_command(self, command, logger):
193 '''Execute the command on the remote machine
195 :param command str: The command to be run
196 :param logger src.logger.Logger: The logger instance
197 :return: the stdin, stdout, and stderr of the executing command,
199 :rtype: (paramiko.channel.ChannelFile, paramiko.channel.ChannelFile,
200 paramiko.channel.ChannelFile)
203 # Does not wait the end of the command
204 (stdin, stdout, stderr) = self.ssh.exec_command(command)
205 except paramiko.SSHException:
206 message = src.KO_STATUS + _(
207 ": the server failed to execute the command\n")
208 logger.write( src.printcolors.printcError(message))
209 return (None, None, None)
211 logger.write( src.printcolors.printcError(src.KO_STATUS + '\n'))
212 return (None, None, None)
214 return (stdin, stdout, stderr)
217 '''Close the ssh connection
223 def write_info(self, logger):
224 '''Prints the informations relative to the machine in the logger
225 (terminal traces and log file)
227 :param logger src.logger.Logger: The logger instance
231 logger.write("host : " + self.host + "\n")
232 logger.write("port : " + str(self.port) + "\n")
233 logger.write("user : " + str(self.user) + "\n")
234 if self.successfully_connected(logger):
235 status = src.OK_STATUS
237 status = src.KO_STATUS
238 logger.write("Connection : " + status + "\n\n")
242 '''Class to manage one job
258 self.machine = machine
260 self.timeout = timeout
261 self.application = application
265 # The list of log files to download from the remote machine
266 self.remote_log_files = []
268 # The remote command status
269 # -1 means that it has not been launched,
270 # 0 means success and 1 means fail
272 self.cancelled = False
276 self._has_begun = False
277 self._has_finished = False
278 self._has_timouted = False
279 self._stdin = None # Store the command inputs field
280 self._stdout = None # Store the command outputs field
281 self._stderr = None # Store the command errors field
286 self.name_remote_jobs_pyconf = ".%s" % os.path.basename(job_file_path)
287 self.commands = commands
288 self.command = (os.path.join(self.machine.sat_path, "sat") +
290 os.path.join(self.machine.sat_path,
291 "list_log_files.txt") +
292 " job --jobs_config " +
293 os.path.join(self.machine.sat_path,
294 self.name_remote_jobs_pyconf) +
298 self.command = prefix + ' "' + self.command +'"'
301 """ Get the pid(s) corresponding to the command that have been launched
302 On the remote machine
304 :return: The list of integers corresponding to the found pids
308 cmd_pid = 'ps aux | grep "' + self.command + '" | awk \'{print $2}\''
309 (_, out_pid, _) = self.machine.exec_command(cmd_pid, self.logger)
310 pids_cmd = out_pid.readlines()
311 pids_cmd = [str(src.only_numbers(pid)) for pid in pids_cmd]
315 def kill_remote_process(self, wait=1):
316 '''Kills the process on the remote machine.
318 :return: (the output of the kill, the error of the kill)
322 pids = self.get_pids()
324 return ("Unable to get the pid of the command.", "")
326 cmd_kill = " ; ".join([("kill -2 " + pid) for pid in pids])
327 (_, out_kill, err_kill) = self.machine.exec_command(cmd_kill,
330 return (out_kill.read().decode(), err_kill.read().decode())
333 '''Returns True if the job has already begun
335 :return: True if the job has already begun
338 return self._has_begun
340 def has_finished(self):
341 '''Returns True if the job has already finished
342 (i.e. all the commands have been executed)
343 If it is finished, the outputs are stored in the fields out and err.
345 :return: True if the job has already finished
349 # If the method has already been called and returned True
350 if self._has_finished:
353 # If the job has not begun yet
354 if not self.has_begun():
357 if self._stdout.channel.closed:
358 self._has_finished = True
359 # Store the result outputs
360 self.out += self._stdout.read().decode()
361 self.err += self._stderr.read().decode()
363 self._Tf = time.time()
364 # And get the remote command status and log files
367 except Exception as e:
368 self.err += _("Unable to get remote log files: %s" % e)
370 return self._has_finished
372 def get_log_files(self):
373 """Get the log files produced by the command launched
374 on the remote machine, and put it in the log directory of the user,
375 so they can be accessible from
377 # Do not get the files if the command is not finished
378 if not self.has_finished():
379 msg = _("Trying to get log files whereas the job is not finished.")
380 self.logger.write(src.printcolors.printcWarning(msg))
383 # First get the file that contains the list of log files to get
384 tmp_file_path = src.get_tmp_filename(self.config, "list_log_files.txt")
385 remote_path = os.path.join(self.machine.sat_path, "list_log_files.txt")
386 self.machine.sftp.get(
390 # Read the file and get the result of the command and all the log files
392 fstream_tmp = open(tmp_file_path, "r")
393 file_lines = fstream_tmp.readlines()
394 file_lines = [line.replace("\n", "") for line in file_lines]
396 os.remove(tmp_file_path)
399 # The first line is the result of the command (0 success or 1 fail)
400 self.res_job = file_lines[0]
401 except Exception as e:
402 self.err += _("Unable to get status from remote file %s: %s" %
403 (remote_path, str(e)))
405 for i, job_path_remote in enumerate(file_lines[1:]):
407 # For each command, there is two files to get :
408 # 1- The xml file describing the command and giving the
410 # 2- The txt file containing the system command traces (like
411 # traces produced by the "make" command)
412 # 3- In case of the test command, there is another file to get :
413 # the xml board that contain the test results
414 dirname = os.path.basename(os.path.dirname(job_path_remote))
415 if dirname != 'OUT' and dirname != 'TEST':
417 local_path = os.path.join(os.path.dirname(
418 self.logger.logFilePath),
419 os.path.basename(job_path_remote))
420 if i==0: # The first is the job command
421 self.logger.add_link(os.path.basename(job_path_remote),
425 elif dirname == 'OUT':
427 local_path = os.path.join(os.path.dirname(
428 self.logger.logFilePath),
430 os.path.basename(job_path_remote))
431 elif dirname == 'TEST':
433 local_path = os.path.join(os.path.dirname(
434 self.logger.logFilePath),
436 os.path.basename(job_path_remote))
439 if not os.path.exists(local_path):
440 self.machine.sftp.get(job_path_remote, local_path)
441 self.remote_log_files.append(local_path)
442 except Exception as e:
443 self.err += _("Unable to get %s log file from remote: %s" %
444 (str(job_path_remote),
447 def has_failed(self):
448 '''Returns True if the job has failed.
449 A job is considered as failed if the machine could not be reached,
450 if the remote command failed,
451 or if the job finished with a time out.
453 :return: True if the job has failed
456 if not self.has_finished():
458 if not self.machine.successfully_connected(self.logger):
460 if self.is_timeout():
462 if self.res_job == "1":
467 """In case of a failing job, one has to cancel every job that depend
468 on it. This method put the job as failed and will not be executed.
472 self._has_begun = True
473 self._has_finished = True
474 self.cancelled = True
475 self.out += _("This job was not launched because its father has failed.")
476 self.err += _("This job was not launched because its father has failed.")
478 def is_running(self):
479 '''Returns True if the job commands are running
481 :return: True if the job is running
484 return self.has_begun() and not self.has_finished()
486 def is_timeout(self):
487 '''Returns True if the job commands has finished with timeout
489 :return: True if the job has finished with timeout
492 return self._has_timouted
494 def time_elapsed(self):
495 """Get the time elapsed since the job launching
497 :return: The number of seconds
500 if not self.has_begun():
503 return T_now - self._T0
505 def check_time(self):
506 """Verify that the job has not exceeded its timeout.
507 If it has, kill the remote command and consider the job as finished.
509 if not self.has_begun():
511 if self.time_elapsed() > self.timeout:
512 self._has_finished = True
513 self._has_timouted = True
514 self._Tf = time.time()
515 (out_kill, __) = self.kill_remote_process()
516 self.out += "TIMEOUT \n" + out_kill
517 self.err += "TIMEOUT : %s seconds elapsed\n" % str(self.timeout)
520 except Exception as e:
521 self.err += _("Unable to get remote log files!\n%s\n" % str(e))
523 def total_duration(self):
524 """Give the total duration of the job
526 :return: the total duration of the job in seconds
529 return self._Tf - self._T0
532 """Launch the job by executing the remote command.
535 # Prevent multiple run
537 msg = _("Warning: A job can only be launched one time")
538 msg2 = _("Trying to launch the job \"%s\" whereas it has "
539 "already been launched." % self.name)
540 self.logger.write(src.printcolors.printcWarning("%s\n%s\n" % (msg,
544 # Do not execute the command if the machine could not be reached
545 if not self.machine.successfully_connected(self.logger):
546 self._has_finished = True
548 self.err += ("Connection to machine (name : %s, host: %s, port:"
549 " %s, user: %s) has failed\nUse the log command "
550 "to get more information."
551 % (self.machine.name,
556 # Usual case : Launch the command on remote machine
557 self._T0 = time.time()
558 self._stdin, self._stdout, self._stderr = self.machine.exec_command(
561 # If the results are not initialized, finish the job
562 if (self._stdin, self._stdout, self._stderr) == (None, None, None):
563 self._has_finished = True
564 self._Tf = time.time()
566 self.err += "The server failed to execute the command"
568 # Put the beginning flag to true.
569 self._has_begun = True
571 def write_results(self):
572 """Display on the terminal all the job's information
574 self.logger.write("name : " + self.name + "\n")
576 self.logger.write("after : %s\n" % self.after)
577 self.logger.write("Time elapsed : %4imin %2is \n" %
578 (self.total_duration()//60 , self.total_duration()%60))
580 self.logger.write("Begin time : %s\n" %
581 time.strftime('%Y-%m-%d %H:%M:%S',
582 time.localtime(self._T0)) )
584 self.logger.write("End time : %s\n\n" %
585 time.strftime('%Y-%m-%d %H:%M:%S',
586 time.localtime(self._Tf)) )
588 machine_head = "Informations about connection :\n"
589 underline = (len(machine_head) - 2) * "-"
590 self.logger.write(src.printcolors.printcInfo(
591 machine_head+underline+"\n"))
592 self.machine.write_info(self.logger)
594 self.logger.write(src.printcolors.printcInfo("out : \n"))
596 self.logger.write("Unable to get output\n")
598 self.logger.write(self.out + "\n")
599 self.logger.write(src.printcolors.printcInfo("err : \n"))
600 self.logger.write(self.err + "\n")
602 def get_status(self):
603 """Get the status of the job (used by the Gui for xml display)
605 :return: The current status of the job
608 if not self.machine.successfully_connected(self.logger):
609 return "SSH connection KO"
610 if not self.has_begun():
611 return "Not launched"
614 if self.is_running():
615 return "running since " + time.strftime('%Y-%m-%d %H:%M:%S',
616 time.localtime(self._T0))
617 if self.has_finished():
618 if self.is_timeout():
619 return "Timeout since " + time.strftime('%Y-%m-%d %H:%M:%S',
620 time.localtime(self._Tf))
621 return "Finished since " + time.strftime('%Y-%m-%d %H:%M:%S',
622 time.localtime(self._Tf))
625 '''Class to manage the jobs to be run
632 lenght_columns = 20):
633 # The jobs configuration
634 self.cfg_jobs = config_jobs
635 self.job_file_path = job_file_path
636 # The machine that will be used today
638 # The list of machine (hosts, port) that will be used today
639 # (a same host can have several machine instances since there
640 # can be several ssh parameters)
642 # The jobs to be launched today
644 # The jobs that will not be launched today
645 self.ljobs_not_today = []
648 self.len_columns = lenght_columns
650 # the list of jobs that have not been run yet
651 self._l_jobs_not_started = []
652 # the list of jobs that have already ran
653 self._l_jobs_finished = []
654 # the list of jobs that are running
655 self._l_jobs_running = []
657 self.determine_jobs_and_machines()
659 def define_job(self, job_def, machine):
660 '''Takes a pyconf job definition and a machine (from class machine)
661 and returns the job instance corresponding to the definition.
663 :param job_def src.config.Mapping: a job definition
664 :param machine machine: the machine on which the job will run
665 :return: The corresponding job in a job class instance
669 cmmnds = job_def.commands
670 if not "timeout" in job_def:
671 timeout = 4*60*60 # default timeout = 4h
673 timeout = job_def.timeout
675 if 'after' in job_def:
676 after = job_def.after
678 if 'application' in job_def:
679 application = job_def.application
681 if 'board' in job_def:
682 board = job_def.board
684 if "prefix" in job_def:
685 prefix = job_def.prefix
699 def determine_jobs_and_machines(self):
700 '''Function that reads the pyconf jobs definition and instantiates all
701 the machines and jobs to be done today.
706 today = datetime.date.weekday(datetime.date.today())
709 for job_def in self.cfg_jobs.jobs :
711 if not "machine" in job_def:
712 msg = _('WARNING: The job "%s" do not have the key '
713 '"machine", this job is ignored.\n\n' % job_def.name)
714 self.logger.write(src.printcolors.printcWarning(msg))
716 name_machine = job_def.machine
719 for mach in self.lmachines:
720 if mach.name == name_machine:
724 if a_machine == None:
725 for machine_def in self.cfg_jobs.machines:
726 if machine_def.name == name_machine:
727 if 'host' not in machine_def:
728 host = self.runner.cfg.VARS.hostname
730 host = machine_def.host
732 if 'user' not in machine_def:
733 user = self.runner.cfg.VARS.user
735 user = machine_def.user
737 if 'port' not in machine_def:
740 port = machine_def.port
742 if 'password' not in machine_def:
745 passwd = machine_def.password
747 if 'sat_path' not in machine_def:
748 sat_path = "salomeTools"
750 sat_path = machine_def.sat_path
761 self.lmachines.append(a_machine)
762 if (host, port) not in host_list:
763 host_list.append((host, port))
765 if a_machine == None:
766 msg = _("WARNING: The job \"%(job_name)s\" requires the "
767 "machine \"%(machine_name)s\" but this machine "
768 "is not defined in the configuration file.\n"
769 "The job will not be launched\n")
770 self.logger.write(src.printcolors.printcWarning(
771 msg % {"job_name" : job_def.name,
772 "machine_name" : name_machine}))
775 a_job = self.define_job(job_def, a_machine)
777 if today in job_def.when:
778 self.ljobs.append(a_job)
779 else: # today in job_def.when
780 self.ljobs_not_today.append(a_job)
782 self.lhosts = host_list
784 def ssh_connection_all_machines(self, pad=50):
785 '''Function that do the ssh connection to every machine
791 self.logger.write(src.printcolors.printcInfo((
792 "Establishing connection with all the machines :\n")))
793 for machine in self.lmachines:
794 # little algorithm in order to display traces
795 begin_line = (_("Connection to %s: " % machine.name))
796 if pad - len(begin_line) < 0:
799 endline = (pad - len(begin_line)) * "." + " "
801 step = "SSH connection"
802 self.logger.write( begin_line + endline + step)
804 # the call to the method that initiate the ssh connection
805 msg = machine.connect(self.logger)
807 # Copy salomeTools to the remote machine
808 if machine.successfully_connected(self.logger):
809 step = _("Remove SAT")
810 self.logger.write('\r%s%s%s' % (begin_line, endline, 20 * " "),3)
811 self.logger.write('\r%s%s%s' % (begin_line, endline, step), 3)
812 (__, out_dist, __) = machine.exec_command(
813 "rm -rf %s" % machine.sat_path,
819 self.logger.write('\r%s%s%s' % (begin_line, endline, 20 * " "),3)
820 self.logger.write('\r%s%s%s' % (begin_line, endline, step), 3)
822 res_copy = machine.copy_sat(self.runner.cfg.VARS.salometoolsway,
825 # set the local settings of sat on the remote machine using
827 (__, out_dist, __) = machine.exec_command(
828 os.path.join(machine.sat_path,
829 "sat init --base unknown --workdir"
830 " unknown --log_dir unknown"),
834 # get the remote machine distribution using a sat command
835 (__, out_dist, __) = machine.exec_command(
836 os.path.join(machine.sat_path,
837 "sat config --value VARS.dist --no_label"),
839 machine.distribution = out_dist.read().decode().replace("\n",
842 # Print the status of the copy
844 self.logger.write('\r%s' %
845 ((len(begin_line)+len(endline)+20) * " "), 3)
846 self.logger.write('\r%s%s%s' %
849 src.printcolors.printc(src.OK_STATUS)), 3)
851 self.logger.write('\r%s' %
852 ((len(begin_line)+len(endline)+20) * " "), 3)
853 self.logger.write('\r%s%s%s %s' %
856 src.printcolors.printc(src.KO_STATUS),
857 _("Copy of SAT failed: %s" % res_copy)), 3)
859 self.logger.write('\r%s' %
860 ((len(begin_line)+len(endline)+20) * " "), 3)
861 self.logger.write('\r%s%s%s %s' %
864 src.printcolors.printc(src.KO_STATUS),
866 self.logger.write("\n", 3)
868 self.logger.write("\n")
871 def is_occupied(self, hostname):
872 '''Function that returns True if a job is running on
873 the machine defined by its host and its port.
875 :param hostname (str, int): the pair (host, port)
876 :return: the job that is running on the host,
877 or false if there is no job running on the host.
882 for jb in self.ljobs:
883 if jb.machine.host == host and jb.machine.port == port:
888 def update_jobs_states_list(self):
889 '''Function that updates the lists that store the currently
890 running jobs and the jobs that have already finished.
895 jobs_finished_list = []
896 jobs_running_list = []
897 for jb in self.ljobs:
899 jobs_running_list.append(jb)
901 if jb.has_finished():
902 jobs_finished_list.append(jb)
904 nb_job_finished_before = len(self._l_jobs_finished)
905 self._l_jobs_finished = jobs_finished_list
906 self._l_jobs_running = jobs_running_list
908 nb_job_finished_now = len(self._l_jobs_finished)
910 return nb_job_finished_now > nb_job_finished_before
912 def cancel_dependencies_of_failing_jobs(self):
913 '''Function that cancels all the jobs that depend on a failing one.
919 for job in self.ljobs:
920 if job.after is None:
922 father_job = self.find_job_that_has_name(job.after)
923 if father_job is not None and father_job.has_failed():
926 def find_job_that_has_name(self, name):
927 '''Returns the job by its name.
929 :param name str: a job name
930 :return: the job that has the name.
933 for jb in self.ljobs:
936 # the following is executed only if the job was not found
939 def str_of_length(self, text, length):
940 '''Takes a string text of any length and returns
941 the most close string of length "length".
943 :param text str: any string
944 :param length int: a length for the returned string
945 :return: the most close string of length "length"
948 if len(text) > length:
949 text_out = text[:length-3] + '...'
951 diff = length - len(text)
952 before = " " * (diff//2)
953 after = " " * (diff//2 + diff%2)
954 text_out = before + text + after
958 def display_status(self, len_col):
959 '''Takes a lenght and construct the display of the current status
960 of the jobs in an array that has a column for each host.
961 It displays the job that is currently running on the host
964 :param len_col int: the size of the column
970 for host_port in self.lhosts:
971 jb = self.is_occupied(host_port)
972 if not jb: # nothing running on the host
973 empty = self.str_of_length("empty", len_col)
974 display_line += "|" + empty
976 display_line += "|" + src.printcolors.printcInfo(
977 self.str_of_length(jb.name, len_col))
979 self.logger.write("\r" + display_line + "|")
984 '''The main method. Runs all the jobs on every host.
985 For each host, at a given time, only one job can be running.
986 The jobs that have the field after (that contain the job that has
987 to be run before it) are run after the previous job.
988 This method stops when all the jobs are finished.
995 self.logger.write(src.printcolors.printcInfo(
996 _('Executing the jobs :\n')))
998 for host_port in self.lhosts:
1001 if port == 22: # default value
1002 text_line += "|" + self.str_of_length(host, self.len_columns)
1004 text_line += "|" + self.str_of_length(
1005 "("+host+", "+str(port)+")", self.len_columns)
1007 tiret_line = " " + "-"*(len(text_line)-1) + "\n"
1008 self.logger.write(tiret_line)
1009 self.logger.write(text_line + "|\n")
1010 self.logger.write(tiret_line)
1013 # The infinite loop that runs the jobs
1014 l_jobs_not_started = src.deepcopy_list(self.ljobs)
1015 while len(self._l_jobs_finished) != len(self.ljobs):
1016 new_job_start = False
1017 for host_port in self.lhosts:
1019 if self.is_occupied(host_port):
1022 for jb in l_jobs_not_started:
1023 if (jb.machine.host, jb.machine.port) != host_port:
1025 if jb.after == None:
1027 l_jobs_not_started.remove(jb)
1028 new_job_start = True
1031 jb_before = self.find_job_that_has_name(jb.after)
1032 if jb_before is None:
1034 msg = _("This job was not launched because its "
1035 "father is not in the jobs list.")
1039 if jb_before.has_finished():
1041 l_jobs_not_started.remove(jb)
1042 new_job_start = True
1044 self.cancel_dependencies_of_failing_jobs()
1045 new_job_finished = self.update_jobs_states_list()
1047 if new_job_start or new_job_finished:
1049 self.gui.update_xml_files(self.ljobs)
1050 # Display the current status
1051 self.display_status(self.len_columns)
1053 # Make sure that the proc is not entirely busy
1056 self.logger.write("\n")
1057 self.logger.write(tiret_line)
1058 self.logger.write("\n\n")
1061 self.gui.update_xml_files(self.ljobs)
1062 self.gui.last_update()
1064 def write_all_results(self):
1065 '''Display all the jobs outputs.
1071 for jb in self.ljobs:
1072 self.logger.write(src.printcolors.printcLabel(
1073 "#------- Results for job %s -------#\n" % jb.name))
1075 self.logger.write("\n\n")
1078 '''Class to manage the the xml data that can be displayed in a browser to
1091 :param xml_dir_path str: The path to the directory where to put
1092 the xml resulting files
1093 :param l_jobs List: the list of jobs that run today
1094 :param l_jobs_not_today List: the list of jobs that do not run today
1095 :param file_boards str: the file path from which to read the
1098 # The logging instance
1099 self.logger = logger
1101 # The prefix to add to the xml files : date_hour
1102 self.prefix = prefix
1104 # The path of the csv files to read to fill the expected boards
1105 self.file_boards = file_boards
1107 if file_boards != "":
1108 today = datetime.date.weekday(datetime.date.today())
1109 self.parse_csv_boards(today)
1111 self.d_input_boards = {}
1113 # The path of the global xml file
1114 self.xml_dir_path = xml_dir_path
1115 # Initialize the xml files
1116 self.global_name = "global_report"
1117 xml_global_path = os.path.join(self.xml_dir_path,
1118 self.global_name + ".xml")
1119 self.xml_global_file = src.xmlManager.XmlLogFile(xml_global_path,
1122 # Find history for each job
1124 self.find_history(l_jobs, l_jobs_not_today)
1126 # The xml files that corresponds to the boards.
1127 # {name_board : xml_object}}
1128 self.d_xml_board_files = {}
1130 # Create the lines and columns
1131 self.initialize_boards(l_jobs, l_jobs_not_today)
1133 # Write the xml file
1134 self.update_xml_files(l_jobs)
1136 def add_xml_board(self, name):
1137 '''Add a board to the board list
1138 :param name str: the board name
1140 xml_board_path = os.path.join(self.xml_dir_path, name + ".xml")
1141 self.d_xml_board_files[name] = src.xmlManager.XmlLogFile(
1144 self.d_xml_board_files[name].add_simple_node("distributions")
1145 self.d_xml_board_files[name].add_simple_node("applications")
1146 self.d_xml_board_files[name].add_simple_node("board", text=name)
1148 def initialize_boards(self, l_jobs, l_jobs_not_today):
1149 '''Get all the first information needed for each file and write the
1150 first version of the files
1151 :param l_jobs List: the list of jobs that run today
1152 :param l_jobs_not_today List: the list of jobs that do not run today
1154 # Get the boards to fill and put it in a dictionary
1155 # {board_name : xml instance corresponding to the board}
1156 for job in l_jobs + l_jobs_not_today:
1158 if (board is not None and
1159 board not in self.d_xml_board_files.keys()):
1160 self.add_xml_board(board)
1162 # Verify that the boards given as input are done
1163 for board in list(self.d_input_boards.keys()):
1164 if board not in self.d_xml_board_files:
1165 self.add_xml_board(board)
1166 root_node = self.d_xml_board_files[board].xmlroot
1167 src.xmlManager.append_node_attrib(root_node,
1168 {"input_file" : self.file_boards})
1170 # Loop over all jobs in order to get the lines and columns for each
1174 for board in self.d_xml_board_files:
1176 d_application[board] = []
1180 for job in l_jobs + l_jobs_not_today:
            # NOTE(review): fragment — the enclosing method's "def" line and
            # several interior lines (call arguments, node tag names, a few
            # control lines) are missing from this view; code is kept
            # byte-identical, comments only.
            # Collect each distinct (host, port) couple used by a job machine.
            if (job.machine.host, job.machine.port) not in l_hosts_ports:
                l_hosts_ports.append((job.machine.host, job.machine.port))
            distrib = job.machine.distribution
            application = job.application
            board_job = job.board
            # Register the job's distribution and application in the xml file
            # of its board (once per distinct value).
            for board in self.d_xml_board_files:
                if board_job == board:
                    if (distrib not in [None, ''] and
                                            distrib not in d_dist[board]):
                        d_dist[board].append(distrib)
                        src.xmlManager.add_simple_node(
                            self.d_xml_board_files[board].xmlroot.find(
                                                    attrib={"name" : distrib})
                if board_job == board:
                    if (application not in [None, ''] and
                                application not in d_application[board]):
                        d_application[board].append(application)
                        src.xmlManager.add_simple_node(
                            self.d_xml_board_files[board].xmlroot.find(
                                                    "name" : application})
        # Verify that there are no missing application or distribution in the
        # xml board files (regarding the input boards)
        for board in self.d_xml_board_files:
            l_dist = d_dist[board]
            # NOTE(review): the body of this guard (presumably "continue") is
            # on a line missing from this view — TODO confirm
            if board not in self.d_input_boards.keys():
            for dist in self.d_input_boards[board]["rows"]:
                if dist not in l_dist:
                    src.xmlManager.add_simple_node(
                        self.d_xml_board_files[board].xmlroot.find(
                                                    attrib={"name" : dist})
            l_appli = d_application[board]
            for appli in self.d_input_boards[board]["columns"]:
                if appli not in l_appli:
                    src.xmlManager.add_simple_node(
                        self.d_xml_board_files[board].xmlroot.find(
                                                    attrib={"name" : appli})
        # Initialize the hosts_ports node for the global file
        self.xmlhosts_ports = self.xml_global_file.add_simple_node(
        for host, port in l_hosts_ports:
            # one node per machine, named "host:port"
            host_port = "%s:%i" % (host, port)
            src.xmlManager.add_simple_node(self.xmlhosts_ports,
                                           attrib={"name" : host_port})
        # Initialize the jobs node in all files
        for xml_file in [self.xml_global_file] + list(
                                            self.d_xml_board_files.values()):
            xml_jobs = xml_file.add_simple_node("jobs")
            # Get the jobs present in the config file but
            # that will not be launched today
            self.put_jobs_not_today(l_jobs_not_today, xml_jobs)
            # add also the infos node; JobsCommandStatus is flipped to the
            # final status later by last_update()
            xml_file.add_simple_node("infos",
                                     attrib={"name" : "last update",
                                             "JobsCommandStatus" : "running"})
            # and put the history node
            history_node = xml_file.add_simple_node("history")
            name_board = os.path.basename(xml_file.logFile)[:-len(".xml")]
            # search for board files named YYYYMMDD_HHMMSS_<name_board>.xml
            expression = "^[0-9]{8}_+[0-9]{6}_" + name_board + ".xml$"
            oExpr = re.compile(expression)
            # Get the list of xml board files that are in the log directory
            for file_name in os.listdir(self.xml_dir_path):
                if oExpr.search(file_name):
                    date = os.path.basename(file_name).split("_")[0]
                    file_path = os.path.join(self.xml_dir_path, file_name)
                    src.xmlManager.add_simple_node(history_node,
                                                   attrib={"date" : date})
        # Find in each board the squares that needs to be filled regarding the
        # input csv files but that are not covered by a today job
        for board in self.d_input_boards.keys():
            xml_root_board = self.d_xml_board_files[board].xmlroot
            # Find the missing jobs for today
            xml_missing = src.xmlManager.add_simple_node(xml_root_board,
            for row, column in self.d_input_boards[board]["jobs"]:
                # NOTE(review): the loop over today's jobs and the "found"
                # bookkeeping around this test are missing from this view
                if (job.application == column and
                    job.machine.distribution == row):
                    src.xmlManager.add_simple_node(xml_missing,
                                            attrib={"distribution" : row,
                                                    "application" : column })
            # Find the missing jobs not today
            xml_missing_not_today = src.xmlManager.add_simple_node(
                                                     "missing_jobs_not_today")
            for row, column in self.d_input_boards[board]["jobs_not_today"]:
                for job in l_jobs_not_today:
                    if (job.application == column and
                        job.machine.distribution == row):
                        src.xmlManager.add_simple_node(xml_missing_not_today,
                                            attrib={"distribution" : row,
                                                    "application" : column })
    def find_history(self, l_jobs, l_jobs_not_today):
        """Find, for each job, in the existent xml boards the results for the
           job and store them in the dictionary
           self.history = {name_job : list of (date, status, list links)}.

        :param l_jobs List: the list of jobs to run today
        :param l_jobs_not_today List: the list of jobs that do not run today
        """
        # load the all the history: global files are named
        # YYYYMMDD_HHMMSS_<global_name>.xml
        expression = "^[0-9]{8}_+[0-9]{6}_" + self.global_name + ".xml$"
        oExpr = re.compile(expression)
        # Get the list of global xml that are in the log directory
        # NOTE(review): the initialisation of l_globalxml and the opening
        # "try:" of the except clause below are on lines missing from this
        # view — TODO confirm against the complete file
        for file_name in os.listdir(self.xml_dir_path):
            if oExpr.search(file_name):
                file_path = os.path.join(self.xml_dir_path, file_name)
                global_xml = src.xmlManager.ReadXmlFile(file_path)
                l_globalxml.append(global_xml)
            except Exception as e:
                # an unreadable board is skipped with a warning, not fatal
                msg = _("\nWARNING: the file %s can not be read, it will be "
                        "ignored\n%s" % (file_path, e))
                self.logger.write("%s\n" % src.printcolors.printcWarning(
        # Construct the dictionnary self.history
        for job in l_jobs + l_jobs_not_today:
            # NOTE(review): the l_links initialisation and the arguments of
            # find_node_by_attrib are on lines missing from this view
            for global_xml in l_globalxml:
                # the date is the leading YYYYMMDD of the file name
                date = os.path.basename(global_xml.filePath).split("_")[0]
                global_root_node = global_xml.xmlroot.find("jobs")
                job_node = src.xmlManager.find_node_by_attrib(
                    if job_node.find("remote_log_file_path") is not None:
                        link = job_node.find("remote_log_file_path").text
                        res_job = job_node.find("res").text
                        if link != "nothing":
                            l_links.append((date, res_job, link))
            # most recent dates first
            l_links = sorted(l_links, reverse=True)
            self.history[job.name] = l_links
    def put_jobs_not_today(self, l_jobs_not_today, xml_node_jobs):
        '''Add under xml_node_jobs one node per job that is not scheduled to
           run today, carrying its static description and its history links.

        :param xml_node_jobs etree.Element: the node under which the job
                                            nodes are added
        :param l_jobs_not_today List: the list of jobs that do not run today
        '''
        for job in l_jobs_not_today:
            # NOTE(review): the tag-name arguments of some add_simple_node
            # calls below (presumably "job" and "distribution") are on lines
            # missing from this view
            xmlj = src.xmlManager.add_simple_node(xml_node_jobs,
                                                  attrib={"name" : job.name})
            src.xmlManager.add_simple_node(xmlj, "application", job.application)
            src.xmlManager.add_simple_node(xmlj,
                                           job.machine.distribution)
            src.xmlManager.add_simple_node(xmlj, "board", job.board)
            # commands are serialized as one " ; "-separated string
            src.xmlManager.add_simple_node(xmlj,
                                           "commands", " ; ".join(job.commands))
            src.xmlManager.add_simple_node(xmlj, "state", "Not today")
            src.xmlManager.add_simple_node(xmlj, "machine", job.machine.name)
            src.xmlManager.add_simple_node(xmlj, "host", job.machine.host)
            src.xmlManager.add_simple_node(xmlj, "port", str(job.machine.port))
            src.xmlManager.add_simple_node(xmlj, "user", job.machine.user)
            src.xmlManager.add_simple_node(xmlj, "sat_path",
                                           job.machine.sat_path)
            # history is computed beforehand by find_history()
            xml_history = src.xmlManager.add_simple_node(xmlj, "history")
            # NOTE(review): the "if i==0 / else" branching and the remaining
            # attributes of the two calls below are on lines missing from
            # this view
            for i, (date, res_job, link) in enumerate(self.history[job.name]):
                # tag the first one (the last one)
                src.xmlManager.add_simple_node(xml_history,
                                               attrib={"date" : date,
                src.xmlManager.add_simple_node(xml_history,
                                               attrib={"date" : date,
    def parse_csv_boards(self, today):
        """Parse the csv file that describes the boards to produce and fill
           the dict self.d_input_boards with its content.

        :param today int: the current day of the week
        """
        # open the csv file and read its content
        with open(self.file_boards, 'r') as f:
            reader = csv.reader(f,delimiter=CSV_DELIMITER)
            # NOTE(review): the lines materializing the reader into l_read
            # are missing from this view
        # get the delimiter for the boards (empty line)
        boards_delimiter = [''] * len(l_read[0])
        # Make the list of boards, by splitting with the delimiter
        l_boards = [list(y) for x, y in itertools.groupby(l_read,
                                    lambda z: z == boards_delimiter) if not x]
        # loop over the csv lists of lines and get the rows, columns and jobs
        # NOTE(review): the initialisation of d_boards and of the per-board
        # accumulators (rows, jobs, jobs_not_today) is on lines missing from
        # this view
        for input_board in l_boards:
            # first cell of the first line: the board name
            board_name = input_board[0][0]
            # rest of the first line: the columns (applications)
            columns = input_board[0][1:]
            for line in input_board[1:]:
                # NOTE(review): the assignment of "row" (first cell of the
                # line, the distribution) is on a line missing from this view
                for i, square in enumerate(line[1:]):
                    # a square lists the week days on which the job runs
                    days = square.split(DAYS_SEPARATOR)
                    days = [int(day) for day in days]
                    job = (row, columns[i])
                    # NOTE(review): the "today in days" dispatch between jobs
                    # and jobs_not_today is on lines missing from this view
                    jobs_not_today.append(job)
            d_boards[board_name] = {"rows" : rows,
                                    "columns" : columns,
                                    "jobs_not_today" : jobs_not_today}
        self.d_input_boards = d_boards
1449 def update_xml_files(self, l_jobs):
1450 '''Write all the xml files with updated information about the jobs
1452 :param l_jobs List: the list of jobs that run today
1454 for xml_file in [self.xml_global_file] + list(
1455 self.d_xml_board_files.values()):
1456 self.update_xml_file(l_jobs, xml_file)
1459 self.write_xml_files()
    def update_xml_file(self, l_jobs, xml_file):
        '''update information about the jobs for the file xml_file

        :param l_jobs List: the list of jobs that run today
        :param xml_file xmlManager.XmlLogFile: the xml instance to update
        '''
        xml_node_jobs = xml_file.xmlroot.find('jobs')
        # Update the job names and status node
        # NOTE(review): the "for job in l_jobs:" header of the body below is
        # on a line missing from this view
        # Find the node corresponding to the job and delete it
        # in order to recreate it
        for xmljob in xml_node_jobs.findall('job'):
            if xmljob.attrib['name'] == job.name:
                xml_node_jobs.remove(xmljob)
        # human-readable begin/end timestamps of the job
        T0 = time.strftime('%Y-%m-%d %H:%M:%S',
                           time.localtime(job._T0))
        Tf = time.strftime('%Y-%m-%d %H:%M:%S',
                           time.localtime(job._Tf))
        # recreate the job node
        xmlj = src.xmlManager.add_simple_node(xml_node_jobs,
                                              attrib={"name" : job.name})
        src.xmlManager.add_simple_node(xmlj, "machine", job.machine.name)
        src.xmlManager.add_simple_node(xmlj, "host", job.machine.host)
        src.xmlManager.add_simple_node(xmlj, "port", str(job.machine.port))
        src.xmlManager.add_simple_node(xmlj, "user", job.machine.user)
        # history links computed beforehand by find_history()
        xml_history = src.xmlManager.add_simple_node(xmlj, "history")
        for date, res_job, link in self.history[job.name]:
            # NOTE(review): the remaining attributes of this call are on
            # lines missing from this view
            src.xmlManager.add_simple_node(xml_history,
                                           attrib={"date" : date,
        src.xmlManager.add_simple_node(xmlj, "sat_path",
                                       job.machine.sat_path)
        src.xmlManager.add_simple_node(xmlj, "application", job.application)
        src.xmlManager.add_simple_node(xmlj, "distribution",
                                       job.machine.distribution)
        src.xmlManager.add_simple_node(xmlj, "board", job.board)
        src.xmlManager.add_simple_node(xmlj, "timeout", str(job.timeout))
        src.xmlManager.add_simple_node(xmlj, "commands",
                                       " ; ".join(job.commands))
        src.xmlManager.add_simple_node(xmlj, "state", job.get_status())
        src.xmlManager.add_simple_node(xmlj, "begin", T0)
        src.xmlManager.add_simple_node(xmlj, "end", Tf)
        # colour escape sequences are stripped before storing the outputs
        src.xmlManager.add_simple_node(xmlj, "out",
                                       src.printcolors.cleancolor(job.out))
        src.xmlManager.add_simple_node(xmlj, "err",
                                       src.printcolors.cleancolor(job.err))
        src.xmlManager.add_simple_node(xmlj, "res", str(job.res_job))
        if len(job.remote_log_files) > 0:
            src.xmlManager.add_simple_node(xmlj,
                                           "remote_log_file_path",
                                           job.remote_log_files[0])
            # NOTE(review): the "else:" line and the fallback value of this
            # second call are on lines missing from this view
            src.xmlManager.add_simple_node(xmlj,
                                           "remote_log_file_path",
        # Search for the test log if there is any
        l_test_log_files = self.find_test_log(job.remote_log_files)
        xml_test = src.xmlManager.add_simple_node(xmlj,
                                                  "test_log_file_path")
        for test_log_path, res_test, nb_fails in l_test_log_files:
            # NOTE(review): tag/text arguments of this call are on lines
            # missing from this view
            test_path_node = src.xmlManager.add_simple_node(xml_test,
            test_path_node.attrib["res"] = res_test
            test_path_node.attrib["nb_fails"] = nb_fails
        xmlafter = src.xmlManager.add_simple_node(xmlj, "after", job.after)
        # get the job father
        if job.after is not None:
            # NOTE(review): the loop over the jobs and the job_father
            # initialisation are on lines missing from this view
            if jb.name == job.after:
            if (job_father is not None and
                len(job_father.remote_log_files) > 0):
                link = job_father.remote_log_files[0]
                src.xmlManager.append_node_attrib(xmlafter, {"link" : link})
        # Verify that the job is to be done today regarding the input csv
        if job.board and job.board in self.d_input_boards.keys():
            for dist, appli in self.d_input_boards[job.board]["jobs"]:
                if (job.machine.distribution == dist
                    and job.application == appli):
                    # NOTE(review): tag names/values of these two calls are on
                    # lines missing from this view
                    src.xmlManager.add_simple_node(xmlj,
                    src.xmlManager.add_simple_node(xmlj,
        # Update the "last update" timestamp in the infos node
        xml_node_infos = xml_file.xmlroot.find('infos')
        src.xmlManager.append_node_attrib(xml_node_infos,
               datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")})
1578 def find_test_log(self, l_remote_log_files):
1579 '''Find if there is a test log (board) in the remote log files and
1580 the path to it. There can be several test command, so the result is
1583 :param l_remote_log_files List: the list of all remote log files
1584 :return: the list of (test log files path, res of the command)
1588 for file_path in l_remote_log_files:
1589 dirname = os.path.basename(os.path.dirname(file_path))
1590 file_name = os.path.basename(file_path)
1591 regex = src.logger.log_all_command_file_expression
1592 oExpr = re.compile(regex)
1593 if dirname == "TEST" and oExpr.search(file_name):
1594 # find the res of the command
1595 prod_node = etree.parse(file_path).getroot().find("product")
1596 res_test = prod_node.attrib["global_res"]
1597 # find the number of fails
1598 testbase_node = prod_node.find("tests").find("testbase")
1599 nb_fails = int(testbase_node.attrib["failed"])
1600 # put the file path, the res of the test command and the number
1601 # of fails in the output
1602 res.append((file_path, res_test, nb_fails))
1606 def last_update(self, finish_status = "finished"):
1607 '''update information about the jobs for the file xml_file
1609 :param l_jobs List: the list of jobs that run today
1610 :param xml_file xmlManager.XmlLogFile: the xml instance to update
1612 for xml_file in [self.xml_global_file] + list(self.d_xml_board_files.values()):
1613 xml_node_infos = xml_file.xmlroot.find('infos')
1614 src.xmlManager.append_node_attrib(xml_node_infos,
1615 attrib={"JobsCommandStatus" : finish_status})
1617 self.write_xml_files()
1619 def write_xml_file(self, xml_file, stylesheet):
1620 ''' Write one xml file and the same file with prefix
1622 xml_file.write_tree(stylesheet)
1623 file_path = xml_file.logFile
1624 file_dir = os.path.dirname(file_path)
1625 file_name = os.path.basename(file_path)
1626 file_name_with_prefix = self.prefix + "_" + file_name
1627 xml_file.write_tree(stylesheet, os.path.join(file_dir,
1628 file_name_with_prefix))
1630 def write_xml_files(self):
1631 ''' Write the xml files
1633 self.write_xml_file(self.xml_global_file, STYLESHEET_GLOBAL)
1634 for xml_file in self.d_xml_board_files.values():
1635 self.write_xml_file(xml_file, STYLESHEET_BOARD)
def get_config_file_path(job_config_name, l_cfg_dir):
    '''Find the path to a jobs configuration file.

    If job_config_name is already an existing path ending in ".pyconf" it is
    returned as is. Otherwise the file (with a ".pyconf" extension appended
    when missing) is searched in the directories of l_cfg_dir; the first
    match wins.

    :param job_config_name str: the name or path of the configuration file
    :param l_cfg_dir List: the list of directories in which to search
    :return: (found, path) where found is a boolean and path is the selected
             file path (the last tried candidate when not found)
    :rtype: (bool, str)
    '''
    found = False
    file_jobs_cfg = None
    if os.path.exists(job_config_name) and job_config_name.endswith(".pyconf"):
        # a direct path to an existing .pyconf file was given
        found = True
        file_jobs_cfg = job_config_name
    else:
        # search the file in the configuration directories
        for cfg_dir in l_cfg_dir:
            file_jobs_cfg = os.path.join(cfg_dir, job_config_name)
            if not file_jobs_cfg.endswith('.pyconf'):
                file_jobs_cfg += '.pyconf'
            if not os.path.exists(file_jobs_cfg):
                continue
            found = True
            break
    return found, file_jobs_cfg
def develop_factorized_jobs(config_jobs):
    '''Expand, in place, the factorized jobs of a jobs configuration.

    A job whose "machine" key is a list is duplicated into one job per
    machine; a machine itself given as a list redefines the days ("when")
    of that copy. Example:
    machine : ["CO7.2 physique", ["CO6.4 physique", $MONDAY, $TUESDAY ], "FD22"]

    :param config_jobs Config: the config corresponding to the jobs
                               description; its "jobs" list is replaced by
                               the developed list
    '''
    developed_jobs_list = []
    for jb in config_jobs.jobs:
        # case where the job is not factorized: keep it as is
        if isinstance(jb.machine, str):
            developed_jobs_list.append(jb)
            continue
        # Case where the job must be developed: one copy per machine
        name_job = jb.name
        for machine in jb.machine:
            new_job = src.pyconf.deepCopyMapping(jb)
            if isinstance(machine, str):
                # a job on this machine for all the days of the "when"
                # variable
                new_job.machine = machine
                new_job.name = name_job + " / " + machine
            else:
                # the days are redefined: [machine_name, day1, day2, ...]
                new_job.machine = machine[0]
                new_job.name = name_job + " / " + machine[0]
                new_job.when = machine[1:]
            developed_jobs_list.append(new_job)

    config_jobs.jobs = developed_jobs_list
# Describes the command
def description():
    '''method called when salomeTools is invoked with --help: returns the
       text describing the jobs command.

    :return: The description of the jobs command.
    :rtype: str
    '''
    return _("The jobs command launches maintenances that are described"
             " in the dedicated jobs configuration file.\n\nexample:\nsat "
             "jobs --name my_jobs --publish")
def run(args, runner, logger):
    '''method called when salomeTools is invoked as "sat jobs <options>".

    NOTE(review): several interior lines of this function are missing from
    this view (option-handling branches, the Jobs/Gui constructor arguments,
    the try/finally scaffolding around the jobs loop, some returns); the
    code is kept byte-identical, comments only.
    '''
    (options, args) = parser.parse_args(args)
    l_cfg_dir = runner.cfg.PATHS.JOBPATH
    # list option : display all the available config files
    for cfg_dir in l_cfg_dir:
        if not options.no_label:
            logger.write("------ %s\n" %
                         src.printcolors.printcHeader(cfg_dir))
        # NOTE(review): the bodies of the two guards below (presumably
        # "continue") are on lines missing from this view
        if not os.path.exists(cfg_dir):
        for f in sorted(os.listdir(cfg_dir)):
            if not f.endswith('.pyconf'):
            logger.write("%s\n" % cfilename)
    # Make sure the jobs_config option has been called
    if not options.jobs_cfg:
        message = _("The option --jobs_config is required\n")
        src.printcolors.printcError(message)
    # Find the file in the directories, unless it is a full path
    # merge all in a config
    merger = src.pyconf.ConfigMerger()
    config_jobs = src.pyconf.Config()
    l_conf_files_path = []
    for config_file in options.jobs_cfg:
        found, file_jobs_cfg = get_config_file_path(config_file, l_cfg_dir)
        # NOTE(review): the "if not found:" guard is on a line missing from
        # this view
            msg = _("The file configuration %s was not found."
                    "\nUse the --list option to get the "
                    "possible files." % config_file)
            logger.write("%s\n" % src.printcolors.printcError(msg), 1)
        l_conf_files_path.append(file_jobs_cfg)
        # Read the config that is in the file
        one_config_jobs = src.read_config_from_a_file(file_jobs_cfg)
        merger.merge(config_jobs, one_config_jobs)
    # info table shown to the user before running (the "info = [" opening is
    # on a line missing from this view)
    (_("Platform"), runner.cfg.VARS.dist),
    (_("Files containing the jobs configuration"), l_conf_files_path)
    src.print_info(logger, info)
    if options.only_jobs:
        # keep only the jobs explicitly requested on the command line
        l_jb = src.pyconf.Sequence()
        for jb in config_jobs.jobs:
            if jb.name in options.only_jobs:
                "Job that was given in only_jobs option parameters\n")
        config_jobs.jobs = l_jb
    # Parse the config jobs in order to develop all the factorized jobs
    develop_factorized_jobs(config_jobs)
    # Make a unique file that contain all the jobs in order to use it
    name_pyconf = "_".join([os.path.basename(path)[:-len('.pyconf')]
                            for path in l_conf_files_path]) + ".pyconf"
    path_pyconf = src.get_tmp_filename(runner.cfg, name_pyconf)
    # NOTE(review): "file" is the Python 2 built-in; under Python 3 this
    # needs open() — TODO confirm the targeted interpreter; the f.close()
    # is presumably on a line missing from this view
    f = file( path_pyconf , 'w')
    config_jobs.__save__(f)
    # log the paramiko problems
    log_dir = src.get_log_path(runner.cfg)
    paramiko_log_dir_path = os.path.join(log_dir, "JOBS")
    src.ensure_path_exists(paramiko_log_dir_path)
    paramiko.util.log_to_file(os.path.join(paramiko_log_dir_path,
                                           logger.txtFileName))
    # Instantiate the object that manages today's jobs (constructor
    # arguments are on lines missing from this view)
    today_jobs = Jobs(runner,
    # SSH connection to all machines
    today_jobs.ssh_connection_all_machines()
    if options.test_connection:
    logger.write(src.printcolors.printcInfo(
                                    _("Initialize the xml boards : ")), 5)
    # Copy the stylesheets in the log directory
    xsl_dir = os.path.join(runner.cfg.VARS.srcDir, 'xsl')
    files_to_copy.append(os.path.join(xsl_dir, STYLESHEET_GLOBAL))
    files_to_copy.append(os.path.join(xsl_dir, STYLESHEET_BOARD))
    files_to_copy.append(os.path.join(xsl_dir, "command.xsl"))
    files_to_copy.append(os.path.join(xsl_dir, "running.gif"))
    for file_path in files_to_copy:
        shutil.copy2(file_path, log_dir)
    # Instanciate the Gui in order to produce the xml files that contain all
    # the information (the "gui = Gui(" opening is on a line missing from
    # this view)
                  today_jobs.ljobs_not_today,
                  runner.cfg.VARS.datehour,
                  file_boards = options.input_boards)
    logger.write(src.printcolors.printcSuccess("OK"), 5)
    logger.write("\n\n", 5)
    # Display the list of the xml files
    logger.write(src.printcolors.printcInfo(("Here is the list of published"
    logger.write("%s\n" % gui.xml_global_file.logFile, 4)
    for board in gui.d_xml_board_files.keys():
        file_path = gui.d_xml_board_files[board].logFile
        file_name = os.path.basename(file_path)
        logger.write("%s\n" % file_path, 4)
        logger.add_link(file_name, "board", 0, board)
    logger.write("\n", 4)
    today_jobs.gui = gui
    # Run all the jobs contained in config_jobs (the opening "try:" of the
    # following except clauses is on a line missing from this view)
    today_jobs.run_jobs()
    except KeyboardInterrupt:
        # user interruption: report, then fall through to the cleanup below
        logger.write("\n\n%s\n\n" %
                (src.printcolors.printcWarning(_("Forced interruption"))), 1)
    except Exception as e:
        msg = _("CRITICAL ERROR: The jobs loop has been interrupted\n")
        logger.write("\n\n%s\n" % src.printcolors.printcError(msg) )
        logger.write("%s\n" % str(e))
        # the traceback is rendered through a temp file so it can be written
        # to the log (the lines reading the file back into "stack" are
        # missing from this view)
        __, __, exc_traceback = sys.exc_info()
        fp = tempfile.TemporaryFile()
        traceback.print_tb(exc_traceback, file=fp)
        logger.write("\nTRACEBACK: %s\n" % stack.replace('"',"'"), 1)
        msg = _("Killing the running jobs and trying"
                " to get the corresponding logs\n")
        logger.write(src.printcolors.printcWarning(msg))
        # find the potential not finished jobs and kill them
        for jb in today_jobs.ljobs:
            if not jb.has_finished():
                # NOTE(review): the "try:" around the kill is on a line
                # missing from this view
                jb.kill_remote_process()
            except Exception as e:
                msg = _("Failed to kill job %s: %s\n" % (jb.name, e))
                logger.write(src.printcolors.printcWarning(msg))
            if jb.res_job != "0":
        # final status written into every xml file; the branching between
        # the interrupted and the normal case is on lines missing from
        # this view
        today_jobs.gui.last_update(_("Forced interruption"))
        today_jobs.gui.last_update()
        # Output the results
        today_jobs.write_all_results()
        # Remove the temporary pyconf file
        if os.path.exists(path_pyconf):
            os.remove(path_pyconf)