3 # Copyright (C) 2010-2013 CEA/DEN
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 # Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# Fallback value bound to the name "paramiko" when the real import fails;
# the surrounding try/except import block is elided in this view.
paramiko = "import paramiko impossible"

import src.ElementTree as etree

# XSL stylesheets referenced by the generated xml reports so that they can
# be rendered in a browser.
STYLESHEET_GLOBAL = "jobs_global_report.xsl"
STYLESHEET_BOARD = "jobs_board_report.xsl"

# Command-line options of the "sat jobs" command.
# NOTE(review): some option declarations below are missing their trailing
# default-value argument / closing parenthesis — those lines are elided in
# this view of the file.
parser = src.options.Options()

parser.add_option('n', 'name', 'list2', 'jobs_cfg',
                  _('Mandatory: The name of the config file that contains'
                  ' the jobs configuration. Can be a list.'))
parser.add_option('o', 'only_jobs', 'list2', 'only_jobs',
                  _('Optional: the list of jobs to launch, by their name. '))
parser.add_option('l', 'list', 'boolean', 'list',
                  _('Optional: list all available config files.'))
parser.add_option('t', 'test_connection', 'boolean', 'test_connection',
                  _("Optional: try to connect to the machines. "
                    "Not executing the jobs."),
parser.add_option('p', 'publish', 'boolean', 'publish',
                  _("Optional: generate an xml file that can be read in a "
                    "browser to display the jobs status."),
parser.add_option('i', 'input_boards', 'string', 'input_boards', _("Optional: "
                                        "the path to csv file that contain "
                                        "the expected boards."),"")
parser.add_option('', 'completion', 'boolean', 'no_label',
                  _("Optional (internal use): do not print labels, Works only "
class Machine(object):
    '''Class to manage a ssh connection on a machine.

    Wraps a paramiko.SSHClient plus an sftp channel and remembers whether
    the last connection attempt succeeded.
    '''
    # NOTE(review): the __init__ signature is elided in this view; only its
    # last keyword parameter is visible below. The constructor presumably
    # also stores name/host/port/user (read by successfully_connected) —
    # confirm against the full file.
                 sat_path="salomeTools"):
        self.distribution = None # Will be filled after copying SAT on the machine
        self.password = passwd
        self.sat_path = sat_path
        self.ssh = paramiko.SSHClient()
        # Tri-state: None = no connection attempted yet, then True/False.
        self._connection_successful = None

    def connect(self, logger):
        '''Initiate the ssh connection to the remote machine

        :param logger src.logger.Logger: The logger instance
        '''
        self._connection_successful = False
        self.ssh.load_system_host_keys()
        # Accept unknown host keys automatically (no interactive prompt).
        self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        # NOTE(review): the enclosing "try:" line is elided in this view.
            self.ssh.connect(self.host,
                             password = self.password)
        except paramiko.AuthenticationException:
            message = src.KO_STATUS + _("Authentication failed")
        except paramiko.BadHostKeyException:
            message = (src.KO_STATUS +
                       _("The server's host key could not be verified"))
        except paramiko.SSHException:
            message = ( _("SSHException error connecting or "
                          "establishing an SSH session"))
            # NOTE(review): a broader except clause and the success branch
            # ("else:") are elided around the two lines below.
            message = ( _("Error connecting or establishing an SSH session"))
            self._connection_successful = True

    def successfully_connected(self, logger):
        '''Verify if the connection to the remote machine has succeed

        :param logger src.logger.Logger: The logger instance
        :return: True if the connection has succeed, False if not
        '''
        # Warn when queried before connect() was ever called.
        if self._connection_successful == None:
            message = _("Warning : trying to ask if the connection to "
            "(name: %s host: %s, port: %s, user: %s) is OK whereas there were"
            " no connection request" %
            (self.name, self.host, self.port, self.user))
            logger.write( src.printcolors.printcWarning(message))
        return self._connection_successful

    def copy_sat(self, sat_local_path, job_file):
        '''Copy salomeTools to the remote machine in self.sat_path

        :param sat_local_path str: local path of the salomeTools installation
        :param job_file str: path of the jobs pyconf file to upload
        '''
        # NOTE(review): the enclosing "try:" and the return value handling
        # are elided in this view.
            # open a sftp connection
            self.sftp = self.ssh.open_sftp()
            # Create the sat directory on remote machine if it is not existing
            self.mkdir(self.sat_path, ignore_existing=True)
            self.put_dir(sat_local_path, self.sat_path, filters = ['.git'])
            # put the job configuration file in order to make it reachable
            # on the remote machine
            remote_job_file_name = ".%s" % os.path.basename(job_file)
            self.sftp.put(job_file, os.path.join(self.sat_path,
                                                 remote_job_file_name))
        except Exception as e:
            self._connection_successful = False

    def put_dir(self, source, target, filters = []):
        ''' Uploads the contents of the source directory to the target path. The
            target directory needs to exists. All sub-directories in source are
            created under target.

        NOTE(review): "filters = []" is a mutable default argument; it appears
        to be read-only here, but confirm against the elided body before
        relying on it.
        '''
        for item in os.listdir(source):
            # NOTE(review): the filter test on "item" is elided in this view.
            source_path = os.path.join(source, item)
            destination_path = os.path.join(target, item)
            # Symbolic links are re-created remotely instead of copied.
            if os.path.islink(source_path):
                linkto = os.readlink(source_path)
                    self.sftp.symlink(linkto, destination_path)
                    self.sftp.chmod(destination_path,
                                    os.stat(source_path).st_mode)
            # Plain files are uploaded with their local permission bits.
            if os.path.isfile(source_path):
                self.sftp.put(source_path, destination_path)
                self.sftp.chmod(destination_path,
                                os.stat(source_path).st_mode)
                # Directories: create remotely then recurse.
                self.mkdir(destination_path, ignore_existing=True)
                self.put_dir(source_path, destination_path)

    def mkdir(self, path, mode=511, ignore_existing=False):
        ''' Augments mkdir by adding an option to not fail
            if the remote directory already exists (mode 511 == 0o777).
        '''
            self.sftp.mkdir(path, mode)

    def exec_command(self, command, logger):
        '''Execute the command on the remote machine

        :param command str: The command to be run
        :param logger src.logger.Logger: The logger instance
        :return: the stdin, stdout, and stderr of the executing command,
                 or (None, None, None) on failure
        :rtype: (paramiko.channel.ChannelFile, paramiko.channel.ChannelFile,
                 paramiko.channel.ChannelFile)
        '''
            # Does not wait the end of the command
            (stdin, stdout, stderr) = self.ssh.exec_command(command)
        except paramiko.SSHException:
            message = src.KO_STATUS + _(
                            ": the server failed to execute the command\n")
            logger.write( src.printcolors.printcError(message))
            return (None, None, None)
            # NOTE(review): a broader except clause is elided before the
            # two lines below.
            logger.write( src.printcolors.printcError(src.KO_STATUS + '\n'))
            return (None, None, None)
            return (stdin, stdout, stderr)

        # NOTE(review): the "def close(self):" line is elided in this view;
        # only its docstring remains below.
        '''Close the ssh connection
        '''

    def write_info(self, logger):
        '''Prints the informations relative to the machine in the logger
           (terminal traces and log file)

        :param logger src.logger.Logger: The logger instance
        '''
        logger.write("host : " + self.host + "\n")
        logger.write("port : " + str(self.port) + "\n")
        logger.write("user : " + str(self.user) + "\n")
        if self.successfully_connected(logger):
            status = src.OK_STATUS
            # NOTE(review): the "else:" line is elided before this branch.
            status = src.KO_STATUS
        logger.write("Connection : " + status + "\n\n")
    # NOTE(review): the "class Job" header and most of __init__'s signature
    # are elided in this view of the file.
    '''Class to manage one job: builds the remote "sat job" command line,
       launches it over ssh, tracks its state (begun/finished/timeout/failed)
       and fetches the produced log files back.
    '''
        self.machine = machine
        self.timeout = timeout
        self.application = application
        # The list of log files to download from the remote machine
        self.remote_log_files = []

        # The remote command status
        # -1 means that it has not been launched,
        # 0 means success and 1 means fail
        self.cancelled = False

        # State flags of the job lifecycle.
        self._has_begun = False
        self._has_finished = False
        self._has_timouted = False
        self._stdin = None # Store the command inputs field
        self._stdout = None # Store the command outputs field
        self._stderr = None # Store the command errors field

        # Name of the jobs pyconf file once copied on the remote machine.
        self.name_remote_jobs_pyconf = ".%s" % os.path.basename(job_file_path)
        self.commands = commands
        # Full remote command: run sat "job" with the uploaded configuration.
        # NOTE(review): some pieces of this expression (redirection of the
        # log-file list, etc.) are elided in this view.
        self.command = (os.path.join(self.machine.sat_path, "sat") +
                        os.path.join(self.machine.sat_path,
                                     "list_log_files.txt") +
                        " job --jobs_config " +
                        os.path.join(self.machine.sat_path,
                                     self.name_remote_jobs_pyconf) +
                        " --name " + self.name)
            # Optional prefix wraps the whole command in double quotes.
            self.command = prefix + ' "' + self.command +'"'

    # NOTE(review): the "def get_pids(self):" line is elided in this view.
        """ Get the pid(s) corresponding to the command that have been launched
            On the remote machine

        :return: The list of integers corresponding to the found pids
        """
        # grep the full command line in ps output and keep only the pid column.
        cmd_pid = 'ps aux | grep "' + self.command + '" | awk \'{print $2}\''
        (_, out_pid, _) = self.machine.exec_command(cmd_pid, self.logger)
        pids_cmd = out_pid.readlines()
        pids_cmd = [str(src.only_numbers(pid)) for pid in pids_cmd]

    def kill_remote_process(self, wait=1):
        '''Kills the process on the remote machine.

        :return: (the output of the kill, the error of the kill)
        '''
        pids = self.get_pids()
            # NOTE(review): the emptiness test on pids is elided above.
            return ("Unable to get the pid of the command.", "")
        # SIGINT (kill -2) each found pid.
        cmd_kill = " ; ".join([("kill -2 " + pid) for pid in pids])
        (_, out_kill, err_kill) = self.machine.exec_command(cmd_kill,
        return (out_kill.read().decode(), err_kill.read().decode())

    # NOTE(review): the "def has_begun(self):" line is elided in this view.
        '''Returns True if the job has already begun

        :return: True if the job has already begun
        '''
        return self._has_begun

    def has_finished(self):
        '''Returns True if the job has already finished
           (i.e. all the commands have been executed)
           If it is finished, the outputs are stored in the fields out and err.

        :return: True if the job has already finished
        '''
        # If the method has already been called and returned True
        if self._has_finished:
        # If the job has not begun yet
        if not self.has_begun():
        # The ssh channel is closed once the remote command terminates.
        if self._stdout.channel.closed:
            self._has_finished = True
            # Store the result outputs
            self.out += self._stdout.read().decode()
            self.err += self._stderr.read().decode()
            self._Tf = time.time()
            # And get the remote command status and log files
            # NOTE(review): the "try:" and the get_log_files() call are
            # elided before this handler.
            except Exception as e:
                self.err += _("Unable to get remote log files: %s" % e)

        return self._has_finished

    def get_log_files(self):
        """Get the log files produced by the command launched
           on the remote machine, and put it in the log directory of the user,
           so they can be accessible from the local log browser.
        """
        # Do not get the files if the command is not finished
        if not self.has_finished():
            msg = _("Trying to get log files whereas the job is not finished.")
            self.logger.write(src.printcolors.printcWarning(msg))

        # First get the file that contains the list of log files to get
        tmp_file_path = src.get_tmp_filename(self.config, "list_log_files.txt")
        remote_path = os.path.join(self.machine.sat_path, "list_log_files.txt")
        self.machine.sftp.get(

        # Read the file and get the result of the command and all the log files
        fstream_tmp = open(tmp_file_path, "r")
        file_lines = fstream_tmp.readlines()
        file_lines = [line.replace("\n", "") for line in file_lines]
        os.remove(tmp_file_path)

            # The first line is the result of the command (0 success or 1 fail)
            self.res_job = file_lines[0]
        except Exception as e:
            self.err += _("Unable to get status from remote file %s: %s" %
                          (remote_path, str(e)))

        for i, job_path_remote in enumerate(file_lines[1:]):
                # For each command, there is two files to get :
                # 1- The xml file describing the command and giving the
                #    internal traces
                # 2- The txt file containing the system command traces (like
                #    traces produced by the "make" command)
                # 3- In case of the test command, there is another file to get :
                #    the xml board that contain the test results
                dirname = os.path.basename(os.path.dirname(job_path_remote))
                if dirname != 'OUT' and dirname != 'TEST':
                    local_path = os.path.join(os.path.dirname(
                                                    self.logger.logFilePath),
                                        os.path.basename(job_path_remote))
                    if i==0: # The first is the job command
                        self.logger.add_link(os.path.basename(job_path_remote),
                elif dirname == 'OUT':
                    local_path = os.path.join(os.path.dirname(
                                                    self.logger.logFilePath),
                                        os.path.basename(job_path_remote))
                elif dirname == 'TEST':
                    local_path = os.path.join(os.path.dirname(
                                                    self.logger.logFilePath),
                                        os.path.basename(job_path_remote))
                # Only download files not already present locally.
                if not os.path.exists(local_path):
                    self.machine.sftp.get(job_path_remote, local_path)
                    self.remote_log_files.append(local_path)
            except Exception as e:
                self.err += _("Unable to get %s log file from remote: %s" %
                              (str(job_path_remote),

    def has_failed(self):
        '''Returns True if the job has failed.
           A job is considered as failed if the machine could not be reached,
           if the remote command failed,
           or if the job finished with a time out.

        :return: True if the job has failed
        '''
        # NOTE(review): the return statements of each branch are elided in
        # this view; only the guard conditions remain.
        if not self.has_finished():
        if not self.machine.successfully_connected(self.logger):
        if self.is_timeout():
        if self.res_job == "1":

    # NOTE(review): the "def cancel(self):" line is elided in this view.
        """In case of a failing job, one has to cancel every job that depend
           on it. This method put the job as failed and will not be executed.
        """
        self._has_begun = True
        self._has_finished = True
        self.cancelled = True
        self.out += _("This job was not launched because its father has failed.")
        self.err += _("This job was not launched because its father has failed.")

    def is_running(self):
        '''Returns True if the job commands are running

        :return: True if the job is running
        '''
        return self.has_begun() and not self.has_finished()

    def is_timeout(self):
        '''Returns True if the job commands has finished with timeout

        :return: True if the job has finished with timeout
        '''
        return self._has_timouted

    def time_elapsed(self):
        """Get the time elapsed since the job launching

        :return: The number of seconds
        """
        if not self.has_begun():
        return T_now - self._T0

    def check_time(self):
        """Verify that the job has not exceeded its timeout.
           If it has, kill the remote command and consider the job as finished.
        """
        if not self.has_begun():
        if self.time_elapsed() > self.timeout:
            self._has_finished = True
            self._has_timouted = True
            self._Tf = time.time()
            # Interrupt the remote process and keep the kill trace.
            (out_kill, __) = self.kill_remote_process()
            self.out += "TIMEOUT \n" + out_kill
            self.err += "TIMEOUT : %s seconds elapsed\n" % str(self.timeout)
            # NOTE(review): the "try: self.get_log_files()" lines are elided
            # before this handler.
            except Exception as e:
                self.err += _("Unable to get remote log files!\n%s\n" % str(e))

    def total_duration(self):
        """Give the total duration of the job

        :return: the total duration of the job in seconds
        """
        return self._Tf - self._T0

    # NOTE(review): the "def run(self):" line is elided in this view.
        """Launch the job by executing the remote command.
        """
        # Prevent multiple run
            msg = _("Warning: A job can only be launched one time")
            msg2 = _("Trying to launch the job \"%s\" whereas it has "
                     "already been launched." % self.name)
            self.logger.write(src.printcolors.printcWarning("%s\n%s\n" % (msg,

        # Do not execute the command if the machine could not be reached
        if not self.machine.successfully_connected(self.logger):
            self._has_finished = True
            self.err += ("Connection to machine (name : %s, host: %s, port:"
                         " %s, user: %s) has failed\nUse the log command "
                         "to get more information."
                         % (self.machine.name,
            # Usual case : Launch the command on remote machine
            self._T0 = time.time()
            self._stdin, self._stdout, self._stderr = self.machine.exec_command(
            # If the results are not initialized, finish the job
            if (self._stdin, self._stdout, self._stderr) == (None, None, None):
                self._has_finished = True
                self._Tf = time.time()
                self.err += "The server failed to execute the command"

        # Put the beginning flag to true.
        self._has_begun = True

    def write_results(self):
        """Display on the terminal all the job's information
        """
        self.logger.write("name : " + self.name + "\n")
        self.logger.write("after : %s\n" % self.after)
        self.logger.write("Time elapsed : %4imin %2is \n" %
                    (self.total_duration()//60 , self.total_duration()%60))
        self.logger.write("Begin time : %s\n" %
                          time.strftime('%Y-%m-%d %H:%M:%S',
                                        time.localtime(self._T0)) )
        self.logger.write("End time : %s\n\n" %
                          time.strftime('%Y-%m-%d %H:%M:%S',
                                        time.localtime(self._Tf)) )

        machine_head = "Informations about connection :\n"
        underline = (len(machine_head) - 2) * "-"
        self.logger.write(src.printcolors.printcInfo(
                                                machine_head+underline+"\n"))
        self.machine.write_info(self.logger)

        self.logger.write(src.printcolors.printcInfo("out : \n"))
            # NOTE(review): the emptiness test on self.out is elided here.
            self.logger.write("Unable to get output\n")
            self.logger.write(self.out + "\n")
        self.logger.write(src.printcolors.printcInfo("err : \n"))
        self.logger.write(self.err + "\n")

    def get_status(self):
        """Get the status of the job (used by the Gui for xml display)

        :return: The current status of the job
        """
        if not self.machine.successfully_connected(self.logger):
            return "SSH connection KO"
        if not self.has_begun():
            return "Not launched"
        if self.is_running():
            return "running since " + time.strftime('%Y-%m-%d %H:%M:%S',
                                                    time.localtime(self._T0))
        if self.has_finished():
            if self.is_timeout():
                return "Timeout since " + time.strftime('%Y-%m-%d %H:%M:%S',
                                                    time.localtime(self._Tf))
            return "Finished since " + time.strftime('%Y-%m-%d %H:%M:%S',
                                                     time.localtime(self._Tf))
    # NOTE(review): the "class Jobs" header and the beginning of __init__'s
    # signature are elided in this view of the file.
    '''Class to manage the jobs to be run: instantiates the machines and jobs
       from the pyconf configuration, opens the ssh connections, schedules
       one job at a time per (host, port) and reports results.
    '''
                 lenght_columns = 20):
        # The jobs configuration
        self.cfg_jobs = config_jobs
        self.job_file_path = job_file_path
        # The machine that will be used today
        # The list of machine (hosts, port) that will be used today
        # (a same host can have several machine instances since there
        # can be several ssh parameters)
        # The jobs to be launched today
        # The jobs that will not be launched today
        self.ljobs_not_today = []
        # Width of one column in the status display.
        self.len_columns = lenght_columns

        # the list of jobs that have not been run yet
        self._l_jobs_not_started = []
        # the list of jobs that have already ran
        self._l_jobs_finished = []
        # the list of jobs that are running
        self._l_jobs_running = []

        self.determine_jobs_and_machines()

    def define_job(self, job_def, machine):
        '''Takes a pyconf job definition and a machine (from class machine)
           and returns the job instance corresponding to the definition.

        :param job_def src.config.Mapping: a job definition
        :param machine machine: the machine on which the job will run
        :return: The corresponding job in a job class instance
        '''
        cmmnds = job_def.commands
        if not "timeout" in job_def:
            timeout = 4*60*60 # default timeout = 4h
            # NOTE(review): the "else:" line is elided before this branch.
            timeout = job_def.timeout

        # Optional job attributes, read only when present in the definition.
        if 'after' in job_def:
            after = job_def.after
        if 'application' in job_def:
            application = job_def.application
        if 'board' in job_def:
            board = job_def.board
        if "prefix" in job_def:
            prefix = job_def.prefix

    def determine_jobs_and_machines(self):
        '''Function that reads the pyconf jobs definition and instantiates all
           the machines and jobs to be done today.
        '''
        # weekday(): 0 = Monday ... 6 = Sunday; matched against job_def.when.
        today = datetime.date.weekday(datetime.date.today())

        for job_def in self.cfg_jobs.jobs :
            # A job without a machine key cannot be scheduled.
            if not "machine" in job_def:
                msg = _('WARNING: The job "%s" do not have the key '
                        '"machine", this job is ignored.\n\n' % job_def.name)
                self.logger.write(src.printcolors.printcWarning(msg))
            name_machine = job_def.machine

            # Reuse an already-instantiated machine when possible.
            for mach in self.lmachines:
                if mach.name == name_machine:

            if a_machine == None:
                for machine_def in self.cfg_jobs.machines:
                    if machine_def.name == name_machine:
                        # Each connection parameter falls back to a default
                        # when absent from the machine definition.
                        # NOTE(review): the "else:" lines between default and
                        # explicit value are elided in this view.
                        if 'host' not in machine_def:
                            host = self.runner.cfg.VARS.hostname
                            host = machine_def.host

                        if 'user' not in machine_def:
                            user = self.runner.cfg.VARS.user
                            user = machine_def.user

                        if 'port' not in machine_def:
                            port = machine_def.port

                        if 'password' not in machine_def:
                            passwd = machine_def.password

                        if 'sat_path' not in machine_def:
                            sat_path = "salomeTools"
                            sat_path = machine_def.sat_path

                        self.lmachines.append(a_machine)
                        if (host, port) not in host_list:
                            host_list.append((host, port))

                if a_machine == None:
                    msg = _("WARNING: The job \"%(job_name)s\" requires the "
                            "machine \"%(machine_name)s\" but this machine "
                            "is not defined in the configuration file.\n"
                            "The job will not be launched\n")
                    self.logger.write(src.printcolors.printcWarning(
                                        msg % {"job_name" : job_def.name,
                                               "machine_name" : name_machine}))

            a_job = self.define_job(job_def, a_machine)

            if today in job_def.when:
                self.ljobs.append(a_job)
            else: # today in job_def.when
                self.ljobs_not_today.append(a_job)

        self.lhosts = host_list

    def ssh_connection_all_machines(self, pad=50):
        '''Function that do the ssh connection to every machine

        :param pad int: target width of the "Connection to ..." trace lines
        '''
        self.logger.write(src.printcolors.printcInfo((
                        "Establishing connection with all the machines :\n")))
        for machine in self.lmachines:
            # little algorithm in order to display traces
            begin_line = (_("Connection to %s: " % machine.name))
            if pad - len(begin_line) < 0:
                # NOTE(review): the "else:" branch structure is elided here.
                endline = (pad - len(begin_line)) * "." + " "

            step = "SSH connection"
            self.logger.write( begin_line + endline + step)

            # the call to the method that initiate the ssh connection
            msg = machine.connect(self.logger)

            # Copy salomeTools to the remote machine
            if machine.successfully_connected(self.logger):
                step = _("Remove SAT")
                self.logger.write('\r%s%s%s' % (begin_line, endline, 20 * " "),3)
                self.logger.write('\r%s%s%s' % (begin_line, endline, step), 3)
                # Wipe any previous installation before copying a fresh one.
                (__, out_dist, __) = machine.exec_command(
                                        "rm -rf %s" % machine.sat_path,
                self.logger.write('\r%s%s%s' % (begin_line, endline, 20 * " "),3)
                self.logger.write('\r%s%s%s' % (begin_line, endline, step), 3)
                res_copy = machine.copy_sat(self.runner.cfg.VARS.salometoolsway,
                # set the local settings of sat on the remote machine using
                # a remote "sat init" call.
                (__, out_dist, __) = machine.exec_command(
                            os.path.join(machine.sat_path,
                                "sat init --base default --workdir"
                                " default --log_dir default"),
                # get the remote machine distribution using a sat command
                (__, out_dist, __) = machine.exec_command(
                            os.path.join(machine.sat_path,
                                "sat config --value VARS.dist --no_label"),
                machine.distribution = out_dist.read().decode().replace("\n",
                # Print the status of the copy
                # NOTE(review): the success/failure test on res_copy is
                # elided; both trace branches remain below.
                    self.logger.write('\r%s' %
                                ((len(begin_line)+len(endline)+20) * " "), 3)
                    self.logger.write('\r%s%s%s' %
                        src.printcolors.printc(src.OK_STATUS)), 3)
                    self.logger.write('\r%s' %
                                ((len(begin_line)+len(endline)+20) * " "), 3)
                    self.logger.write('\r%s%s%s %s' %
                        src.printcolors.printc(src.KO_STATUS),
                        _("Copy of SAT failed: %s" % res_copy)), 3)
                # Connection failed: display the KO status and the message.
                self.logger.write('\r%s' %
                                ((len(begin_line)+len(endline)+20) * " "), 3)
                self.logger.write('\r%s%s%s %s' %
                        src.printcolors.printc(src.KO_STATUS),
            self.logger.write("\n", 3)

        self.logger.write("\n")

    def is_occupied(self, hostname):
        '''Function that returns True if a job is running on
           the machine defined by its host and its port.

        :param hostname (str, int): the pair (host, port)
        :return: the job that is running on the host,
                 or false if there is no job running on the host.
        '''
        for jb in self.ljobs:
            if jb.machine.host == host and jb.machine.port == port:

    def update_jobs_states_list(self):
        '''Function that updates the lists that store the currently
           running jobs and the jobs that have already finished.

        :return: True when at least one job finished since the last call
        '''
        jobs_finished_list = []
        jobs_running_list = []
        for jb in self.ljobs:
            # NOTE(review): the is_running() test is elided above this line.
                jobs_running_list.append(jb)
            if jb.has_finished():
                jobs_finished_list.append(jb)

        nb_job_finished_before = len(self._l_jobs_finished)
        self._l_jobs_finished = jobs_finished_list
        self._l_jobs_running = jobs_running_list

        nb_job_finished_now = len(self._l_jobs_finished)

        return nb_job_finished_now > nb_job_finished_before

    def cancel_dependencies_of_failing_jobs(self):
        '''Function that cancels all the jobs that depend on a failing one.
        '''
        for job in self.ljobs:
            if job.after is None:
            father_job = self.find_job_that_has_name(job.after)
            if father_job is not None and father_job.has_failed():

    def find_job_that_has_name(self, name):
        '''Returns the job by its name.

        :param name str: a job name
        :return: the job that has the name.
        '''
        for jb in self.ljobs:
        # the following is executed only if the job was not found

    def str_of_length(self, text, length):
        '''Takes a string text of any length and returns
           the most close string of length "length".

        :param text str: any string
        :param length int: a length for the returned string
        :return: the most close string of length "length"
        '''
        if len(text) > length:
            # Too long: truncate with an ellipsis.
            text_out = text[:length-3] + '...'
            # NOTE(review): the "else:" line is elided before this branch.
            # Too short: center the text with padding spaces.
            diff = length - len(text)
            before = " " * (diff//2)
            after = " " * (diff//2 + diff%2)
            text_out = before + text + after

    def display_status(self, len_col):
        '''Takes a lenght and construct the display of the current status
           of the jobs in an array that has a column for each host.
           It displays the job that is currently running on the host

        :param len_col int: the size of the column
        '''
        for host_port in self.lhosts:
            jb = self.is_occupied(host_port)
            if not jb: # nothing running on the host
                empty = self.str_of_length("empty", len_col)
                display_line += "|" + empty
                # NOTE(review): the "else:" line is elided before this branch.
                display_line += "|" + src.printcolors.printcInfo(
                                        self.str_of_length(jb.name, len_col))

        self.logger.write("\r" + display_line + "|")

    # NOTE(review): the "def run_jobs(self):" line is elided in this view.
        '''The main method. Runs all the jobs on every host.
           For each host, at a given time, only one job can be running.
           The jobs that have the field after (that contain the job that has
           to be run before it) are run after the previous job.
           This method stops when all the jobs are finished.
        '''
        self.logger.write(src.printcolors.printcInfo(
                                                _('Executing the jobs :\n')))
        # Build the header line of the status table, one column per host.
        for host_port in self.lhosts:
            if port == 22: # default value
                text_line += "|" + self.str_of_length(host, self.len_columns)
                text_line += "|" + self.str_of_length(
                                "("+host+", "+str(port)+")", self.len_columns)

        tiret_line = " " + "-"*(len(text_line)-1) + "\n"
        self.logger.write(tiret_line)
        self.logger.write(text_line + "|\n")
        self.logger.write(tiret_line)

        # The infinite loop that runs the jobs
        l_jobs_not_started = src.deepcopy_list(self.ljobs)
        while len(self._l_jobs_finished) != len(self.ljobs):
            new_job_start = False
            for host_port in self.lhosts:
                # Skip hosts that already run a job.
                if self.is_occupied(host_port):
                for jb in l_jobs_not_started:
                    # Only consider jobs targeting this (host, port).
                    if (jb.machine.host, jb.machine.port) != host_port:
                    if jb.after == None:
                        # No dependency: launch immediately.
                        l_jobs_not_started.remove(jb)
                        new_job_start = True
                        # Dependent job: launch only once its father finished.
                        jb_before = self.find_job_that_has_name(jb.after)
                        if jb_before is None:
                            msg = _("This job was not launched because its "
                                    "father is not in the jobs list.")
                        if jb_before.has_finished():
                            l_jobs_not_started.remove(jb)
                            new_job_start = True
            self.cancel_dependencies_of_failing_jobs()
            new_job_finished = self.update_jobs_states_list()

            if new_job_start or new_job_finished:
                    self.gui.update_xml_files(self.ljobs)
                # Display the current status
                self.display_status(self.len_columns)

            # Make sure that the proc is not entirely busy
            # NOTE(review): the sleep call is elided in this view.

        self.logger.write("\n")
        self.logger.write(tiret_line)
        self.logger.write("\n\n")

            # Final refresh of the xml files once every job is done.
            self.gui.update_xml_files(self.ljobs)
            self.gui.last_update()

    def write_all_results(self):
        '''Display all the jobs outputs.
        '''
        for jb in self.ljobs:
            self.logger.write(src.printcolors.printcLabel(
                        "#------- Results for job %s -------#\n" % jb.name))
            self.logger.write("\n\n")
    # NOTE(review): the "class Gui" header and the beginning of __init__'s
    # signature are elided in this view of the file.
    '''Class to manage the the xml data that can be displayed in a browser to
       see the jobs states

    :param xml_dir_path str: The path to the directory where to put
                             the xml resulting files
    :param l_jobs List: the list of jobs that run today
    :param l_jobs_not_today List: the list of jobs that do not run today
    :param file_boards str: the file path from which to read the
                            expected boards
    '''
        # The logging instance
        self.logger = logger

        # The prefix to add to the xml files : date_hour
        self.prefix = prefix

        # The path of the csv files to read to fill the expected boards
        self.file_boards = file_boards

        if file_boards != "":
            # weekday(): 0 = Monday ... 6 = Sunday.
            today = datetime.date.weekday(datetime.date.today())
            self.parse_csv_boards(today)
            # NOTE(review): the "else:" line is elided before this branch.
            self.d_input_boards = {}

        # The path of the global xml file
        self.xml_dir_path = xml_dir_path
        # Initialize the xml files
        self.global_name = "global_report"
        xml_global_path = os.path.join(self.xml_dir_path,
                                       self.global_name + ".xml")
        self.xml_global_file = src.xmlManager.XmlLogFile(xml_global_path,

        # Find history for each job
        self.find_history(l_jobs, l_jobs_not_today)

        # The xml files that corresponds to the boards.
        # {name_board : xml_object}}
        self.d_xml_board_files = {}

        # Create the lines and columns
        self.initialize_boards(l_jobs, l_jobs_not_today)

        # Write the xml file
        self.update_xml_files(l_jobs)

    def add_xml_board(self, name):
        '''Add a board to the board list
        :param name str: the board name
        '''
        xml_board_path = os.path.join(self.xml_dir_path, name + ".xml")
        self.d_xml_board_files[name] = src.xmlManager.XmlLogFile(
        # Pre-create the nodes filled later by initialize_boards().
        self.d_xml_board_files[name].add_simple_node("distributions")
        self.d_xml_board_files[name].add_simple_node("applications")
        self.d_xml_board_files[name].add_simple_node("board", text=name)

    def initialize_boards(self, l_jobs, l_jobs_not_today):
        '''Get all the first information needed for each file and write the
           first version of the files
        :param l_jobs List: the list of jobs that run today
        :param l_jobs_not_today List: the list of jobs that do not run today
        '''
        # Get the boards to fill and put it in a dictionary
        # {board_name : xml instance corresponding to the board}
        for job in l_jobs + l_jobs_not_today:
            if (board is not None and
                            board not in self.d_xml_board_files.keys()):
                self.add_xml_board(board)

        # Verify that the boards given as input are done
        for board in list(self.d_input_boards.keys()):
            if board not in self.d_xml_board_files:
                self.add_xml_board(board)
            # Record the source csv file on the board's root node.
            root_node = self.d_xml_board_files[board].xmlroot
            src.xmlManager.append_node_attrib(root_node,
                                              {"input_file" : self.file_boards})

        # Loop over all jobs in order to get the lines and columns for each
        # board (rows = distributions, columns = applications).
        for board in self.d_xml_board_files:
            d_application[board] = []

        for job in l_jobs + l_jobs_not_today:
            if (job.machine.host, job.machine.port) not in l_hosts_ports:
                l_hosts_ports.append((job.machine.host, job.machine.port))

            distrib = job.machine.distribution
            application = job.application

            board_job = job.board

            for board in self.d_xml_board_files:
                # Register a new distribution row for this board.
                if board_job == board:
                    if (distrib not in [None, ''] and
                                        distrib not in d_dist[board]):
                        d_dist[board].append(distrib)
                        src.xmlManager.add_simple_node(
                            self.d_xml_board_files[board].xmlroot.find(
                                           attrib={"name" : distrib})

                # Register a new application column for this board.
                if board_job == board:
                    if (application not in [None, ''] and
                                    application not in d_application[board]):
                        d_application[board].append(application)
                        src.xmlManager.add_simple_node(
                            self.d_xml_board_files[board].xmlroot.find(
                                               "name" : application})

        # Verify that there are no missing application or distribution in the
        # xml board files (regarding the input boards)
        for board in self.d_xml_board_files:
            l_dist = d_dist[board]
            if board not in self.d_input_boards.keys():
            for dist in self.d_input_boards[board]["rows"]:
                if dist not in l_dist:
                    src.xmlManager.add_simple_node(
                            self.d_xml_board_files[board].xmlroot.find(
                                           attrib={"name" : dist})
            l_appli = d_application[board]
            for appli in self.d_input_boards[board]["columns"]:
                if appli not in l_appli:
                    src.xmlManager.add_simple_node(
                            self.d_xml_board_files[board].xmlroot.find(
                                           attrib={"name" : appli})

        # Initialize the hosts_ports node for the global file
        self.xmlhosts_ports = self.xml_global_file.add_simple_node(
        for host, port in l_hosts_ports:
            host_port = "%s:%i" % (host, port)
            src.xmlManager.add_simple_node(self.xmlhosts_ports,
                                           attrib={"name" : host_port})

        # Initialize the jobs node in all files
        for xml_file in [self.xml_global_file] + list(
                                            self.d_xml_board_files.values()):
            xml_jobs = xml_file.add_simple_node("jobs")
            # Get the jobs present in the config file but
            # that will not be launched today
            self.put_jobs_not_today(l_jobs_not_today, xml_jobs)

            # add also the infos node
            xml_file.add_simple_node("infos",
                                     attrib={"name" : "last update",
                                             "JobsCommandStatus" : "running"})

            # and put the history node
            history_node = xml_file.add_simple_node("history")
            name_board = os.path.basename(xml_file.logFile)[:-len(".xml")]
            # serach for board files
            expression = "^[0-9]{8}_+[0-9]{6}_" + name_board + ".xml$"
            oExpr = re.compile(expression)
            # Get the list of xml borad files that are in the log directory
            for file_name in os.listdir(self.xml_dir_path):
                if oExpr.search(file_name):
                    date = os.path.basename(file_name).split("_")[0]
                    file_path = os.path.join(self.xml_dir_path, file_name)
                    src.xmlManager.add_simple_node(history_node,
                                                   attrib={"date" : date})

        # Find in each board the squares that needs to be filled regarding the
        # input csv files but that are not covered by a today job
        for board in self.d_input_boards.keys():
            xml_root_board = self.d_xml_board_files[board].xmlroot
            # Find the missing jobs for today
            xml_missing = src.xmlManager.add_simple_node(xml_root_board,
            for row, column in self.d_input_boards[board]["jobs"]:
                # NOTE(review): the loop over l_jobs and the "found" flag are
                # partially elided in this view.
                    if (job.application == column and
                            job.machine.distribution == row):
                    src.xmlManager.add_simple_node(xml_missing,
                                            attrib={"distribution" : row,
                                                    "application" : column })
            # Find the missing jobs not today
            xml_missing_not_today = src.xmlManager.add_simple_node(
                                                "missing_jobs_not_today")
            for row, column in self.d_input_boards[board]["jobs_not_today"]:
                for job in l_jobs_not_today:
                    if (job.application == column and
                            job.machine.distribution == row):
                    src.xmlManager.add_simple_node(xml_missing_not_today,
                                            attrib={"distribution" : row,
                                                    "application" : column })
    def find_history(self, l_jobs, l_jobs_not_today):
        """find, for each job, in the existent xml boards the results for the
           job. Store the results in the dictionnary self.history = {name_job :
           list of (date, status, list links)}

        :param l_jobs List: the list of jobs to run today
        :param l_jobs_not_today List: the list of jobs that do not run today
        """
        # load the all the history
        # global report files are named <8-digit date>_<6-digit time>_<name>.xml
        expression = "^[0-9]{8}_+[0-9]{6}_" + self.global_name + ".xml$"
        oExpr = re.compile(expression)
        # Get the list of global xml that are in the log directory
        for file_name in os.listdir(self.xml_dir_path):
            if oExpr.search(file_name):
                file_path = os.path.join(self.xml_dir_path, file_name)
                # an unreadable file is skipped with a warning, not fatal
                    global_xml = src.xmlManager.ReadXmlFile(file_path)
                    l_globalxml.append(global_xml)
                except Exception as e:
                    msg = _("\nWARNING: the file %s can not be read, it will be "
                            "ignored\n%s" % (file_path, e))
                    self.logger.write("%s\n" % src.printcolors.printcWarning(
        # Construct the dictionnary self.history
        for job in l_jobs + l_jobs_not_today:
            for global_xml in l_globalxml:
                # the date prefix of the file name identifies the run
                date = os.path.basename(global_xml.filePath).split("_")[0]
                global_root_node = global_xml.xmlroot.find("jobs")
                job_node = src.xmlManager.find_node_by_attrib(
                    if job_node.find("remote_log_file_path") is not None:
                        link = job_node.find("remote_log_file_path").text
                        res_job = job_node.find("res").text
                        # "nothing" is the sentinel for "no remote log produced"
                        if link != "nothing":
                            l_links.append((date, res_job, link))
            # most recent results first (dates sort lexicographically)
            l_links = sorted(l_links, reverse=True)
            self.history[job.name] = l_links
    def put_jobs_not_today(self, l_jobs_not_today, xml_node_jobs):
        '''Get all the first information needed for each file and write the
           first version of the files

        :param xml_node_jobs etree.Element: the node corresponding to a job
        :param l_jobs_not_today List: the list of jobs that do not run today
        '''
        for job in l_jobs_not_today:
            # one <job> node per job that is not scheduled today
            xmlj = src.xmlManager.add_simple_node(xml_node_jobs,
                                                  attrib={"name" : job.name})
            src.xmlManager.add_simple_node(xmlj, "application", job.application)
            src.xmlManager.add_simple_node(xmlj,
                                           job.machine.distribution)
            src.xmlManager.add_simple_node(xmlj, "board", job.board)
            src.xmlManager.add_simple_node(xmlj,
                                           "commands", " ; ".join(job.commands))
            # fixed marker state: the job exists but does not run today
            src.xmlManager.add_simple_node(xmlj, "state", "Not today")
            src.xmlManager.add_simple_node(xmlj, "machine", job.machine.name)
            src.xmlManager.add_simple_node(xmlj, "host", job.machine.host)
            src.xmlManager.add_simple_node(xmlj, "port", str(job.machine.port))
            src.xmlManager.add_simple_node(xmlj, "user", job.machine.user)
            src.xmlManager.add_simple_node(xmlj, "sat_path",
                                           job.machine.sat_path)
            # attach past results collected by find_history
            xml_history = src.xmlManager.add_simple_node(xmlj, "history")
            for i, (date, res_job, link) in enumerate(self.history[job.name]):
                    # tag the first one (the last one)
                    src.xmlManager.add_simple_node(xml_history,
                                                   attrib={"date" : date,
                    src.xmlManager.add_simple_node(xml_history,
                                                   attrib={"date" : date,
    def parse_csv_boards(self, today):
        """ Parse the csv file that describes the boards to produce and fill
            the dict d_input_boards that contain the csv file contain

        :param today int: the current day of the week
        """
        # open the csv file and read its content
        with open(self.file_boards, 'r') as f:
            reader = csv.reader(f,delimiter=CSV_DELIMITER)
        # get the delimiter for the boards (empty line)
        boards_delimiter = [''] * len(l_read[0])
        # Make the list of boards, by splitting with the delimiter
        l_boards = [list(y) for x, y in itertools.groupby(l_read,
                                    lambda z: z == boards_delimiter) if not x]

        # loop over the csv lists of lines and get the rows, columns and jobs
        for input_board in l_boards:
            # first cell of the first line is the board name
            board_name = input_board[0][0]
            # the rest of the first line gives the column (application) names
            columns = input_board[0][1:]
            for line in input_board[1:]:
                for i, square in enumerate(line[1:]):
                    # a square holds the week days (DAYS_SEPARATOR-separated)
                    # on which the (row, column) job must run
                    days = square.split(DAYS_SEPARATOR)
                    days = [int(day) for day in days]
                    job = (row, columns[i])
                    jobs_not_today.append(job)
            d_boards[board_name] = {"rows" : rows,
                                    "columns" : columns,
                                    "jobs_not_today" : jobs_not_today}
        # publish the result on the instance
        self.d_input_boards = d_boards
1454 def update_xml_files(self, l_jobs):
1455 '''Write all the xml files with updated information about the jobs
1457 :param l_jobs List: the list of jobs that run today
1459 for xml_file in [self.xml_global_file] + list(
1460 self.d_xml_board_files.values()):
1461 self.update_xml_file(l_jobs, xml_file)
1464 self.write_xml_files()
    def update_xml_file(self, l_jobs, xml_file):
        '''update information about the jobs for the file xml_file

        :param l_jobs List: the list of jobs that run today
        :param xml_file xmlManager.XmlLogFile: the xml instance to update
        '''

        xml_node_jobs = xml_file.xmlroot.find('jobs')
        # Update the job names and status node
            # Find the node corresponding to the job and delete it
            # in order to recreate it
            for xmljob in xml_node_jobs.findall('job'):
                if xmljob.attrib['name'] == job.name:
                    xml_node_jobs.remove(xmljob)

                T0 = time.strftime('%Y-%m-%d %H:%M:%S',
                                   time.localtime(job._T0))
                Tf = time.strftime('%Y-%m-%d %H:%M:%S',
                                   time.localtime(job._Tf))

            # recreate the job node
            xmlj = src.xmlManager.add_simple_node(xml_node_jobs,
                                                  attrib={"name" : job.name})
            src.xmlManager.add_simple_node(xmlj, "machine", job.machine.name)
            src.xmlManager.add_simple_node(xmlj, "host", job.machine.host)
            src.xmlManager.add_simple_node(xmlj, "port", str(job.machine.port))
            src.xmlManager.add_simple_node(xmlj, "user", job.machine.user)
            # past results of the same job (collected by find_history)
            xml_history = src.xmlManager.add_simple_node(xmlj, "history")
            for date, res_job, link in self.history[job.name]:
                src.xmlManager.add_simple_node(xml_history,
                                               attrib={"date" : date,

            src.xmlManager.add_simple_node(xmlj, "sat_path",
                                           job.machine.sat_path)
            src.xmlManager.add_simple_node(xmlj, "application", job.application)
            src.xmlManager.add_simple_node(xmlj, "distribution",
                                           job.machine.distribution)
            src.xmlManager.add_simple_node(xmlj, "board", job.board)
            src.xmlManager.add_simple_node(xmlj, "timeout", str(job.timeout))
            src.xmlManager.add_simple_node(xmlj, "commands",
                                           " ; ".join(job.commands))
            src.xmlManager.add_simple_node(xmlj, "state", job.get_status())
            src.xmlManager.add_simple_node(xmlj, "begin", T0)
            src.xmlManager.add_simple_node(xmlj, "end", Tf)
            # colour escape codes are stripped before storing the outputs
            src.xmlManager.add_simple_node(xmlj, "out",
                                           src.printcolors.cleancolor(job.out))
            src.xmlManager.add_simple_node(xmlj, "err",
                                           src.printcolors.cleancolor(job.err))
            src.xmlManager.add_simple_node(xmlj, "res", str(job.res_job))
            if len(job.remote_log_files) > 0:
                # only the first remote log file is published in the report
                src.xmlManager.add_simple_node(xmlj,
                                               "remote_log_file_path",
                                               job.remote_log_files[0])
                src.xmlManager.add_simple_node(xmlj,
                                               "remote_log_file_path",
            # Search for the test log if there is any
            l_test_log_files = self.find_test_log(job.remote_log_files)
            xml_test = src.xmlManager.add_simple_node(xmlj,
                                                      "test_log_file_path")
            for test_log_path, res_test, nb_fails in l_test_log_files:
                test_path_node = src.xmlManager.add_simple_node(xml_test,
                test_path_node.attrib["res"] = res_test
                test_path_node.attrib["nb_fails"] = nb_fails

            # job.after names the job this one depends on, if any
            xmlafter = src.xmlManager.add_simple_node(xmlj, "after", job.after)
            # get the job father
            if job.after is not None:
                    if jb.name == job.after:
                if (job_father is not None and
                    len(job_father.remote_log_files) > 0):
                    # link the dependency to its father's first remote log
                    link = job_father.remote_log_files[0]
                    src.xmlManager.append_node_attrib(xmlafter, {"link" : link})

            # Verify that the job is to be done today regarding the input csv
            if job.board and job.board in self.d_input_boards.keys():
                for dist, appli in self.d_input_boards[job.board]["jobs"]:
                    if (job.machine.distribution == dist
                        and job.application == appli):
                        src.xmlManager.add_simple_node(xmlj,
                        src.xmlManager.add_simple_node(xmlj,

        # Update the date
        xml_node_infos = xml_file.xmlroot.find('infos')
        src.xmlManager.append_node_attrib(xml_node_infos,
            datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")})
    def find_test_log(self, l_remote_log_files):
        '''Find if there is a test log (board) in the remote log files and
           the path to it. There can be several test command, so the result is
           a list.

        :param l_remote_log_files List: the list of all remote log files
        :return: the list of (test log files path, res of the command)
        :rtype: List
        '''
        for file_path in l_remote_log_files:
            # a test board lives in a "TEST" directory and its file name
            # matches the command-log pattern from src.logger
            dirname = os.path.basename(os.path.dirname(file_path))
            file_name = os.path.basename(file_path)
            regex = src.logger.log_all_command_file_expression
            oExpr = re.compile(regex)
            if dirname == "TEST" and oExpr.search(file_name):
                # find the res of the command
                prod_node = etree.parse(file_path).getroot().find("product")
                res_test = prod_node.attrib["global_res"]
                # find the number of fails
                testbase_node = prod_node.find("tests").find("testbase")
                nb_fails = int(testbase_node.attrib["failed"])
                # put the file path, the res of the test command and the number
                # of fails in the output
                res.append((file_path, res_test, nb_fails))
1611 def last_update(self, finish_status = "finished"):
1612 '''update information about the jobs for the file xml_file
1614 :param l_jobs List: the list of jobs that run today
1615 :param xml_file xmlManager.XmlLogFile: the xml instance to update
1617 for xml_file in [self.xml_global_file] + list(self.d_xml_board_files.values()):
1618 xml_node_infos = xml_file.xmlroot.find('infos')
1619 src.xmlManager.append_node_attrib(xml_node_infos,
1620 attrib={"JobsCommandStatus" : finish_status})
1622 self.write_xml_files()
1624 def write_xml_file(self, xml_file, stylesheet):
1625 ''' Write one xml file and the same file with prefix
1627 xml_file.write_tree(stylesheet)
1628 file_path = xml_file.logFile
1629 file_dir = os.path.dirname(file_path)
1630 file_name = os.path.basename(file_path)
1631 file_name_with_prefix = self.prefix + "_" + file_name
1632 xml_file.write_tree(stylesheet, os.path.join(file_dir,
1633 file_name_with_prefix))
1635 def write_xml_files(self):
1636 ''' Write the xml files
1638 self.write_xml_file(self.xml_global_file, STYLESHEET_GLOBAL)
1639 for xml_file in self.d_xml_board_files.values():
1640 self.write_xml_file(xml_file, STYLESHEET_BOARD)
def get_config_file_path(job_config_name, l_cfg_dir):
    '''Find the full path of a jobs configuration file.

    The name can be a direct path to an existing ".pyconf" file; otherwise
    it is searched (the ".pyconf" extension is appended if missing) in each
    directory of l_cfg_dir, first match wins.

    :param job_config_name str: the configuration name or path given by
                                the user
    :param l_cfg_dir List: the directories where to search the file
    :return: (found, path) where found tells if the file exists and path
             is the matching path (or the last candidate tried when not
             found)
    :rtype: (bool, str)
    '''
    found = False
    file_jobs_cfg = None
    if os.path.exists(job_config_name) and job_config_name.endswith(".pyconf"):
        # the user gave directly the path of an existing .pyconf file
        found = True
        file_jobs_cfg = job_config_name
    else:
        # search the name in the configured jobs directories
        for cfg_dir in l_cfg_dir:
            file_jobs_cfg = os.path.join(cfg_dir, job_config_name)
            if not file_jobs_cfg.endswith('.pyconf'):
                file_jobs_cfg += '.pyconf'
            if not os.path.exists(file_jobs_cfg):
                continue
            else:
                found = True
                break
    return found, file_jobs_cfg
def develop_factorized_jobs(config_jobs):
    '''Expand the factorized jobs of a jobs configuration in place.

    A job whose "machine" key is a list is duplicated into one job per
    machine. A list item is either a machine name (the job keeps its own
    "when" days) or a list [machine_name, day1, day2, ...] that also
    redefines the days on which the duplicated job runs.

    :param config_jobs Config: the config corresponding to the jobs
                               description; its "jobs" entry is replaced
                               by the developed list
    '''
    developed_jobs_list = []
    for jb in config_jobs.jobs:
        # case where the jobs are not developed
        if type(jb.machine) == type(""):
            developed_jobs_list.append(jb)
            continue
        # Case where the jobs must be developed
        # machine : ["CO7.2 physique", ["CO6.4 physique", $MONDAY, $TUESDAY ], "FD22"]
        name_job = jb.name
        for machine in jb.machine:
            new_job = src.pyconf.deepCopyMapping(jb)
            # case where there is a jobs on the machine corresponding to all
            # days in when variable.
            if type(machine) == type(""):
                new_job.machine = machine
                new_job.name = name_job + " / " + machine
            else:
                # case the days are re defined
                new_job.machine = machine[0]
                new_job.name = name_job + " / " + machine[0]
                new_job.when = machine[1:]
            developed_jobs_list.append(new_job)

    config_jobs.jobs = developed_jobs_list
# Describes the command
def description():
    '''method that is called when salomeTools is called with --help option.

    :return: The text to display for the jobs command description.
    :rtype: str
    '''
    return _("The jobs command launches maintenances that are described"
             " in the dedicated jobs configuration file.\n\nexample:\nsat "
             "jobs --name my_jobs --publish")
def run(args, runner, logger):
    '''method that is called when salomeTools is called with jobs parameter.
    '''

    (options, args) = parser.parse_args(args)

    # the directories where jobs configuration files are searched
    l_cfg_dir = runner.cfg.PATHS.JOBPATH

    # list option : display all the available config files
        for cfg_dir in l_cfg_dir:
            if not options.no_label:
                logger.write("------ %s\n" %
                                 src.printcolors.printcHeader(cfg_dir))
            if not os.path.exists(cfg_dir):
            for f in sorted(os.listdir(cfg_dir)):
                # only the .pyconf files are jobs configurations
                if not f.endswith('.pyconf'):
                logger.write("%s\n" % cfilename)

    # Make sure the jobs_config option has been called
    if not options.jobs_cfg:
        message = _("The option --jobs_config is required\n")
        src.printcolors.printcError(message)

    # Find the file in the directories, unless it is a full path
    # merge all in a config
    merger = src.pyconf.ConfigMerger()
    config_jobs = src.pyconf.Config()
    l_conf_files_path = []
    for config_file in options.jobs_cfg:
        found, file_jobs_cfg = get_config_file_path(config_file, l_cfg_dir)
            msg = _("The file configuration %s was not found."
                    "\nUse the --list option to get the "
                    "possible files." % config_file)
            logger.write("%s\n" % src.printcolors.printcError(msg), 1)
        l_conf_files_path.append(file_jobs_cfg)
        # Read the config that is in the file
        one_config_jobs = src.read_config_from_a_file(file_jobs_cfg)
        merger.merge(config_jobs, one_config_jobs)

            (_("Platform"), runner.cfg.VARS.dist),
            (_("Files containing the jobs configuration"), l_conf_files_path)
    src.print_info(logger, info)

    # keep only the jobs selected with --only_jobs, if any
    if options.only_jobs:
        l_jb = src.pyconf.Sequence()
        for jb in config_jobs.jobs:
            if jb.name in options.only_jobs:
                    "Job that was given in only_jobs option parameters\n")
        config_jobs.jobs = l_jb

    # Parse the config jobs in order to develop all the factorized jobs
    develop_factorized_jobs(config_jobs)

    # Make a unique file that contain all the jobs in order to use it
    name_pyconf = "_".join([os.path.basename(path)[:-len('.pyconf')]
                            for path in l_conf_files_path]) + ".pyconf"
    path_pyconf = src.get_tmp_filename(runner.cfg, name_pyconf)
    # NOTE(review): file() is a Python-2-only builtin; open() would be
    # required on Python 3 — confirm which interpreter this tool targets
    f = file( path_pyconf , 'w')
    config_jobs.__save__(f)

    # log the paramiko problems
    log_dir = src.get_log_path(runner.cfg)
    paramiko_log_dir_path = os.path.join(log_dir, "JOBS")
    src.ensure_path_exists(paramiko_log_dir_path)
    paramiko.util.log_to_file(os.path.join(paramiko_log_dir_path,
                                           logger.txtFileName))

    today_jobs = Jobs(runner,

    # SSH connection to all machines
    today_jobs.ssh_connection_all_machines()
    if options.test_connection:

        logger.write(src.printcolors.printcInfo(
                                        _("Initialize the xml boards : ")), 5)

        # Copy the stylesheets in the log directory
        xsl_dir = os.path.join(runner.cfg.VARS.srcDir, 'xsl')
        files_to_copy.append(os.path.join(xsl_dir, STYLESHEET_GLOBAL))
        files_to_copy.append(os.path.join(xsl_dir, STYLESHEET_BOARD))
        files_to_copy.append(os.path.join(xsl_dir, "command.xsl"))
        files_to_copy.append(os.path.join(xsl_dir, "running.gif"))
        for file_path in files_to_copy:
            # OP We use copy instead of copy2 to update the creation date
            #    So we can clean the LOGS directories easily
            shutil.copy(file_path, log_dir)

        # Instanciate the Gui in order to produce the xml files that contain all
                  today_jobs.ljobs_not_today,
                  runner.cfg.VARS.datehour,
                  file_boards = options.input_boards)

        logger.write(src.printcolors.printcSuccess("OK"), 5)
        logger.write("\n\n", 5)

        # Display the list of the xml files
        logger.write(src.printcolors.printcInfo(("Here is the list of published"
        logger.write("%s\n" % gui.xml_global_file.logFile, 4)
        for board in gui.d_xml_board_files.keys():
            file_path = gui.d_xml_board_files[board].logFile
            file_name = os.path.basename(file_path)
            logger.write("%s\n" % file_path, 4)
            logger.add_link(file_name, "board", 0, board)

        logger.write("\n", 4)

    today_jobs.gui = gui

        # Run all the jobs contained in config_jobs
        today_jobs.run_jobs()
    except KeyboardInterrupt:
        # a user interruption is reported as a warning, not a crash
        logger.write("\n\n%s\n\n" %
                (src.printcolors.printcWarning(_("Forced interruption"))), 1)
    except Exception as e:
        msg = _("CRITICAL ERROR: The jobs loop has been interrupted\n")
        logger.write("\n\n%s\n" % src.printcolors.printcError(msg) )
        logger.write("%s\n" % str(e))
        # dump the traceback into a temporary file so it can be logged
        __, __, exc_traceback = sys.exc_info()
        fp = tempfile.TemporaryFile()
        traceback.print_tb(exc_traceback, file=fp)
        logger.write("\nTRACEBACK: %s\n" % stack.replace('"',"'"), 1)
            msg = _("Killing the running jobs and trying"
                    " to get the corresponding logs\n")
            logger.write(src.printcolors.printcWarning(msg))

        # find the potential not finished jobs and kill them
        for jb in today_jobs.ljobs:
            if not jb.has_finished():
                    jb.kill_remote_process()
                except Exception as e:
                    # killing is best-effort; a failure is only a warning
                    msg = _("Failed to kill job %s: %s\n" % (jb.name, e))
                    logger.write(src.printcolors.printcWarning(msg))
            if jb.res_job != "0":
            today_jobs.gui.last_update(_("Forced interruption"))
            today_jobs.gui.last_update()
        # Output the results
        today_jobs.write_all_results()
        # Remove the temporary pyconf file
        if os.path.exists(path_pyconf):
            os.remove(path_pyconf)