3 # Copyright (C) 2010-2013 CEA/DEN
5 # This library is free software; you can redistribute it and/or
6 # modify it under the terms of the GNU Lesser General Public
7 # License as published by the Free Software Foundation; either
8 # version 2.1 of the License.
10 # This library is distributed in the hope that it will be useful,
11 # but WITHOUT ANY WARRANTY; without even the implied warranty of
12 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 # Lesser General Public License for more details.
15 # You should have received a copy of the GNU Lesser General Public
16 # License along with this library; if not, write to the Free Software
17 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# NOTE(review): decimated excerpt — embedded numbers are the original file's line
# numbers and they jump, so lines are missing throughout. Code kept verbatim.
# Fallback marker: presumably set in an `except ImportError:` branch around
# `import paramiko` (the try/except lines are not in this excerpt) — TODO confirm.
34 paramiko = "import paramiko impossible"
40 import src.ElementTree as etree
# XSL stylesheets used to render the generated job-report XML in a browser.
42 STYLESHEET_GLOBAL = "jobs_global_report.xsl"
43 STYLESHEET_BOARD = "jobs_board_report.xsl"
# Command-line option declarations for the `sat jobs` command.
48 parser = src.options.Options()
50 parser.add_option('n', 'name', 'list2', 'jobs_cfg',
51 _('Mandatory: The name of the config file that contains'
52 ' the jobs configuration. Can be a list.'))
53 parser.add_option('o', 'only_jobs', 'list2', 'only_jobs',
54 _('Optional: the list of jobs to launch, by their name. '))
55 parser.add_option('l', 'list', 'boolean', 'list',
56 _('Optional: list all available config files.'))
57 parser.add_option('t', 'test_connection', 'boolean', 'test_connection',
58 _("Optional: try to connect to the machines. "
59 "Not executing the jobs."),
# NOTE(review): trailing default-value arguments of several add_option calls
# (e.g. closing `False)`) are among the missing lines.
61 parser.add_option('p', 'publish', 'boolean', 'publish',
62 _("Optional: generate an xml file that can be read in a "
63 "browser to display the jobs status."),
65 parser.add_option('i', 'input_boards', 'string', 'input_boards', _("Optional: "
66 "the path to csv file that contain "
67 "the expected boards."),"")
68 parser.add_option('', 'completion', 'boolean', 'no_label',
69 _("Optional (internal use): do not print labels, Works only "
# Wrapper around one paramiko SSH/SFTP session to a remote build machine.
# NOTE(review): interior lines are missing from this excerpt (embedded line
# numbers jump); all original lines below are kept byte-identical.
73 class Machine(object):
74 '''Class to manage a ssh connection on a machine
# Constructor fragment: `__init__` header and most attribute assignments are
# missing; visible defaults/attributes only.
82 sat_path="salomeTools"):
86 self.distribution = None # Will be filled after copying SAT on the machine
88 self.password = passwd
89 self.sat_path = sat_path
90 self.ssh = paramiko.SSHClient()
# Tri-state: None = no connection attempted yet, False = failed, True = OK.
91 self._connection_successful = None
93 def connect(self, logger):
94 '''Initiate the ssh connection to the remote machine
96 :param logger src.logger.Logger: The logger instance
# Pessimistic default; flipped to True only if ssh.connect raises nothing.
101 self._connection_successful = False
102 self.ssh.load_system_host_keys()
# AutoAddPolicy silently trusts unknown host keys — acceptable for an
# internal build farm, unsafe against MITM on untrusted networks.
103 self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
105 self.ssh.connect(self.host,
108 password = self.password)
109 except paramiko.AuthenticationException:
110 message = src.KO_STATUS + _("Authentication failed")
111 except paramiko.BadHostKeyException:
112 message = (src.KO_STATUS +
113 _("The server's host key could not be verified"))
114 except paramiko.SSHException:
115 message = ( _("SSHException error connecting or "
116 "establishing an SSH session"))
# Presumably inside a catch-all `except:` branch (header line missing).
118 message = ( _("Error connecting or establishing an SSH session"))
120 self._connection_successful = True
124 def successfully_connected(self, logger):
125 '''Verify if the connection to the remote machine has succeed
127 :param logger src.logger.Logger: The logger instance
128 :return: True if the connection has succeed, False if not
# Warn when queried before connect() was ever called (flag still None).
131 if self._connection_successful == None:
132 message = _("Warning : trying to ask if the connection to "
133 "(name: %s host: %s, port: %s, user: %s) is OK whereas there were"
134 " no connection request" %
135 (self.name, self.host, self.port, self.user))
136 logger.write( src.printcolors.printcWarning(message))
137 return self._connection_successful
139 def copy_sat(self, sat_local_path, job_file):
140 '''Copy salomeTools to the remote machine in self.sat_path
144 # open a sftp connection
145 self.sftp = self.ssh.open_sftp()
146 # Create the sat directory on remote machine if it is not existing
147 self.mkdir(self.sat_path, ignore_existing=True)
# '.git' is excluded from the upload via the filters argument.
149 self.put_dir(sat_local_path, self.sat_path, filters = ['.git'])
150 # put the job configuration file in order to make it reachable
151 # on the remote machine
# Leading dot: the remote copy of the job file is hidden.
152 remote_job_file_name = ".%s" % os.path.basename(job_file)
153 self.sftp.put(job_file, os.path.join(self.sat_path,
154 remote_job_file_name))
155 except Exception as e:
# Any failure during the copy invalidates the whole connection.
157 self._connection_successful = False
161 def put_dir(self, source, target, filters = []):
162 ''' Uploads the contents of the source directory to the target path. The
163 target directory needs to exists. All sub-directories in source are
164 created under target.
# NOTE(review): the filtering test using `filters` is among the missing lines.
166 for item in os.listdir(source):
169 source_path = os.path.join(source, item)
170 destination_path = os.path.join(target, item)
# Symlinks are recreated remotely rather than followed.
171 if os.path.islink(source_path):
172 linkto = os.readlink(source_path)
174 self.sftp.symlink(linkto, destination_path)
175 self.sftp.chmod(destination_path,
176 os.stat(source_path).st_mode)
# Regular files: upload and mirror the local permission bits.
180 if os.path.isfile(source_path):
181 self.sftp.put(source_path, destination_path)
182 self.sftp.chmod(destination_path,
183 os.stat(source_path).st_mode)
# Directories: create remotely then recurse.
185 self.mkdir(destination_path, ignore_existing=True)
186 self.put_dir(source_path, destination_path)
# mode=511 decimal == 0o777.
188 def mkdir(self, path, mode=511, ignore_existing=False):
189 ''' Augments mkdir by adding an option to not fail
193 self.sftp.mkdir(path, mode)
200 def exec_command(self, command, logger):
201 '''Execute the command on the remote machine
203 :param command str: The command to be run
204 :param logger src.logger.Logger: The logger instance
205 :return: the stdin, stdout, and stderr of the executing command,
207 :rtype: (paramiko.channel.ChannelFile, paramiko.channel.ChannelFile,
208 paramiko.channel.ChannelFile)
211 # Does not wait the end of the command
212 (stdin, stdout, stderr) = self.ssh.exec_command(command)
213 except paramiko.SSHException:
214 message = src.KO_STATUS + _(
215 ": the server failed to execute the command\n")
216 logger.write( src.printcolors.printcError(message))
# (None, None, None) is the documented failure sentinel checked by callers.
217 return (None, None, None)
219 logger.write( src.printcolors.printcError(src.KO_STATUS + '\n'))
220 return (None, None, None)
222 return (stdin, stdout, stderr)
# `def close(self):` header is among the missing lines — only its docstring remains.
225 '''Close the ssh connection
231 def write_info(self, logger):
232 '''Prints the informations relative to the machine in the logger
233 (terminal traces and log file)
235 :param logger src.logger.Logger: The logger instance
239 logger.write("host : " + self.host + "\n")
240 logger.write("port : " + str(self.port) + "\n")
241 logger.write("user : " + str(self.user) + "\n")
242 if self.successfully_connected(logger):
243 status = src.OK_STATUS
# Presumably the `else:` branch (header line missing).
245 status = src.KO_STATUS
246 logger.write("Connection : " + status + "\n\n")
# Job class fragment: the `class Job` header and the `__init__` signature are
# among the missing lines of this decimated excerpt. Kept verbatim.
250 '''Class to manage one job
266 self.machine = machine
268 self.timeout = timeout
269 self.application = application
273 # The list of log files to download from the remote machine
274 self.remote_log_files = []
276 # The remote command status
277 # -1 means that it has not been launched,
278 # 0 means success and 1 means fail
280 self.cancelled = False
# Lifecycle flags, all managed internally by run()/has_finished()/check_time().
284 self._has_begun = False
285 self._has_finished = False
286 self._has_timouted = False
287 self._stdin = None # Store the command inputs field
288 self._stdout = None # Store the command outputs field
289 self._stderr = None # Store the command errors field
# Matches the hidden remote copy made by Machine.copy_sat (leading dot).
294 self.name_remote_jobs_pyconf = ".%s" % os.path.basename(job_file_path)
295 self.commands = commands
# Build the remote "sat job" command line; some argument lines are missing.
296 self.command = (os.path.join(self.machine.sat_path, "sat") +
298 os.path.join(self.machine.sat_path,
299 "list_log_files.txt") +
300 " job --jobs_config " +
301 os.path.join(self.machine.sat_path,
302 self.name_remote_jobs_pyconf) +
303 " --name " + self.name)
# Optional prefix wraps the whole command in double quotes (guard line missing).
305 self.command = prefix + ' "' + self.command +'"'
# `def get_pids(self):` header is among the missing lines — docstring onward only.
308 """ Get the pid(s) corresponding to the command that have been launched
309 On the remote machine
311 :return: The list of integers corresponding to the found pids
# Greps the remote process table for the exact command string; the grep
# process itself may match too — TODO confirm filtering happens elsewhere.
315 cmd_pid = 'ps aux | grep "' + self.command + '" | awk \'{print $2}\''
316 (_, out_pid, _) = self.machine.exec_command(cmd_pid, self.logger)
317 pids_cmd = out_pid.readlines()
318 pids_cmd = [str(src.only_numbers(pid)) for pid in pids_cmd]
322 def kill_remote_process(self, wait=1):
323 '''Kills the process on the remote machine.
325 :return: (the output of the kill, the error of the kill)
329 pids = self.get_pids()
# Early exit when no pid could be found (guard condition line missing).
331 return ("Unable to get the pid of the command.", "")
# SIGINT (kill -2) each found pid in one remote shell invocation.
333 cmd_kill = " ; ".join([("kill -2 " + pid) for pid in pids])
334 (_, out_kill, err_kill) = self.machine.exec_command(cmd_kill,
337 return (out_kill.read().decode(), err_kill.read().decode())
# `def has_begun(self):` header is among the missing lines.
340 '''Returns True if the job has already begun
342 :return: True if the job has already begun
345 return self._has_begun
347 def has_finished(self):
348 '''Returns True if the job has already finished
349 (i.e. all the commands have been executed)
350 If it is finished, the outputs are stored in the fields out and err.
352 :return: True if the job has already finished
356 # If the method has already been called and returned True
357 if self._has_finished:
360 # If the job has not begun yet
361 if not self.has_begun():
# Channel closed <=> remote command terminated.
364 if self._stdout.channel.closed:
365 self._has_finished = True
366 # Store the result outputs
367 self.out += self._stdout.read().decode()
368 self.err += self._stderr.read().decode()
# Record the end timestamp.
370 self._Tf = time.time()
371 # And get the remote command status and log files
374 except Exception as e:
375 self.err += _("Unable to get remote log files: %s" % e)
377 return self._has_finished
379 def get_log_files(self):
380 """Get the log files produced by the command launched
381 on the remote machine, and put it in the log directory of the user,
382 so they can be accessible from
384 # Do not get the files if the command is not finished
385 if not self.has_finished():
386 msg = _("Trying to get log files whereas the job is not finished.")
387 self.logger.write(src.printcolors.printcWarning(msg))
390 # First get the file that contains the list of log files to get
391 tmp_file_path = src.get_tmp_filename(self.config, "list_log_files.txt")
392 remote_path = os.path.join(self.machine.sat_path, "list_log_files.txt")
# SFTP-download the manifest to a local temp file (arguments partially missing).
393 self.machine.sftp.get(
397 # Read the file and get the result of the command and all the log files
399 fstream_tmp = open(tmp_file_path, "r")
400 file_lines = fstream_tmp.readlines()
401 file_lines = [line.replace("\n", "") for line in file_lines]
# NOTE(review): fstream_tmp.close() is presumably among the missing lines.
403 os.remove(tmp_file_path)
406 # The first line is the result of the command (0 success or 1 fail)
407 self.res_job = file_lines[0]
408 except Exception as e:
409 self.err += _("Unable to get status from remote file %s: %s" %
410 (remote_path, str(e)))
# Remaining manifest lines are remote log-file paths to fetch.
412 for i, job_path_remote in enumerate(file_lines[1:]):
414 # For each command, there is two files to get :
415 # 1- The xml file describing the command and giving the
417 # 2- The txt file containing the system command traces (like
418 # traces produced by the "make" command)
419 # 3- In case of the test command, there is another file to get :
420 # the xml board that contain the test results
# Parent directory name decides how the local destination is computed.
421 dirname = os.path.basename(os.path.dirname(job_path_remote))
422 if dirname != 'OUT' and dirname != 'TEST':
424 local_path = os.path.join(os.path.dirname(
425 self.logger.logFilePath),
426 os.path.basename(job_path_remote))
427 if i==0: # The first is the job command
428 self.logger.add_link(os.path.basename(job_path_remote),
432 elif dirname == 'OUT':
434 local_path = os.path.join(os.path.dirname(
435 self.logger.logFilePath),
437 os.path.basename(job_path_remote))
438 elif dirname == 'TEST':
440 local_path = os.path.join(os.path.dirname(
441 self.logger.logFilePath),
443 os.path.basename(job_path_remote))
# Skip the download when the local file already exists.
446 if not os.path.exists(local_path):
447 self.machine.sftp.get(job_path_remote, local_path)
448 self.remote_log_files.append(local_path)
449 except Exception as e:
450 self.err += _("Unable to get %s log file from remote: %s" %
451 (str(job_path_remote),
454 def has_failed(self):
455 '''Returns True if the job has failed.
456 A job is considered as failed if the machine could not be reached,
457 if the remote command failed,
458 or if the job finished with a time out.
460 :return: True if the job has failed
# Each guard's `return` line is among the missing lines of this excerpt.
463 if not self.has_finished():
465 if not self.machine.successfully_connected(self.logger):
467 if self.is_timeout():
# res_job is the string status read from the remote manifest ("1" = fail).
469 if self.res_job == "1":
# `def cancel(self):` header is among the missing lines.
474 """In case of a failing job, one has to cancel every job that depend
475 on it. This method put the job as failed and will not be executed.
# Mark as begun+finished so the scheduler never launches it.
479 self._has_begun = True
480 self._has_finished = True
481 self.cancelled = True
482 self.out += _("This job was not launched because its father has failed.")
483 self.err += _("This job was not launched because its father has failed.")
485 def is_running(self):
486 '''Returns True if the job commands are running
488 :return: True if the job is running
491 return self.has_begun() and not self.has_finished()
493 def is_timeout(self):
494 '''Returns True if the job commands has finished with timeout
496 :return: True if the job has finished with timeout
# Spelling "timouted" kept as in source — renaming would be a code change.
499 return self._has_timouted
501 def time_elapsed(self):
502 """Get the time elapsed since the job launching
504 :return: The number of seconds
507 if not self.has_begun():
# _T0 is the launch timestamp set in run().
510 return T_now - self._T0
512 def check_time(self):
513 """Verify that the job has not exceeded its timeout.
514 If it has, kill the remote command and consider the job as finished.
516 if not self.has_begun():
518 if self.time_elapsed() > self.timeout:
519 self._has_finished = True
520 self._has_timouted = True
521 self._Tf = time.time()
# Best-effort kill of the remote process; its output is appended to out.
522 (out_kill, __) = self.kill_remote_process()
523 self.out += "TIMEOUT \n" + out_kill
524 self.err += "TIMEOUT : %s seconds elapsed\n" % str(self.timeout)
527 except Exception as e:
528 self.err += _("Unable to get remote log files!\n%s\n" % str(e))
530 def total_duration(self):
531 """Give the total duration of the job
533 :return: the total duration of the job in seconds
536 return self._Tf - self._T0
# `def run(self):` header is among the missing lines — docstring onward only.
539 """Launch the job by executing the remote command.
542 # Prevent multiple run
# Guard condition line (presumably `if self.has_begun():`) is missing.
544 msg = _("Warning: A job can only be launched one time")
545 msg2 = _("Trying to launch the job \"%s\" whereas it has "
546 "already been launched." % self.name)
547 self.logger.write(src.printcolors.printcWarning("%s\n%s\n" % (msg,
551 # Do not execute the command if the machine could not be reached
552 if not self.machine.successfully_connected(self.logger):
553 self._has_finished = True
555 self.err += ("Connection to machine (name : %s, host: %s, port:"
556 " %s, user: %s) has failed\nUse the log command "
557 "to get more information."
558 % (self.machine.name,
563 # Usual case : Launch the command on remote machine
564 self._T0 = time.time()
565 self._stdin, self._stdout, self._stderr = self.machine.exec_command(
568 # If the results are not initialized, finish the job
# (None, None, None) is Machine.exec_command's failure sentinel.
569 if (self._stdin, self._stdout, self._stderr) == (None, None, None):
570 self._has_finished = True
571 self._Tf = time.time()
573 self.err += "The server failed to execute the command"
575 # Put the beginning flag to true.
576 self._has_begun = True
578 def write_results(self):
579 """Display on the terminal all the job's information
581 self.logger.write("name : " + self.name + "\n")
583 self.logger.write("after : %s\n" % self.after)
584 self.logger.write("Time elapsed : %4imin %2is \n" %
585 (self.total_duration()//60 , self.total_duration()%60))
# Begin/end timestamps; guards around these writes may be among missing lines.
587 self.logger.write("Begin time : %s\n" %
588 time.strftime('%Y-%m-%d %H:%M:%S',
589 time.localtime(self._T0)) )
591 self.logger.write("End time : %s\n\n" %
592 time.strftime('%Y-%m-%d %H:%M:%S',
593 time.localtime(self._Tf)) )
595 machine_head = "Informations about connection :\n"
# Underline sized to the header text minus the trailing ":\n".
596 underline = (len(machine_head) - 2) * "-"
597 self.logger.write(src.printcolors.printcInfo(
598 machine_head+underline+"\n"))
599 self.machine.write_info(self.logger)
601 self.logger.write(src.printcolors.printcInfo("out : \n"))
# Fallback branch when no output was captured (condition line missing).
603 self.logger.write("Unable to get output\n")
605 self.logger.write(self.out + "\n")
606 self.logger.write(src.printcolors.printcInfo("err : \n"))
607 self.logger.write(self.err + "\n")
609 def get_status(self):
610 """Get the status of the job (used by the Gui for xml display)
612 :return: The current status of the job
# Order matters: connection failure dominates, then not-launched, then timing.
615 if not self.machine.successfully_connected(self.logger):
616 return "SSH connection KO"
617 if not self.has_begun():
618 return "Not launched"
621 if self.is_running():
622 return "running since " + time.strftime('%Y-%m-%d %H:%M:%S',
623 time.localtime(self._T0))
624 if self.has_finished():
625 if self.is_timeout():
626 return "Timeout since " + time.strftime('%Y-%m-%d %H:%M:%S',
627 time.localtime(self._Tf))
628 return "Finished since " + time.strftime('%Y-%m-%d %H:%M:%S',
629 time.localtime(self._Tf))
# Jobs class fragment: the `class Jobs` header and the `__init__` signature are
# among the missing lines of this decimated excerpt. Kept verbatim.
632 '''Class to manage the jobs to be run
639 lenght_columns = 20):
640 # The jobs configuration
641 self.cfg_jobs = config_jobs
642 self.job_file_path = job_file_path
643 # The machine that will be used today
645 # The list of machine (hosts, port) that will be used today
646 # (a same host can have several machine instances since there
647 # can be several ssh parameters)
649 # The jobs to be launched today
651 # The jobs that will not be launched today
652 self.ljobs_not_today = []
# Spelling "lenght" kept as in source — the parameter name is part of the API.
655 self.len_columns = lenght_columns
657 # the list of jobs that have not been run yet
658 self._l_jobs_not_started = []
659 # the list of jobs that have already ran
660 self._l_jobs_finished = []
661 # the list of jobs that are running
662 self._l_jobs_running = []
664 self.determine_jobs_and_machines()
666 def define_job(self, job_def, machine):
667 '''Takes a pyconf job definition and a machine (from class machine)
668 and returns the job instance corresponding to the definition.
670 :param job_def src.config.Mapping: a job definition
671 :param machine machine: the machine on which the job will run
672 :return: The corresponding job in a job class instance
676 cmmnds = job_def.commands
677 if not "timeout" in job_def:
678 timeout = 4*60*60 # default timeout = 4h
# `else:` lines for each optional key are among the missing lines.
680 timeout = job_def.timeout
682 if 'after' in job_def:
683 after = job_def.after
685 if 'application' in job_def:
686 application = job_def.application
688 if 'board' in job_def:
689 board = job_def.board
691 if "prefix" in job_def:
692 prefix = job_def.prefix
# The final `return Job(...)` construction is among the missing lines.
706 def determine_jobs_and_machines(self):
707 '''Function that reads the pyconf jobs definition and instantiates all
708 the machines and jobs to be done today.
# weekday(): Monday == 0 … Sunday == 6.
713 today = datetime.date.weekday(datetime.date.today())
716 for job_def in self.cfg_jobs.jobs :
718 if not "machine" in job_def:
719 msg = _('WARNING: The job "%s" do not have the key '
720 '"machine", this job is ignored.\n\n' % job_def.name)
721 self.logger.write(src.printcolors.printcWarning(msg))
723 name_machine = job_def.machine
# First try to reuse an already-instantiated Machine with the same name.
726 for mach in self.lmachines:
727 if mach.name == name_machine:
731 if a_machine == None:
# Otherwise build it from the machines section of the configuration,
# falling back to local defaults for each missing key.
732 for machine_def in self.cfg_jobs.machines:
733 if machine_def.name == name_machine:
734 if 'host' not in machine_def:
735 host = self.runner.cfg.VARS.hostname
737 host = machine_def.host
739 if 'user' not in machine_def:
740 user = self.runner.cfg.VARS.user
742 user = machine_def.user
744 if 'port' not in machine_def:
747 port = machine_def.port
749 if 'password' not in machine_def:
752 passwd = machine_def.password
754 if 'sat_path' not in machine_def:
755 sat_path = "salomeTools"
757 sat_path = machine_def.sat_path
# Machine(...) construction lines are among the missing lines.
768 self.lmachines.append(a_machine)
769 if (host, port) not in host_list:
770 host_list.append((host, port))
772 if a_machine == None:
773 msg = _("WARNING: The job \"%(job_name)s\" requires the "
774 "machine \"%(machine_name)s\" but this machine "
775 "is not defined in the configuration file.\n"
776 "The job will not be launched\n")
777 self.logger.write(src.printcolors.printcWarning(
778 msg % {"job_name" : job_def.name,
779 "machine_name" : name_machine}))
782 a_job = self.define_job(job_def, a_machine)
# Partition jobs by whether today's weekday appears in job_def.when.
784 if today in job_def.when:
785 self.ljobs.append(a_job)
786 else: # today in job_def.when
787 self.ljobs_not_today.append(a_job)
789 self.lhosts = host_list
791 def ssh_connection_all_machines(self, pad=50):
792 '''Function that do the ssh connection to every machine
798 self.logger.write(src.printcolors.printcInfo((
799 "Establishing connection with all the machines :\n")))
800 for machine in self.lmachines:
801 # little algorithm in order to display traces
802 begin_line = (_("Connection to %s: " % machine.name))
803 if pad - len(begin_line) < 0:
# Dotted filler aligns status columns across machines.
806 endline = (pad - len(begin_line)) * "." + " "
808 step = "SSH connection"
809 self.logger.write( begin_line + endline + step)
811 # the call to the method that initiate the ssh connection
812 msg = machine.connect(self.logger)
814 # Copy salomeTools to the remote machine
815 if machine.successfully_connected(self.logger):
816 step = _("Remove SAT")
# '\r' + padding wipes the previous step text before writing the next one.
817 self.logger.write('\r%s%s%s' % (begin_line, endline, 20 * " "),3)
818 self.logger.write('\r%s%s%s' % (begin_line, endline, step), 3)
819 (__, out_dist, __) = machine.exec_command(
820 "rm -rf %s" % machine.sat_path,
826 self.logger.write('\r%s%s%s' % (begin_line, endline, 20 * " "),3)
827 self.logger.write('\r%s%s%s' % (begin_line, endline, step), 3)
829 res_copy = machine.copy_sat(self.runner.cfg.VARS.salometoolsway,
832 # set the local settings of sat on the remote machine using
834 (__, out_dist, __) = machine.exec_command(
835 os.path.join(machine.sat_path,
836 "sat init --base default --workdir"
837 " default --log_dir default"),
841 # get the remote machine distribution using a sat command
842 (__, out_dist, __) = machine.exec_command(
843 os.path.join(machine.sat_path,
844 "sat config --value VARS.dist --no_label"),
846 machine.distribution = out_dist.read().decode().replace("\n",
849 # Print the status of the copy
# Three status branches: copy OK, copy failed, connection failed
# (the branch conditions are among the missing lines).
851 self.logger.write('\r%s' %
852 ((len(begin_line)+len(endline)+20) * " "), 3)
853 self.logger.write('\r%s%s%s' %
856 src.printcolors.printc(src.OK_STATUS)), 3)
858 self.logger.write('\r%s' %
859 ((len(begin_line)+len(endline)+20) * " "), 3)
860 self.logger.write('\r%s%s%s %s' %
863 src.printcolors.printc(src.KO_STATUS),
864 _("Copy of SAT failed: %s" % res_copy)), 3)
866 self.logger.write('\r%s' %
867 ((len(begin_line)+len(endline)+20) * " "), 3)
868 self.logger.write('\r%s%s%s %s' %
871 src.printcolors.printc(src.KO_STATUS),
873 self.logger.write("\n", 3)
875 self.logger.write("\n")
878 def is_occupied(self, hostname):
879 '''Function that returns True if a job is running on
880 the machine defined by its host and its port.
882 :param hostname (str, int): the pair (host, port)
883 :return: the job that is running on the host,
884 or false if there is no job running on the host.
# Unpacking of `hostname` into host/port is among the missing lines.
889 for jb in self.ljobs:
890 if jb.machine.host == host and jb.machine.port == port:
# Nested is_running() check and the returns are among the missing lines.
895 def update_jobs_states_list(self):
896 '''Function that updates the lists that store the currently
897 running jobs and the jobs that have already finished.
902 jobs_finished_list = []
903 jobs_running_list = []
904 for jb in self.ljobs:
# The is_running() guard line is missing; running jobs are collected first.
906 jobs_running_list.append(jb)
908 if jb.has_finished():
909 jobs_finished_list.append(jb)
911 nb_job_finished_before = len(self._l_jobs_finished)
912 self._l_jobs_finished = jobs_finished_list
913 self._l_jobs_running = jobs_running_list
915 nb_job_finished_now = len(self._l_jobs_finished)
# True when at least one job finished since the previous update.
917 return nb_job_finished_now > nb_job_finished_before
919 def cancel_dependencies_of_failing_jobs(self):
920 '''Function that cancels all the jobs that depend on a failing one.
926 for job in self.ljobs:
927 if job.after is None:
929 father_job = self.find_job_that_has_name(job.after)
930 if father_job is not None and father_job.has_failed():
# The job.cancel() call is among the missing lines.
933 def find_job_that_has_name(self, name):
934 '''Returns the job by its name.
936 :param name str: a job name
937 :return: the job that has the name.
940 for jb in self.ljobs:
943 # the following is executed only if the job was not found
946 def str_of_length(self, text, length):
947 '''Takes a string text of any length and returns
948 the most close string of length "length".
950 :param text str: any string
951 :param length int: a length for the returned string
952 :return: the most close string of length "length"
# Too long: truncate with an ellipsis; otherwise center-pad with spaces.
955 if len(text) > length:
956 text_out = text[:length-3] + '...'
958 diff = length - len(text)
959 before = " " * (diff//2)
# Odd padding goes on the right side.
960 after = " " * (diff//2 + diff%2)
961 text_out = before + text + after
# The `return text_out` line is among the missing lines.
965 def display_status(self, len_col):
966 '''Takes a lenght and construct the display of the current status
967 of the jobs in an array that has a column for each host.
968 It displays the job that is currently running on the host
971 :param len_col int: the size of the column
977 for host_port in self.lhosts:
978 jb = self.is_occupied(host_port)
979 if not jb: # nothing running on the host
980 empty = self.str_of_length("empty", len_col)
981 display_line += "|" + empty
983 display_line += "|" + src.printcolors.printcInfo(
984 self.str_of_length(jb.name, len_col))
# '\r' rewrites the status line in place on the terminal.
986 self.logger.write("\r" + display_line + "|")
# `def run_jobs(self):` header is among the missing lines — docstring onward only.
991 '''The main method. Runs all the jobs on every host.
992 For each host, at a given time, only one job can be running.
993 The jobs that have the field after (that contain the job that has
994 to be run before it) are run after the previous job.
995 This method stops when all the jobs are finished.
1002 self.logger.write(src.printcolors.printcInfo(
1003 _('Executing the jobs :\n')))
# Build the header row of the status table, one column per (host, port).
1005 for host_port in self.lhosts:
1008 if port == 22: # default value
1009 text_line += "|" + self.str_of_length(host, self.len_columns)
1011 text_line += "|" + self.str_of_length(
1012 "("+host+", "+str(port)+")", self.len_columns)
1014 tiret_line = " " + "-"*(len(text_line)-1) + "\n"
1015 self.logger.write(tiret_line)
1016 self.logger.write(text_line + "|\n")
1017 self.logger.write(tiret_line)
1020 # The infinite loop that runs the jobs
1021 l_jobs_not_started = src.deepcopy_list(self.ljobs)
1022 while len(self._l_jobs_finished) != len(self.ljobs):
1023 new_job_start = False
1024 for host_port in self.lhosts:
# Only one job per (host, port) at a time.
1026 if self.is_occupied(host_port):
1029 for jb in l_jobs_not_started:
1030 if (jb.machine.host, jb.machine.port) != host_port:
# Jobs without an 'after' dependency can start immediately
# (the jb.run() call lines are among the missing lines).
1032 if jb.after == None:
1034 l_jobs_not_started.remove(jb)
1035 new_job_start = True
1038 jb_before = self.find_job_that_has_name(jb.after)
1039 if jb_before is None:
1041 msg = _("This job was not launched because its "
1042 "father is not in the jobs list.")
1046 if jb_before.has_finished():
1048 l_jobs_not_started.remove(jb)
1049 new_job_start = True
1051 self.cancel_dependencies_of_failing_jobs()
1052 new_job_finished = self.update_jobs_states_list()
1054 if new_job_start or new_job_finished:
# Refresh the published XML only when something actually changed.
1056 self.gui.update_xml_files(self.ljobs)
1057 # Display the current status
1058 self.display_status(self.len_columns)
1060 # Make sure that the proc is not entirely busy
# The time.sleep(...) line is presumably among the missing lines.
1063 self.logger.write("\n")
1064 self.logger.write(tiret_line)
1065 self.logger.write("\n\n")
1068 self.gui.update_xml_files(self.ljobs)
1069 self.gui.last_update()
1071 def write_all_results(self):
1072 '''Display all the jobs outputs.
1078 for jb in self.ljobs:
1079 self.logger.write(src.printcolors.printcLabel(
1080 "#------- Results for job %s -------#\n" % jb.name))
1082 self.logger.write("\n\n")
# Gui class fragment: the class header and the `__init__` signature are among
# the missing lines of this decimated excerpt. Kept verbatim.
1085 '''Class to manage the the xml data that can be displayed in a browser to
1098 :param xml_dir_path str: The path to the directory where to put
1099 the xml resulting files
1100 :param l_jobs List: the list of jobs that run today
1101 :param l_jobs_not_today List: the list of jobs that do not run today
1102 :param file_boards str: the file path from which to read the
1105 # The logging instance
1106 self.logger = logger
1108 # The prefix to add to the xml files : date_hour
1109 self.prefix = prefix
1111 # The path of the csv files to read to fill the expected boards
1112 self.file_boards = file_boards
# Parse the expected-boards CSV for today's weekday when a path is given.
1114 if file_boards != "":
1115 today = datetime.date.weekday(datetime.date.today())
1116 self.parse_csv_boards(today)
1118 self.d_input_boards = {}
1120 # The path of the global xml file
1121 self.xml_dir_path = xml_dir_path
1122 # Initialize the xml files
1123 self.global_name = "global_report"
1124 xml_global_path = os.path.join(self.xml_dir_path,
1125 self.global_name + ".xml")
1126 self.xml_global_file = src.xmlManager.XmlLogFile(xml_global_path,
1129 # Find history for each job
1131 self.find_history(l_jobs, l_jobs_not_today)
1133 # The xml files that corresponds to the boards.
1134 # {name_board : xml_object}}
1135 self.d_xml_board_files = {}
1137 # Create the lines and columns
1138 self.initialize_boards(l_jobs, l_jobs_not_today)
1140 # Write the xml file
1141 self.update_xml_files(l_jobs)
1143 def add_xml_board(self, name):
1144 '''Add a board to the board list
1145 :param name str: the board name
1147 xml_board_path = os.path.join(self.xml_dir_path, name + ".xml")
1148 self.d_xml_board_files[name] = src.xmlManager.XmlLogFile(
# Pre-create the skeleton nodes each board file always contains.
1151 self.d_xml_board_files[name].add_simple_node("distributions")
1152 self.d_xml_board_files[name].add_simple_node("applications")
1153 self.d_xml_board_files[name].add_simple_node("board", text=name)
1155 def initialize_boards(self, l_jobs, l_jobs_not_today):
1156 '''Get all the first information needed for each file and write the
1157 first version of the files
1158 :param l_jobs List: the list of jobs that run today
1159 :param l_jobs_not_today List: the list of jobs that do not run today
1161 # Get the boards to fill and put it in a dictionary
1162 # {board_name : xml instance corresponding to the board}
1163 for job in l_jobs + l_jobs_not_today:
1165 if (board is not None and
1166 board not in self.d_xml_board_files.keys()):
1167 self.add_xml_board(board)
1169 # Verify that the boards given as input are done
1170 for board in list(self.d_input_boards.keys()):
1171 if board not in self.d_xml_board_files:
1172 self.add_xml_board(board)
1173 root_node = self.d_xml_board_files[board].xmlroot
1174 src.xmlManager.append_node_attrib(root_node,
1175 {"input_file" : self.file_boards})
1177 # Loop over all jobs in order to get the lines and columns for each
1181 for board in self.d_xml_board_files:
1183 d_application[board] = []
1187 for job in l_jobs + l_jobs_not_today:
1189 if (job.machine.host, job.machine.port) not in l_hosts_ports:
1190 l_hosts_ports.append((job.machine.host, job.machine.port))
1192 distrib = job.machine.distribution
1193 application = job.application
1195 board_job = job.board
1198 for board in self.d_xml_board_files:
1199 if board_job == board:
1200 if (distrib not in [None, ''] and
1201 distrib not in d_dist[board]):
1202 d_dist[board].append(distrib)
1203 src.xmlManager.add_simple_node(
1204 self.d_xml_board_files[board].xmlroot.find(
1207 attrib={"name" : distrib})
1209 if board_job == board:
1210 if (application not in [None, ''] and
1211 application not in d_application[board]):
1212 d_application[board].append(application)
1213 src.xmlManager.add_simple_node(
1214 self.d_xml_board_files[board].xmlroot.find(
1218 "name" : application})
1220 # Verify that there are no missing application or distribution in the
1221 # xml board files (regarding the input boards)
1222 for board in self.d_xml_board_files:
1223 l_dist = d_dist[board]
1224 if board not in self.d_input_boards.keys():
1226 for dist in self.d_input_boards[board]["rows"]:
1227 if dist not in l_dist:
1228 src.xmlManager.add_simple_node(
1229 self.d_xml_board_files[board].xmlroot.find(
1232 attrib={"name" : dist})
1233 l_appli = d_application[board]
1234 for appli in self.d_input_boards[board]["columns"]:
1235 if appli not in l_appli:
1236 src.xmlManager.add_simple_node(
1237 self.d_xml_board_files[board].xmlroot.find(
1240 attrib={"name" : appli})
1242 # Initialize the hosts_ports node for the global file
1243 self.xmlhosts_ports = self.xml_global_file.add_simple_node(
1245 for host, port in l_hosts_ports:
1246 host_port = "%s:%i" % (host, port)
1247 src.xmlManager.add_simple_node(self.xmlhosts_ports,
1249 attrib={"name" : host_port})
1251 # Initialize the jobs node in all files
1252 for xml_file in [self.xml_global_file] + list(
1253 self.d_xml_board_files.values()):
1254 xml_jobs = xml_file.add_simple_node("jobs")
1255 # Get the jobs present in the config file but
1256 # that will not be launched today
1257 self.put_jobs_not_today(l_jobs_not_today, xml_jobs)
1259 # add also the infos node
1260 xml_file.add_simple_node("infos",
1261 attrib={"name" : "last update",
1262 "JobsCommandStatus" : "running"})
1264 # and put the history node
1265 history_node = xml_file.add_simple_node("history")
1266 name_board = os.path.basename(xml_file.logFile)[:-len(".xml")]
1267 # search for board files
1268 expression = "^[0-9]{8}_+[0-9]{6}_" + name_board + ".xml$"
1269 oExpr = re.compile(expression)
1270 # Get the list of xml board files that are in the log directory
1271 for file_name in os.listdir(self.xml_dir_path):
1272 if oExpr.search(file_name):
1273 date = os.path.basename(file_name).split("_")[0]
1274 file_path = os.path.join(self.xml_dir_path, file_name)
1275 src.xmlManager.add_simple_node(history_node,
1278 attrib={"date" : date})
1281 # Find in each board the squares that needs to be filled regarding the
1282 # input csv files but that are not covered by a today job
1283 for board in self.d_input_boards.keys():
1284 xml_root_board = self.d_xml_board_files[board].xmlroot
1285 # Find the missing jobs for today
1286 xml_missing = src.xmlManager.add_simple_node(xml_root_board,
1288 for row, column in self.d_input_boards[board]["jobs"]:
1291 if (job.application == column and
1292 job.machine.distribution == row):
1296 src.xmlManager.add_simple_node(xml_missing,
1298 attrib={"distribution" : row,
1299 "application" : column })
1300 # Find the missing jobs not today
1301 xml_missing_not_today = src.xmlManager.add_simple_node(
1303 "missing_jobs_not_today")
1304 for row, column in self.d_input_boards[board]["jobs_not_today"]:
1306 for job in l_jobs_not_today:
1307 if (job.application == column and
1308 job.machine.distribution == row):
1312 src.xmlManager.add_simple_node(xml_missing_not_today,
1314 attrib={"distribution" : row,
1315 "application" : column })
1317 def find_history(self, l_jobs, l_jobs_not_today):
1318 """find, for each job, in the existent xml boards the results for the
1319 job. Store the results in the dictionary self.history = {name_job :
1320 list of (date, status, list links)}
1322 :param l_jobs List: the list of jobs to run today
1323 :param l_jobs_not_today List: the list of jobs that do not run today
# NOTE(review): this chunk is a lossy extraction -- several original lines
# are missing here (e.g. the docstring closer, the initializer of
# l_globalxml used below, and the "try:" matching the "except" below).
# Do not treat the block as syntactically complete.
1325 # load all the history
1326 expression = "^[0-9]{8}_+[0-9]{6}_" + self.global_name + ".xml$"
1327 oExpr = re.compile(expression)
1328 # Get the list of global xml files that are in the log directory
1330 for file_name in os.listdir(self.xml_dir_path):
1331 if oExpr.search(file_name):
1332 file_path = os.path.join(self.xml_dir_path, file_name)
# (a "try:" line belongs here in the original file -- see the except below)
1334 global_xml = src.xmlManager.ReadXmlFile(file_path)
1335 l_globalxml.append(global_xml)
1336 except Exception as e:
# an unreadable board file is skipped with a warning, not fatal
1337 msg = _("\nWARNING: the file %s can not be read, it will be "
1338 "ignored\n%s" % (file_path, e))
1339 self.logger.write("%s\n" % src.printcolors.printcWarning(
1342 # Construct the dictionary self.history
1343 for job in l_jobs + l_jobs_not_today:
# l_links accumulates (date, result, link) tuples for this job
# (its initializer is on a missing line)
1345 for global_xml in l_globalxml:
# the leading "YYYYMMDD" of the file name is the board date
1346 date = os.path.basename(global_xml.filePath).split("_")[0]
1347 global_root_node = global_xml.xmlroot.find("jobs")
1348 job_node = src.xmlManager.find_node_by_attrib(
1354 if job_node.find("remote_log_file_path") is not None:
1355 link = job_node.find("remote_log_file_path").text
1356 res_job = job_node.find("res").text
1357 if link != "nothing":
1358 l_links.append((date, res_job, link))
# dates are YYYYMMDD strings, so a reverse lexical sort puts newest first
1359 l_links = sorted(l_links, reverse=True)
1360 self.history[job.name] = l_links
1362 def put_jobs_not_today(self, l_jobs_not_today, xml_node_jobs):
1363 '''Write, under xml_node_jobs, one <job> element (static description
1364 plus history) for every job that is configured but does not run today.
1366 :param xml_node_jobs etree.Element: the node under which the job nodes are added
1367 :param l_jobs_not_today List: the list of jobs that do not run today
# NOTE(review): lossy extraction -- the docstring closer and the node-name /
# attrib arguments of the two history add_simple_node calls below are on
# missing original lines (1392-1404 range).
1369 for job in l_jobs_not_today:
1370 xmlj = src.xmlManager.add_simple_node(xml_node_jobs,
1372 attrib={"name" : job.name})
1373 src.xmlManager.add_simple_node(xmlj, "application", job.application)
1374 src.xmlManager.add_simple_node(xmlj,
1376 job.machine.distribution)
1377 src.xmlManager.add_simple_node(xmlj, "board", job.board)
1378 src.xmlManager.add_simple_node(xmlj,
1379 "commands", " ; ".join(job.commands))
1380 src.xmlManager.add_simple_node(xmlj, "state", "Not today")
1381 src.xmlManager.add_simple_node(xmlj, "machine", job.machine.name)
1382 src.xmlManager.add_simple_node(xmlj, "host", job.machine.host)
1383 src.xmlManager.add_simple_node(xmlj, "port", str(job.machine.port))
1384 src.xmlManager.add_simple_node(xmlj, "user", job.machine.user)
1385 src.xmlManager.add_simple_node(xmlj, "sat_path",
1386 job.machine.sat_path)
1387 xml_history = src.xmlManager.add_simple_node(xmlj, "history")
# self.history[job.name] is built by find_history: (date, result, link)
# tuples sorted newest first
1388 for i, (date, res_job, link) in enumerate(self.history[job.name]):
1390 # tag the first one (the latest one)
1391 src.xmlManager.add_simple_node(xml_history,
1394 attrib={"date" : date,
1398 src.xmlManager.add_simple_node(xml_history,
1401 attrib={"date" : date,
1405 def parse_csv_boards(self, today):
1406 """ Parse the csv file that describes the boards to produce and fill
1407 the dict d_input_boards with the csv file content
1409 :param today int: the current day of the week
# NOTE(review): lossy extraction -- missing original lines include the
# docstring closer, the "l_read = ..." materialization of the reader, the
# d_boards / rows / jobs / jobs_not_today initializers, and the
# today-in-days test that routes a job into jobs vs jobs_not_today.
1411 # open the csv file and read its content
1413 with open(self.file_boards, 'r') as f:
1414 reader = csv.reader(f,delimiter=CSV_DELIMITER)
1417 # get the delimiter for the boards (empty line)
1418 boards_delimiter = [''] * len(l_read[0])
1419 # Make the list of boards, by splitting with the delimiter
1420 l_boards = [list(y) for x, y in itertools.groupby(l_read,
1421 lambda z: z == boards_delimiter) if not x]
1423 # loop over the csv lists of lines and get the rows, columns and jobs
1425 for input_board in l_boards:
# first cell of the first line is the board name
1427 board_name = input_board[0][0]
# remaining cells of the first line are the column headers (applications)
1430 columns = input_board[0][1:]
1435 for line in input_board[1:]:
1438 for i, square in enumerate(line[1:]):
# each non-empty square lists the week days on which the job runs
1441 days = square.split(DAYS_SEPARATOR)
1442 days = [int(day) for day in days]
1443 job = (row, columns[i])
1447 jobs_not_today.append(job)
1449 d_boards[board_name] = {"rows" : rows,
1450 "columns" : columns,
1452 "jobs_not_today" : jobs_not_today}
1454 self.d_input_boards = d_boards
1456 def update_xml_files(self, l_jobs):
1457 '''Write all the xml files with updated information about the jobs
1459 :param l_jobs List: the list of jobs that run today
# NOTE(review): lossy extraction -- the docstring closer (original line
# ~1460) is missing here.
# Refresh the global file and every per-board file, then flush to disk.
1461 for xml_file in [self.xml_global_file] + list(
1462 self.d_xml_board_files.values()):
1463 self.update_xml_file(l_jobs, xml_file)
1466 self.write_xml_files()
1468 def update_xml_file(self, l_jobs, xml_file):
1469 '''update information about the jobs for the file xml_file
1471 :param l_jobs List: the list of jobs that run today
1472 :param xml_file xmlManager.XmlLogFile: the xml instance to update
# NOTE(review): lossy extraction -- missing original lines include the
# docstring closer, the "for job in l_jobs:" that the loop body below
# belongs to, the default assignments of T0/Tf and their guards, and
# several node-name / attrib arguments of add_simple_node calls.
1475 xml_node_jobs = xml_file.xmlroot.find('jobs')
1476 # Update the job names and status node
1478 # Find the node corresponding to the job and delete it
1479 # in order to recreate it
1480 for xmljob in xml_node_jobs.findall('job'):
1481 if xmljob.attrib['name'] == job.name:
1482 xml_node_jobs.remove(xmljob)
# format begin/end timestamps from the job's epoch times
1486 T0 = time.strftime('%Y-%m-%d %H:%M:%S',
1487 time.localtime(job._T0))
1490 Tf = time.strftime('%Y-%m-%d %H:%M:%S',
1491 time.localtime(job._Tf))
1493 # recreate the job node
1494 xmlj = src.xmlManager.add_simple_node(xml_node_jobs,
1496 attrib={"name" : job.name})
1497 src.xmlManager.add_simple_node(xmlj, "machine", job.machine.name)
1498 src.xmlManager.add_simple_node(xmlj, "host", job.machine.host)
1499 src.xmlManager.add_simple_node(xmlj, "port", str(job.machine.port))
1500 src.xmlManager.add_simple_node(xmlj, "user", job.machine.user)
1501 xml_history = src.xmlManager.add_simple_node(xmlj, "history")
1502 for date, res_job, link in self.history[job.name]:
1503 src.xmlManager.add_simple_node(xml_history,
1506 attrib={"date" : date,
1509 src.xmlManager.add_simple_node(xmlj, "sat_path",
1510 job.machine.sat_path)
1511 src.xmlManager.add_simple_node(xmlj, "application", job.application)
1512 src.xmlManager.add_simple_node(xmlj, "distribution",
1513 job.machine.distribution)
1514 src.xmlManager.add_simple_node(xmlj, "board", job.board)
1515 src.xmlManager.add_simple_node(xmlj, "timeout", str(job.timeout))
1516 src.xmlManager.add_simple_node(xmlj, "commands",
1517 " ; ".join(job.commands))
1518 src.xmlManager.add_simple_node(xmlj, "state", job.get_status())
1519 src.xmlManager.add_simple_node(xmlj, "begin", T0)
1520 src.xmlManager.add_simple_node(xmlj, "end", Tf)
# strip terminal color escapes before storing captured output in xml
1521 src.xmlManager.add_simple_node(xmlj, "out",
1522 src.printcolors.cleancolor(job.out))
1523 src.xmlManager.add_simple_node(xmlj, "err",
1524 src.printcolors.cleancolor(job.err))
1525 src.xmlManager.add_simple_node(xmlj, "res", str(job.res_job))
1526 if len(job.remote_log_files) > 0:
1527 src.xmlManager.add_simple_node(xmlj,
1528 "remote_log_file_path",
1529 job.remote_log_files[0])
1531 src.xmlManager.add_simple_node(xmlj,
1532 "remote_log_file_path",
1534 # Search for the test log if there is any
1535 l_test_log_files = self.find_test_log(job.remote_log_files)
1536 xml_test = src.xmlManager.add_simple_node(xmlj,
1537 "test_log_file_path")
1538 for test_log_path, res_test, nb_fails in l_test_log_files:
1539 test_path_node = src.xmlManager.add_simple_node(xml_test,
1542 test_path_node.attrib["res"] = res_test
1543 test_path_node.attrib["nb_fails"] = nb_fails
1545 xmlafter = src.xmlManager.add_simple_node(xmlj, "after", job.after)
1546 # get the job father
1547 if job.after is not None:
1550 if jb.name == job.after:
# if the father job published a remote log, link to it from "after"
1553 if (job_father is not None and
1554 len(job_father.remote_log_files) > 0):
1555 link = job_father.remote_log_files[0]
1558 src.xmlManager.append_node_attrib(xmlafter, {"link" : link})
1560 # Verify that the job is to be done today regarding the input csv
1562 if job.board and job.board in self.d_input_boards.keys():
1564 for dist, appli in self.d_input_boards[job.board]["jobs"]:
1565 if (job.machine.distribution == dist
1566 and job.application == appli):
1568 src.xmlManager.add_simple_node(xmlj,
1573 src.xmlManager.add_simple_node(xmlj,
# stamp the "infos" node with the wall-clock time of this update
1579 xml_node_infos = xml_file.xmlroot.find('infos')
1580 src.xmlManager.append_node_attrib(xml_node_infos,
1582 datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")})
1585 def find_test_log(self, l_remote_log_files):
1586 '''Find if there is a test log (board) in the remote log files and
1587 the path to it. There can be several test command, so the result is
1590 :param l_remote_log_files List: the list of all remote log files
1591 :return: the list of (test log files path, res of the command)
# NOTE(review): lossy extraction -- the docstring closer, the "res = []"
# initializer and the final "return res" are on missing original lines.
1595 for file_path in l_remote_log_files:
1596 dirname = os.path.basename(os.path.dirname(file_path))
1597 file_name = os.path.basename(file_path)
1598 regex = src.logger.log_all_command_file_expression
# NOTE(review): regex and its compilation are loop-invariant and could be
# hoisted above the loop.
1599 oExpr = re.compile(regex)
# a test log lives in a "TEST" directory and matches the command-log pattern
1600 if dirname == "TEST" and oExpr.search(file_name):
1601 # find the res of the command
1602 prod_node = etree.parse(file_path).getroot().find("product")
1603 res_test = prod_node.attrib["global_res"]
1604 # find the number of fails
1605 testbase_node = prod_node.find("tests").find("testbase")
1606 nb_fails = int(testbase_node.attrib["failed"])
1607 # put the file path, the res of the test command and the number
1608 # of fails in the output
1609 res.append((file_path, res_test, nb_fails))
1613 def last_update(self, finish_status = "finished"):
1614 '''Stamp every xml file (global and per-board) with the final
1616 JobsCommandStatus and flush all the files to disk.
1617 :param finish_status str: the final status to record (default "finished")
# NOTE(review): lossy extraction -- the docstring closer is on a missing
# original line. (The previous docstring documented l_jobs/xml_file
# parameters that this method does not have; corrected above.)
1619 for xml_file in [self.xml_global_file] + list(self.d_xml_board_files.values()):
1620 xml_node_infos = xml_file.xmlroot.find('infos')
1621 src.xmlManager.append_node_attrib(xml_node_infos,
1622 attrib={"JobsCommandStatus" : finish_status})
1624 self.write_xml_files()
1626 def write_xml_file(self, xml_file, stylesheet):
1627 ''' Write one xml file, and a second copy of it whose file name is
# prefixed with self.prefix, both referencing the given xsl stylesheet.
# :param xml_file xmlManager.XmlLogFile: the xml instance to write
# :param stylesheet str: the xsl stylesheet file name to reference
# NOTE(review): lossy extraction -- the docstring closer is on a missing
# original line.
1629 xml_file.write_tree(stylesheet)
1630 file_path = xml_file.logFile
1631 file_dir = os.path.dirname(file_path)
1632 file_name = os.path.basename(file_path)
1633 file_name_with_prefix = self.prefix + "_" + file_name
1634 xml_file.write_tree(stylesheet, os.path.join(file_dir,
1635 file_name_with_prefix))
1637 def write_xml_files(self):
1638 ''' Write the xml files
# (global file with the global stylesheet, each board file with the
# board stylesheet)
# NOTE(review): lossy extraction -- the docstring closer is on a missing
# original line.
1640 self.write_xml_file(self.xml_global_file, STYLESHEET_GLOBAL)
1641 for xml_file in self.d_xml_board_files.values():
1642 self.write_xml_file(xml_file, STYLESHEET_BOARD)
1644 def get_config_file_path(job_config_name, l_cfg_dir):
# Resolve job_config_name to an existing .pyconf path: either an explicit
# existing path ending in ".pyconf", or a file searched in each directory
# of l_cfg_dir (appending ".pyconf" if needed). Returns (found, path).
# NOTE(review): lossy extraction -- the lines assigning the "found" flag
# (and any else/break logic) are on missing original lines.
1646 file_jobs_cfg = None
1647 if os.path.exists(job_config_name) and job_config_name.endswith(".pyconf"):
1649 file_jobs_cfg = job_config_name
1651 for cfg_dir in l_cfg_dir:
1652 file_jobs_cfg = os.path.join(cfg_dir, job_config_name)
1653 if not file_jobs_cfg.endswith('.pyconf'):
1654 file_jobs_cfg += '.pyconf'
1656 if not os.path.exists(file_jobs_cfg):
1661 return found, file_jobs_cfg
1663 def develop_factorized_jobs(config_jobs):
1664 '''Expand factorized job descriptions in place: a job whose "machine"
# field is a list is developed into one job per machine entry.
1666 :param config_jobs Config: the config corresponding to the jobs description
# NOTE(review): lossy extraction -- missing original lines include the
# docstring closer, the "name_job = jb.name" assignment used below, and
# the else: introducing the list-entry branch at 1687.
1668 developed_jobs_list = []
1669 for jb in config_jobs.jobs:
1670 # case where the jobs are not developed
1671 if type(jb.machine) == type(""):
1672 developed_jobs_list.append(jb)
1674 # Case where the jobs must be developed
1676 # machine : ["CO7.2 physique", ["CO6.4 physique", $MONDAY, $TUESDAY ], "FD22"]
1678 for machine in jb.machine:
1679 new_job = src.pyconf.deepCopyMapping(jb)
1680 # case where there is a job on the machine corresponding to all
1681 # days in the when variable.
1682 if type(machine) == type(""):
1683 new_job.machine = machine
1684 new_job.name = name_job + " / " + machine
1686 # case where the days are redefined
1687 new_job.machine = machine[0]
1688 new_job.name = name_job + " / " + machine[0]
1689 new_job.when = machine[1:]
1690 developed_jobs_list.append(new_job)
# replace the factorized list with the fully developed one, in place
1692 config_jobs.jobs = developed_jobs_list
1696 # Describes the command
# NOTE(review): lossy extraction -- the "def description():" line (original
# line 1697) is missing here; the return below is that function's body.
1698 return _("The jobs command launches maintenances that are described"
1699 " in the dedicated jobs configuration file.\n\nexample:\nsat "
1700 "jobs --name my_jobs --publish")
1704 def run(args, runner, logger):
# Entry point of the "sat jobs" command: parse options, merge the job
# configuration files, open the ssh connections, build the xml boards and
# run the jobs.
# NOTE(review): lossy extraction -- many original lines are missing in this
# function (early returns, "try:" lines matching the excepts below, the
# Jobs(...) and Gui(...) argument lists, info list opener, etc.).
1706 (options, args) = parser.parse_args(args)
1708 l_cfg_dir = runner.cfg.PATHS.JOBPATH
1710 # list option : display all the available config files
1712 for cfg_dir in l_cfg_dir:
1713 if not options.no_label:
1714 logger.write("------ %s\n" %
1715 src.printcolors.printcHeader(cfg_dir))
1716 if not os.path.exists(cfg_dir):
1718 for f in sorted(os.listdir(cfg_dir)):
1719 if not f.endswith('.pyconf'):
1722 logger.write("%s\n" % cfilename)
1725 # Make sure the jobs_config option has been called
1726 if not options.jobs_cfg:
1727 message = _("The option --jobs_config is required\n")
1728 src.printcolors.printcError(message)
1731 # Find the file in the directories, unless it is a full path
1732 # merge all in a config
1733 merger = src.pyconf.ConfigMerger()
1734 config_jobs = src.pyconf.Config()
1735 l_conf_files_path = []
1736 for config_file in options.jobs_cfg:
1737 found, file_jobs_cfg = get_config_file_path(config_file, l_cfg_dir)
1739 msg = _("The file configuration %s was not found."
1740 "\nUse the --list option to get the "
1741 "possible files." % config_file)
1742 logger.write("%s\n" % src.printcolors.printcError(msg), 1)
1744 l_conf_files_path.append(file_jobs_cfg)
1745 # Read the config that is in the file
1746 one_config_jobs = src.read_config_from_a_file(file_jobs_cfg)
1747 merger.merge(config_jobs, one_config_jobs)
1750 (_("Platform"), runner.cfg.VARS.dist),
1751 (_("Files containing the jobs configuration"), l_conf_files_path)
1753 src.print_info(logger, info)
# keep only the jobs explicitly requested with --only_jobs
1755 if options.only_jobs:
1756 l_jb = src.pyconf.Sequence()
1757 for jb in config_jobs.jobs:
1758 if jb.name in options.only_jobs:
1760 "Job that was given in only_jobs option parameters\n")
1761 config_jobs.jobs = l_jb
1763 # Parse the config jobs in order to develop all the factorized jobs
1764 develop_factorized_jobs(config_jobs)
1766 # Make a unique file that contain all the jobs in order to use it
1768 name_pyconf = "_".join([os.path.basename(path)[:-len('.pyconf')]
1769 for path in l_conf_files_path]) + ".pyconf"
1770 path_pyconf = src.get_tmp_filename(runner.cfg, name_pyconf)
# FIXME(review): "file()" is the Python-2-only builtin (removed in
# Python 3) and the handle is never closed -- should be
# "with open(path_pyconf, 'w') as f:".
1772 f = file( path_pyconf , 'w')
1773 config_jobs.__save__(f)
1775 # log the paramiko problems
1776 log_dir = src.get_log_path(runner.cfg)
1777 paramiko_log_dir_path = os.path.join(log_dir, "JOBS")
1778 src.ensure_path_exists(paramiko_log_dir_path)
# NOTE(review): if the paramiko import failed at module load, "paramiko"
# is the fallback string "import paramiko impossible" and this call would
# raise AttributeError -- verify an earlier guard exists.
1779 paramiko.util.log_to_file(os.path.join(paramiko_log_dir_path,
1780 logger.txtFileName))
1783 today_jobs = Jobs(runner,
1788 # SSH connection to all machines
1789 today_jobs.ssh_connection_all_machines()
1790 if options.test_connection:
1795 logger.write(src.printcolors.printcInfo(
1796 _("Initialize the xml boards : ")), 5)
1799 # Copy the stylesheets in the log directory
1801 xsl_dir = os.path.join(runner.cfg.VARS.srcDir, 'xsl')
1803 files_to_copy.append(os.path.join(xsl_dir, STYLESHEET_GLOBAL))
1804 files_to_copy.append(os.path.join(xsl_dir, STYLESHEET_BOARD))
1805 files_to_copy.append(os.path.join(xsl_dir, "command.xsl"))
1806 files_to_copy.append(os.path.join(xsl_dir, "running.gif"))
1807 for file_path in files_to_copy:
1808 # OP We use copy instead of copy2 to update the creation date
1809 # So we can clean the LOGS directories easily
1810 shutil.copy(file_path, log_dir)
1812 # Instantiate the Gui in order to produce the xml files that contain all
1816 today_jobs.ljobs_not_today,
1817 runner.cfg.VARS.datehour,
1819 file_boards = options.input_boards)
1821 logger.write(src.printcolors.printcSuccess("OK"), 5)
1822 logger.write("\n\n", 5)
1825 # Display the list of the xml files
1826 logger.write(src.printcolors.printcInfo(("Here is the list of published"
1828 logger.write("%s\n" % gui.xml_global_file.logFile, 4)
1829 for board in gui.d_xml_board_files.keys():
1830 file_path = gui.d_xml_board_files[board].logFile
1831 file_name = os.path.basename(file_path)
1832 logger.write("%s\n" % file_path, 4)
1833 logger.add_link(file_name, "board", 0, board)
1835 logger.write("\n", 4)
1837 today_jobs.gui = gui
1841 # Run all the jobs contained in config_jobs
1842 today_jobs.run_jobs()
1843 except KeyboardInterrupt:
1845 logger.write("\n\n%s\n\n" %
1846 (src.printcolors.printcWarning(_("Forced interruption"))), 1)
1847 except Exception as e:
1848 msg = _("CRITICAL ERROR: The jobs loop has been interrupted\n")
1849 logger.write("\n\n%s\n" % src.printcolors.printcError(msg) )
1850 logger.write("%s\n" % str(e))
# dump the traceback into a temp file so it can be logged as a string
1852 __, __, exc_traceback = sys.exc_info()
1853 fp = tempfile.TemporaryFile()
1854 traceback.print_tb(exc_traceback, file=fp)
1857 logger.write("\nTRACEBACK: %s\n" % stack.replace('"',"'"), 1)
1863 msg = _("Killing the running jobs and trying"
1864 " to get the corresponding logs\n")
1865 logger.write(src.printcolors.printcWarning(msg))
1867 # find the potential not finished jobs and kill them
1868 for jb in today_jobs.ljobs:
1869 if not jb.has_finished():
1872 jb.kill_remote_process()
1873 except Exception as e:
1874 msg = _("Failed to kill job %s: %s\n" % (jb.name, e))
1875 logger.write(src.printcolors.printcWarning(msg))
1876 if jb.res_job != "0":
# record the interruption (or normal finish) status in the boards
1880 today_jobs.gui.last_update(_("Forced interruption"))
1883 today_jobs.gui.last_update()
1884 # Output the results
1885 today_jobs.write_all_results()
1886 # Remove the temporary pyconf file
1887 if os.path.exists(path_pyconf):
1888 os.remove(path_pyconf)