sat jobs: add the possibility to add a prefix to the command launched on the remote...
[tools/sat.git] / commands / jobs.py
1 #!/usr/bin/env python
2 #-*- coding:utf-8 -*-
3 #  Copyright (C) 2010-2013  CEA/DEN
4 #
5 #  This library is free software; you can redistribute it and/or
6 #  modify it under the terms of the GNU Lesser General Public
7 #  License as published by the Free Software Foundation; either
8 #  version 2.1 of the License.
9 #
10 #  This library is distributed in the hope that it will be useful,
11 #  but WITHOUT ANY WARRANTY; without even the implied warranty of
12 #  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13 #  Lesser General Public License for more details.
14 #
15 #  You should have received a copy of the GNU Lesser General Public
16 #  License along with this library; if not, write to the Free Software
17 #  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
18
19 import os
20 import datetime
21 import time
22 import csv
23 import shutil
24 import itertools
25 import re
26 import paramiko
27
28 import src
29
30 STYLESHEET_GLOBAL = "jobs_global_report.xsl"
31 STYLESHEET_BOARD = "jobs_board_report.xsl"
32
33 DAYS_SEPARATOR = ","
34 CSV_DELIMITER = ";"
35
36 parser = src.options.Options()
37
38 parser.add_option('n', 'name', 'string', 'jobs_cfg', 
39                   _('Mandatory: The name of the config file that contains'
40                   ' the jobs configuration'))
41 parser.add_option('o', 'only_jobs', 'list2', 'only_jobs',
42                   _('Optional: the list of jobs to launch, by their name. '))
43 parser.add_option('l', 'list', 'boolean', 'list', 
44                   _('Optional: list all available config files.'))
45 parser.add_option('t', 'test_connection', 'boolean', 'test_connection',
46                   _("Optional: try to connect to the machines. "
47                     "Does not execute the jobs."),
48                   False)
49 parser.add_option('p', 'publish', 'boolean', 'publish',
50                   _("Optional: generate an xml file that can be read in a "
51                     "browser to display the jobs status."),
52                   False)
53 parser.add_option('i', 'input_boards', 'string', 'input_boards', _("Optional: "
54                                 "the path to the csv file that contains "
55                                 "the expected boards."),"")
56 parser.add_option('', 'completion', 'boolean', 'no_label',
57                   _("Optional (internal use): do not print labels. Works only "
58                     "with --list."),
59                   False)
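
# Illustrative command line for this module (the jobs configuration name
# below is made up):
#   sat jobs --name my_jobs_config --publish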
60
61 class Machine(object):
62     '''Class to manage a ssh connection on a machine
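
    Illustrative usage (host, user and path below are made-up values;
    "logger" is assumed to be a src.logger.Logger instance):

        machine = Machine("m1", "host.example.com", "jdoe",
                          sat_path="/home/jdoe/salomeTools")
        msg = machine.connect(logger)
        if machine.successfully_connected(logger):
            (stdin, stdout, stderr) = machine.exec_command("ls", logger)
            machine.close()
        else:
            logger.write(msg)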
63     '''
64     def __init__(self,
65                  name,
66                  host,
67                  user,
68                  port=22,
69                  passwd=None,
70                  sat_path="salomeTools"):
71         self.name = name
72         self.host = host
73         self.port = port
74         self.distribution = None # Will be filled after copying SAT on the machine
75         self.user = user
76         self.password = passwd
77         self.sat_path = sat_path
78         self.ssh = paramiko.SSHClient()
79         self._connection_successful = None
80     
81     def connect(self, logger):
82         '''Initiate the ssh connection to the remote machine
83         
84         :param logger src.logger.Logger: The logger instance 
85         :return: Nothing
86         :rtype: N/A
87         '''
88
89         self._connection_successful = False
90         self.ssh.load_system_host_keys()
91         self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
92         try:
93             self.ssh.connect(self.host,
94                              port=self.port,
95                              username=self.user,
96                              password = self.password)
97         except paramiko.AuthenticationException:
98             message = src.KO_STATUS + _("Authentication failed")
99         except paramiko.BadHostKeyException:
100             message = (src.KO_STATUS + 
101                        _("The server's host key could not be verified"))
102         except paramiko.SSHException:
103             message = ( _("SSHException error connecting or "
104                           "establishing an SSH session"))            
105         except:
106             message = ( _("Error connecting or establishing an SSH session"))
107         else:
108             self._connection_successful = True
109             message = ""
110         return message
111     
112     def successfully_connected(self, logger):
113         '''Verify if the connection to the remote machine has succeeded
114         
115         :param logger src.logger.Logger: The logger instance 
116         :return: True if the connection has succeeded, False otherwise
117         :rtype: bool
118         '''
119         if self._connection_successful == None:
120             message = _("Warning: asking whether the connection to "
121             "(name: %s, host: %s, port: %s, user: %s) is OK whereas no "
122             "connection request has been made" % 
123                         (self.name, self.host, self.port, self.user))
124             logger.write( src.printcolors.printcWarning(message))
125         return self._connection_successful
126
127     def copy_sat(self, sat_local_path, job_file):
128         '''Copy salomeTools to the remote machine in self.sat_path
129         '''
130         res = 0
131         try:
132             # open a sftp connection
133             self.sftp = self.ssh.open_sftp()
134             # Create the sat directory on the remote machine if it does not exist
135             self.mkdir(self.sat_path, ignore_existing=True)
136             # Put sat
137             self.put_dir(sat_local_path, self.sat_path, filters = ['.git'])
138             # put the job configuration file in order to make it reachable 
139             # on the remote machine
140             self.sftp.put(job_file, os.path.join(".salomeTools",
141                                                  "Jobs",
142                                                  ".jobs_command_file.pyconf"))
143         except Exception as e:
144             res = str(e)
145             self._connection_successful = False
146         
147         return res
148         
149     def put_dir(self, source, target, filters = []):
150         ''' Uploads the contents of the source directory to the target path. The
151             target directory needs to exist. All sub-directories in source are 
152             created under target.
153         '''
154         for item in os.listdir(source):
155             if item in filters:
156                 continue
157             source_path = os.path.join(source, item)
158             destination_path = os.path.join(target, item)
159             if os.path.islink(source_path):
160                 linkto = os.readlink(source_path)
161                 try:
162                     self.sftp.symlink(linkto, destination_path)
163                     self.sftp.chmod(destination_path,
164                                     os.stat(source_path).st_mode)
165                 except IOError:
166                     pass
167             else:
168                 if os.path.isfile(source_path):
169                     self.sftp.put(source_path, destination_path)
170                     self.sftp.chmod(destination_path,
171                                     os.stat(source_path).st_mode)
172                 else:
173                     self.mkdir(destination_path, ignore_existing=True)
174                     self.put_dir(source_path, destination_path)
175
176     def mkdir(self, path, mode=511, ignore_existing=False):
177         ''' Augments mkdir by adding an option to not fail 
178             if the folder exists 
179         '''
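        # Note: the default mode, 511, is the decimal value of octal 0o777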
180         try:
181             self.sftp.mkdir(path, mode)
182         except IOError:
183             if ignore_existing:
184                 pass
185             else:
186                 raise       
187     
188     def exec_command(self, command, logger):
189         '''Execute the command on the remote machine
190         
191         :param command str: The command to be run
192         :param logger src.logger.Logger: The logger instance 
193         :return: the stdin, stdout, and stderr of the executing command,
194                  as a 3-tuple
195         :rtype: (paramiko.channel.ChannelFile, paramiko.channel.ChannelFile,
196                 paramiko.channel.ChannelFile)
197         '''
198         try:        
199             # Does not wait for the end of the command
200             (stdin, stdout, stderr) = self.ssh.exec_command(command)
201         except paramiko.SSHException:
202             message = src.KO_STATUS + _(
203                             ": the server failed to execute the command\n")
204             logger.write( src.printcolors.printcError(message))
205             return (None, None, None)
206         except:
207             logger.write( src.printcolors.printcError(src.KO_STATUS + '\n'))
208             return (None, None, None)
209         else:
210             return (stdin, stdout, stderr)
211
212     def close(self):
213         '''Close the ssh connection
214         
215         :rtype: N/A
216         '''
217         self.ssh.close()
218      
219     def write_info(self, logger):
220         '''Prints the information about the machine in the logger 
221            (terminal traces and log file)
222         
223         :param logger src.logger.Logger: The logger instance
224         :return: Nothing
225         :rtype: N/A
226         '''
227         logger.write("host : " + self.host + "\n")
228         logger.write("port : " + str(self.port) + "\n")
229         logger.write("user : " + str(self.user) + "\n")
230         if self.successfully_connected(logger):
231             status = src.OK_STATUS
232         else:
233             status = src.KO_STATUS
234         logger.write("Connection : " + status + "\n\n") 
235
236
237 class Job(object):
238     '''Class to manage one job
239     '''
240     def __init__(self, name, machine, application, board, 
241                  commands, timeout, config, logger, after=None, prefix=None):
242
243         self.name = name
244         self.machine = machine
245         self.after = after
246         self.timeout = timeout
247         self.application = application
248         self.board = board
249         self.config = config
250         self.logger = logger
251         # The list of log files to download from the remote machine 
252         self.remote_log_files = []
253         
254         # The remote command status
255         # -1 means that it has not been launched, 
256         # 0 means success and 1 means fail
257         self.res_job = "-1"
258         self.cancelled = False
259         
260         self._T0 = -1
261         self._Tf = -1
262         self._has_begun = False
263         self._has_finished = False
264         self._has_timouted = False
265         self._stdin = None # Store the command inputs field
266         self._stdout = None # Store the command outputs field
267         self._stderr = None # Store the command errors field
268
269         self.out = ""
270         self.err = ""
271                
272         self.commands = commands
273         self.command = (os.path.join(self.machine.sat_path, "sat") +
274                         " -l " +
275                         os.path.join(self.machine.sat_path,
276                                      "list_log_files.txt") +
277                         " job --jobs_config .jobs_command_file" +
278                         " --name " +
279                         self.name)
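        # An optional prefix coming from the job configuration is prepended to
        # the quoted command; this allows wrapping the remote call, for
        # instance (illustrative) prefix = "bash -l -c" would run:
        #   bash -l -c "<sat_path>/sat -l <...> job --jobs_config .jobs_command_file --name <job name>"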
280         if prefix:
281             self.command = prefix + ' "' + self.command + '"'
282     
283     def get_pids(self):
284         """ Get the pid(s) corresponding to the command that has been launched
285             on the remote machine
286         
287         :return: The list of integers corresponding to the found pids
288         :rtype: List
289         """
290         pids = []
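        # List the remote processes and keep the PID column ($2) of the lines
        # matching the full job command (the grep process itself may also match)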
291         cmd_pid = 'ps aux | grep "' + self.command + '" | awk \'{print $2}\''
292         (_, out_pid, _) = self.machine.exec_command(cmd_pid, self.logger)
293         pids_cmd = out_pid.readlines()
294         pids_cmd = [str(src.only_numbers(pid)) for pid in pids_cmd]
295         pids+=pids_cmd
296         return pids
297     
298     def kill_remote_process(self, wait=1):
299         '''Kills the process on the remote machine.
300         
301         :return: (the output of the kill, the error of the kill)
302         :rtype: (str, str)
303         '''
304         
305         pids = self.get_pids()
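        # Send SIGINT (kill -2) to every pid found for the remote command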
306         cmd_kill = " ; ".join([("kill -2 " + pid) for pid in pids])
307         (_, out_kill, err_kill) = self.machine.exec_command(cmd_kill, 
308                                                             self.logger)
309         time.sleep(wait)
310         return (out_kill, err_kill)
311             
312     def has_begun(self):
313         '''Returns True if the job has already begun
314         
315         :return: True if the job has already begun
316         :rtype: bool
317         '''
318         return self._has_begun
319     
320     def has_finished(self):
321         '''Returns True if the job has already finished 
322            (i.e. all the commands have been executed)
323            If it is finished, the outputs are stored in the fields out and err.
324         
325         :return: True if the job has already finished
326         :rtype: bool
327         '''
328         
329         # If the method has already been called and returned True
330         if self._has_finished:
331             return True
332         
333         # If the job has not begun yet
334         if not self.has_begun():
335             return False
336         
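        # paramiko closes the stdout channel once the remote command has
        # exited, so a closed channel means the command is done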
337         if self._stdout.channel.closed:
338             self._has_finished = True
339             # Store the result outputs
340             self.out += self._stdout.read().decode()
341             self.err += self._stderr.read().decode()
342             # Put end time
343             self._Tf = time.time()
344             # And get the remote command status and log files
345             self.get_log_files()
346         
347         return self._has_finished
348           
349     def get_log_files(self):
350         """Get the log files produced by the command launched 
351            on the remote machine, and put them in the log directory of the
352            user, so that they can be accessed locally.
353         """
354         # Do not get the files if the command is not finished
355         if not self.has_finished():
356             msg = _("Trying to get log files while the job is not finished.")
357             self.logger.write(src.printcolors.printcWarning(msg))
358             return
359         
360         # First get the file that contains the list of log files to get
361         tmp_file_path = src.get_tmp_filename(self.config, "list_log_files.txt")
362         remote_path = os.path.join(self.machine.sat_path, "list_log_files.txt")
363         self.machine.sftp.get(
364                     remote_path,
365                     tmp_file_path)
366         
367         # Read the file and get the result of the command and all the log files
368         # to get
369         fstream_tmp = open(tmp_file_path, "r")
370         file_lines = fstream_tmp.readlines()
371         file_lines = [line.replace("\n", "") for line in file_lines]
372         fstream_tmp.close()
373         os.remove(tmp_file_path)
374         
375         try :
376             # The first line is the result of the command (0 success or 1 fail)
377             self.res_job = file_lines[0]
378         except Exception as e:
379             self.err += _("Unable to get status from remote file %s: %s" % 
380                                                     (remote_path, str(e)))
381
382         for i, job_path_remote in enumerate(file_lines[1:]):
383             try:
384                 # For each command, there are two files to get :
385                 # 1- The xml file describing the command and giving the 
386                 # internal traces.
387                 # 2- The txt file containing the system command traces (like 
388                 # traces produced by the "make" command)
389                 # 3- In case of the test command, there is another file to get :
390                 # the xml board that contain the test results
391                 dirname = os.path.basename(os.path.dirname(job_path_remote))
392                 if dirname != 'OUT' and dirname != 'TEST':
393                     # Case 1-
394                     local_path = os.path.join(os.path.dirname(
395                                                         self.logger.logFilePath),
396                                               os.path.basename(job_path_remote))
397                     if i==0: # The first is the job command
398                         self.logger.add_link(os.path.basename(job_path_remote),
399                                              "job",
400                                              self.res_job,
401                                              self.command) 
402                 elif dirname == 'OUT':
403                     # Case 2-
404                     local_path = os.path.join(os.path.dirname(
405                                                         self.logger.logFilePath),
406                                               'OUT',
407                                               os.path.basename(job_path_remote))
408                 elif dirname == 'TEST':
409                     # Case 3-
410                     local_path = os.path.join(os.path.dirname(
411                                                         self.logger.logFilePath),
412                                               'TEST',
413                                               os.path.basename(job_path_remote))
414                 
415                 # Get the file
416                 if not os.path.exists(local_path):
417                     self.machine.sftp.get(job_path_remote, local_path)
418                 self.remote_log_files.append(local_path)
419             except Exception as e:
420                 self.err += _("Unable to get %s log file from remote: %s" % 
421                                                     (str(job_path_remote),
422                                                      str(e)))
423
424     def has_failed(self):
425         '''Returns True if the job has failed. 
426            A job is considered failed if the machine could not be reached,
427            if the remote command failed, 
428            or if the job finished with a time out.
429         
430         :return: True if the job has failed
431         :rtype: bool
432         '''
433         if not self.has_finished():
434             return False
435         if not self.machine.successfully_connected(self.logger):
436             return True
437         if self.is_timeout():
438             return True
439         if self.res_job == "1":
440             return True
441         return False
442     
443     def cancel(self):
444         """In case of a failing job, one has to cancel every job that depends 
445            on it. This method marks the job as failed; it will not be executed.
446         """
447         if self.cancelled:
448             return
449         self._has_begun = True
450         self._has_finished = True
451         self.cancelled = True
452         self.out += _("This job was not launched because its father has failed.")
453         self.err += _("This job was not launched because its father has failed.")
454
455     def is_running(self):
456         '''Returns True if the job commands are running 
457         
458         :return: True if the job is running
459         :rtype: bool
460         '''
461         return self.has_begun() and not self.has_finished()
462
463     def is_timeout(self):
464         '''Returns True if the job commands have finished with a timeout 
465         
466         :return: True if the job has finished with timeout
467         :rtype: bool
468         '''
469         return self._has_timouted
470
471     def time_elapsed(self):
472         """Get the time elapsed since the job was launched
473         
474         :return: The number of seconds
475         :rtype: int
476         """
477         if not self.has_begun():
478             return -1
479         T_now = time.time()
480         return T_now - self._T0
481     
482     def check_time(self):
483         """Verify that the job has not exceeded its timeout.
484            If it has, kill the remote command and consider the job as finished.
485         """
486         if not self.has_begun():
487             return
488         if self.time_elapsed() > self.timeout:
489             self._has_finished = True
490             self._has_timouted = True
491             self._Tf = time.time()
492             self.get_pids()
493             (out_kill, _) = self.kill_remote_process()
494             self.out += "TIMEOUT \n" + out_kill.read().decode()
495             self.err += "TIMEOUT : %s seconds elapsed\n" % str(self.timeout)
496             try:
497                 self.get_log_files()
498             except Exception as e:
499                 self.err += _("Unable to get remote log files: %s" % e)
500             
501     def total_duration(self):
502         """Give the total duration of the job
503         
504         :return: the total duration of the job in seconds
505         :rtype: int
506         """
507         return self._Tf - self._T0
508         
509     def run(self):
510         """Launch the job by executing the remote command.
511         """
512         
513         # Prevent multiple runs
514         if self.has_begun():
515             msg = _("Warning: A job can only be launched one time")
516             msg2 = _("Trying to launch the job \"%s\" whereas it has "
517                      "already been launched." % self.name)
518             self.logger.write(src.printcolors.printcWarning("%s\n%s\n" % (msg,
519                                                                         msg2)))
520             return
521         
522         # Do not execute the command if the machine could not be reached
523         if not self.machine.successfully_connected(self.logger):
524             self._has_finished = True
525             self.out = "N/A"
526             self.err += ("Connection to machine (name : %s, host: %s, port:"
527                         " %s, user: %s) has failed\nUse the log command "
528                         "to get more information."
529                         % (self.machine.name,
530                            self.machine.host,
531                            self.machine.port,
532                            self.machine.user))
533         else:
534             # Usual case : Launch the command on remote machine
535             self._T0 = time.time()
536             self._stdin, self._stdout, self._stderr = self.machine.exec_command(
537                                                                   self.command,
538                                                                   self.logger)
539             # If the results are not initialized, finish the job
540             if (self._stdin, self._stdout, self._stderr) == (None, None, None):
541                 self._has_finished = True
542                 self._Tf = time.time()
543                 self.out += "N/A"
544                 self.err += "The server failed to execute the command"
545         
546         # Put the beginning flag to true.
547         self._has_begun = True
548     
549     def write_results(self):
550         """Display on the terminal all the job's information
551         """
552         self.logger.write("name : " + self.name + "\n")
553         if self.after:
554             self.logger.write("after : %s\n" % self.after)
555         self.logger.write("Time elapsed : %4imin %2is \n" % 
556                      (self.total_duration()//60 , self.total_duration()%60))
557         if self._T0 != -1:
558             self.logger.write("Begin time : %s\n" % 
559                          time.strftime('%Y-%m-%d %H:%M:%S', 
560                                        time.localtime(self._T0)) )
561         if self._Tf != -1:
562             self.logger.write("End time   : %s\n\n" % 
563                          time.strftime('%Y-%m-%d %H:%M:%S', 
564                                        time.localtime(self._Tf)) )
565         
566         machine_head = "Information about the connection :\n"
567         underline = (len(machine_head) - 2) * "-"
568         self.logger.write(src.printcolors.printcInfo(
569                                                 machine_head+underline+"\n"))
570         self.machine.write_info(self.logger)
571         
572         self.logger.write(src.printcolors.printcInfo("out : \n"))
573         if self.out == "":
574             self.logger.write("Unable to get output\n")
575         else:
576             self.logger.write(self.out + "\n")
577         self.logger.write(src.printcolors.printcInfo("err : \n"))
578         self.logger.write(self.err + "\n")
579         
580     def get_status(self):
581         """Get the status of the job (used by the Gui for xml display)
582         
583         :return: The current status of the job
584         :rtype: String
585         """
586         if not self.machine.successfully_connected(self.logger):
587             return "SSH connection KO"
588         if not self.has_begun():
589             return "Not launched"
590         if self.cancelled:
591             return "Cancelled"
592         if self.is_running():
593             return "running since " + time.strftime('%Y-%m-%d %H:%M:%S',
594                                                     time.localtime(self._T0))        
595         if self.has_finished():
596             if self.is_timeout():
597                 return "Timeout since " + time.strftime('%Y-%m-%d %H:%M:%S',
598                                                     time.localtime(self._Tf))
599             return "Finished since " + time.strftime('%Y-%m-%d %H:%M:%S',
600                                                      time.localtime(self._Tf))
601     
602 class Jobs(object):
603     '''Class to manage the jobs to be run
604     '''
605     def __init__(self,
606                  runner,
607                  logger,
608                  job_file_path,
609                  config_jobs,
610                  lenght_columns = 20):
611         # The jobs configuration
612         self.cfg_jobs = config_jobs
613         self.job_file_path = job_file_path
614         # The machines that will be used today
615         self.lmachines = []
616         # The list of machines (host, port) that will be used today 
617         # (a same host can have several machine instances since there 
618         # can be several ssh parameters) 
619         self.lhosts = []
620         # The jobs to be launched today 
621         self.ljobs = []
622         # The jobs that will not be launched today
623         self.ljobs_not_today = []
624         self.runner = runner
625         self.logger = logger
626         self.len_columns = lenght_columns
627         
628         # the list of jobs that have not been run yet
629         self._l_jobs_not_started = []
630         # the list of jobs that have already run 
631         self._l_jobs_finished = []
632         # the list of jobs that are running 
633         self._l_jobs_running = [] 
634                 
635         self.determine_jobs_and_machines()
636     
637     def define_job(self, job_def, machine):
638         '''Takes a pyconf job definition and a machine (from class machine)
639            and returns the job instance corresponding to the definition.
640         
641         :param job_def src.config.Mapping: a job definition 
642         :param machine machine: the machine on which the job will run
643         :return: The corresponding job in a job class instance
644         :rtype: job
645         '''
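        # Illustrative shape of a pyconf job definition handled here (all the
        # values below are made up):
        #   {
        #    name : "job_example"
        #    machine : "machine_example"
        #    application : "APPLICATION_example"
        #    board : "board_example"
        #    commands : ["prepare", "compile"]
        #    timeout : 7200
        #    after : "job_run_before_example"
        #    prefix : "bash -l -c"
        #   }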
646         name = job_def.name
647         cmmnds = job_def.commands
648         if not "timeout" in job_def:
649             timeout = 4*60*60 # default timeout = 4h
650         else:
651             timeout = job_def.timeout
652         after = None
653         if 'after' in job_def:
654             after = job_def.after
655         application = None
656         if 'application' in job_def:
657             application = job_def.application
658         board = None
659         if 'board' in job_def:
660             board = job_def.board
661         prefix = None
662         if "prefix" in job_def:
663             prefix = job_def.prefix
664             
665         return Job(name,
666                    machine,
667                    application,
668                    board,
669                    cmmnds,
670                    timeout,
671                    self.runner.cfg,
672                    self.logger,
673                    after = after,
674                    prefix = prefix)
675     
676     def determine_jobs_and_machines(self):
677         '''Function that reads the pyconf jobs definition and instantiates all
678            the machines and jobs to be done today.
679
680         :return: Nothing
681         :rtype: N/A
682         '''
683         today = datetime.date.weekday(datetime.date.today())
684         host_list = []
685                
686         for job_def in self.cfg_jobs.jobs :
687                 
688             if not "machine" in job_def:
689                 msg = _('WARNING: The job "%s" does not have the key '
690                        '"machine", this job is ignored.\n\n' % job_def.name)
691                 self.logger.write(src.printcolors.printcWarning(msg))
692                 continue
693             name_machine = job_def.machine
694             
695             a_machine = None
696             for mach in self.lmachines:
697                 if mach.name == name_machine:
698                     a_machine = mach
699                     break
700             
701             if a_machine == None:
702                 for machine_def in self.cfg_jobs.machines:
703                     if machine_def.name == name_machine:
704                         if 'host' not in machine_def:
705                             host = self.runner.cfg.VARS.hostname
706                         else:
707                             host = machine_def.host
708
709                         if 'user' not in machine_def:
710                             user = self.runner.cfg.VARS.user
711                         else:
712                             user = machine_def.user
713
714                         if 'port' not in machine_def:
715                             port = 22
716                         else:
717                             port = machine_def.port
718             
719                         if 'password' not in machine_def:
720                             passwd = None
721                         else:
722                             passwd = machine_def.password    
723                             
724                         if 'sat_path' not in machine_def:
725                             sat_path = "salomeTools"
726                         else:
727                             sat_path = machine_def.sat_path
728                         
729                         a_machine = Machine(
730                                             machine_def.name,
731                                             host,
732                                             user,
733                                             port=port,
734                                             passwd=passwd,
735                                             sat_path=sat_path
736                                             )
737                         
738                         self.lmachines.append(a_machine)
739                         if (host, port) not in host_list:
740                             host_list.append((host, port))
741                 
742                 if a_machine == None:
743                     msg = _("WARNING: The job \"%(job_name)s\" requires the "
744                             "machine \"%(machine_name)s\" but this machine "
745                             "is not defined in the configuration file.\n"
746                             "The job will not be launched"
747                             % {"job_name" : job_def.name,
                                   "machine_name" : name_machine})
748                     self.logger.write(src.printcolors.printcWarning(msg))
                        continue

749             a_job = self.define_job(job_def, a_machine)
750                 
751             if today in job_def.when:    
752                 self.ljobs.append(a_job)
753             else: # today not in job_def.when
754                 self.ljobs_not_today.append(a_job)
755                
756         self.lhosts = host_list
757         
758     def ssh_connection_all_machines(self, pad=50):
759         '''Function that does the ssh connection to every machine 
760            to be used today.
761
762         :return: Nothing
763         :rtype: N/A
764         '''
765         self.logger.write(src.printcolors.printcInfo((
766                         "Establishing connection with all the machines :\n")))
767         for machine in self.lmachines:
768             # little algorithm in order to display traces
769             begin_line = (_("Connection to %s: " % machine.name))
770             if pad - len(begin_line) < 0:
771                 endline = " "
772             else:
773                 endline = (pad - len(begin_line)) * "." + " "
774             
775             step = "SSH connection"
776             self.logger.write( begin_line + endline + step)
777             self.logger.flush()
778             # the call to the method that initiates the ssh connection
779             msg = machine.connect(self.logger)
780             
781             # Copy salomeTools to the remote machine
782             if machine.successfully_connected(self.logger):
783                 step = _("Copy SAT")
784                 self.logger.write('\r%s%s%s' % (begin_line, endline, 20 * " "),3)
785                 self.logger.write('\r%s%s%s' % (begin_line, endline, step), 3)
786                 self.logger.flush()
787                 res_copy = machine.copy_sat(self.runner.cfg.VARS.salometoolsway,
788                                             self.job_file_path)
789                 # get the remote machine distribution using a sat command
790                 (__, out_dist, __) = machine.exec_command(
791                                 os.path.join(machine.sat_path,
792                                     "sat config --value VARS.dist --no_label"),
793                                 self.logger)
794                 machine.distribution = out_dist.read().decode().replace("\n",
795                                                                         "")
796                 # Print the status of the copy
797                 if res_copy == 0:
798                     self.logger.write('\r%s' % 
799                                 ((len(begin_line)+len(endline)+20) * " "), 3)
800                     self.logger.write('\r%s%s%s' % 
801                         (begin_line, 
802                          endline, 
803                          src.printcolors.printc(src.OK_STATUS)), 3)
804                 else:
805                     self.logger.write('\r%s' % 
806                             ((len(begin_line)+len(endline)+20) * " "), 3)
807                     self.logger.write('\r%s%s%s %s' % 
808                         (begin_line,
809                          endline,
810                          src.printcolors.printc(src.KO_STATUS),
811                          _("Copy of SAT failed: %s" % res_copy)), 3)
812             else:
813                 self.logger.write('\r%s' % 
814                                   ((len(begin_line)+len(endline)+20) * " "), 3)
815                 self.logger.write('\r%s%s%s %s' % 
816                     (begin_line,
817                      endline,
818                      src.printcolors.printc(src.KO_STATUS),
819                      msg), 3)
820             self.logger.write("\n", 3)
821                 
822         self.logger.write("\n")
823         
824
825     def is_occupied(self, hostname):
826         '''Function that returns the job that is currently running on 
827            the machine defined by its host and its port, if any.
828         
829         :param hostname (str, int): the pair (host, port)
830         :return: the job that is running on the host, 
831                 or false if there is no job running on the host. 
832         :rtype: job / bool
833         '''
834         host = hostname[0]
835         port = hostname[1]
836         for jb in self.ljobs:
837             if jb.machine.host == host and jb.machine.port == port:
838                 if jb.is_running():
839                     return jb
840         return False
841     
842     def update_jobs_states_list(self):
843         '''Function that updates the lists that store the currently
844            running jobs and the jobs that have already finished.
845         
846         :return: True if at least one more job has finished since the last call. 
847         :rtype: bool
848         '''
849         jobs_finished_list = []
850         jobs_running_list = []
851         for jb in self.ljobs:
852             if jb.is_running():
853                 jobs_running_list.append(jb)
854                 jb.check_time()
855             if jb.has_finished():
856                 jobs_finished_list.append(jb)
857         
858         nb_job_finished_before = len(self._l_jobs_finished)
859         self._l_jobs_finished = jobs_finished_list
860         self._l_jobs_running = jobs_running_list
861         
862         nb_job_finished_now = len(self._l_jobs_finished)
863         
864         return nb_job_finished_now > nb_job_finished_before
865     
866     def cancel_dependencies_of_failing_jobs(self):
867         '''Function that cancels all the jobs that depend on a failing one.
868         
869         :return: Nothing. 
870         :rtype: N/A
871         '''
872         
873         for job in self.ljobs:
874             if job.after is None:
875                 continue
876             father_job = self.find_job_that_has_name(job.after)
877             if father_job is not None and father_job.has_failed():
878                 job.cancel()
879     
880     def find_job_that_has_name(self, name):
881         '''Returns the job by its name.
882         
883         :param name str: a job name
884         :return: the job that has the name. 
885         :rtype: job
886         '''
887         for jb in self.ljobs:
888             if jb.name == name:
889                 return jb
890         # the following is executed only if the job was not found
891         return None
892     
893     def str_of_length(self, text, length):
894         '''Takes a string text of any length and returns 
895            a string of exactly "length" characters, truncated or padded.
896         
897         :param text str: any string
898         :param length int: a length for the returned string
899         :return: the string adjusted to length "length"
900         :rtype: str
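
           Illustrative examples (made-up values):

               str_of_length("a_very_long_job_name", 10)  gives 'a_very_...'
               str_of_length("abc", 7)                    gives '  abc  '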
901         '''
902         if len(text) > length:
903             text_out = text[:length-3] + '...'
904         else:
905             diff = length - len(text)
906             before = " " * (diff//2)
907             after = " " * (diff//2 + diff%2)
908             text_out = before + text + after
909             
910         return text_out
911     
912     def display_status(self, len_col):
913         '''Takes a length and constructs the display of the current status 
914            of the jobs in an array that has a column for each host.
915            It displays the job that is currently running on the host 
916            of the column.
917         
918         :param len_col int: the size of the column 
919         :return: Nothing
920         :rtype: N/A
921         '''
922         
923         display_line = ""
924         for host_port in self.lhosts:
925             jb = self.is_occupied(host_port)
926             if not jb: # nothing running on the host
927                 empty = self.str_of_length("empty", len_col)
928                 display_line += "|" + empty 
929             else:
930                 display_line += "|" + src.printcolors.printcInfo(
931                                         self.str_of_length(jb.name, len_col))
932         
933         self.logger.write("\r" + display_line + "|")
934         self.logger.flush()
935     
936
937     def run_jobs(self):
938         '''The main method. Runs all the jobs on every host. 
939            For each host, at a given time, only one job can be running.
940            The jobs that have the field after (which contains the name of the
941            job that has to run before them) are run after that job.
942            This method stops when all the jobs are finished.
943         
944         :return: Nothing
945         :rtype: N/A
946         '''
947
948         # Print header
949         self.logger.write(src.printcolors.printcInfo(
950                                                 _('Executing the jobs :\n')))
951         text_line = ""
952         for host_port in self.lhosts:
953             host = host_port[0]
954             port = host_port[1]
955             if port == 22: # default value
956                 text_line += "|" + self.str_of_length(host, self.len_columns)
957             else:
958                 text_line += "|" + self.str_of_length(
959                                 "("+host+", "+str(port)+")", self.len_columns)
960         
961         tiret_line = " " + "-"*(len(text_line)-1) + "\n"
962         self.logger.write(tiret_line)
963         self.logger.write(text_line + "|\n")
964         self.logger.write(tiret_line)
965         self.logger.flush()
966         
967         # The infinite loop that runs the jobs
968         l_jobs_not_started = src.deepcopy_list(self.ljobs)
969         while len(self._l_jobs_finished) != len(self.ljobs):
970             new_job_start = False
971             for host_port in self.lhosts:
972                 
973                 if self.is_occupied(host_port):
974                     continue
975              
976                 for jb in l_jobs_not_started:
977                     if (jb.machine.host, jb.machine.port) != host_port:
978                         continue 
979                     if jb.after == None:
980                         jb.run()
981                         l_jobs_not_started.remove(jb)
982                         new_job_start = True
983                         break
984                     else:
985                         jb_before = self.find_job_that_has_name(jb.after)
986                         if jb_before is None:
987                             jb.cancel()
988                             msg = _("This job was not launched because its "
989                                     "father is not in the jobs list.")
990                             jb.out = msg
991                             jb.err = msg
992                             break
993                         if jb_before.has_finished():
994                             jb.run()
995                             l_jobs_not_started.remove(jb)
996                             new_job_start = True
997                             break
998             self.cancel_dependencies_of_failing_jobs()
999             new_job_finished = self.update_jobs_states_list()
1000             
1001             if new_job_start or new_job_finished:
1002                 if self.gui:
1003                     self.gui.update_xml_files(self.ljobs)            
1004                 # Display the current status     
1005                 self.display_status(self.len_columns)
1006             
1007             # Make sure that the proc is not entirely busy
1008             time.sleep(0.001)
1009         
1010         self.logger.write("\n")    
1011         self.logger.write(tiret_line)                   
1012         self.logger.write("\n\n")
1013         
1014         if self.gui:
1015             self.gui.update_xml_files(self.ljobs)
1016             self.gui.last_update()
1017
1018     def write_all_results(self):
1019         '''Display all the jobs outputs.
1020         
1021         :return: Nothing
1022         :rtype: N/A
1023         '''
1024         
1025         for jb in self.ljobs:
1026             self.logger.write(src.printcolors.printcLabel(
1027                         "#------- Results for job %s -------#\n" % jb.name))
1028             jb.write_results()
1029             self.logger.write("\n\n")
1030
1031 class Gui(object):
1032     '''Class to manage the xml data that can be displayed in a browser to
1033        see the jobs states
1034     '''
1035    
1036     def __init__(self, xml_dir_path, l_jobs, l_jobs_not_today, prefix, file_boards=""):
1037         '''Initialization
1038         
1039         :param xml_dir_path str: The path to the directory where to put 
1040                                  the xml resulting files
1041         :param l_jobs List: the list of jobs that run today
1042         :param l_jobs_not_today List: the list of jobs that do not run today
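        :param prefix str: the prefix (date_hour) to add to the xml file names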
1043         :param file_boards str: the file path from which to read the
1044                                    expected boards
1045         '''
1046         # The prefix to add to the xml files : date_hour
1047         self.prefix = prefix
1048         
1049         # The path of the csv files to read to fill the expected boards
1050         self.file_boards = file_boards
1051         
1052         if file_boards != "":
1053             today = datetime.date.weekday(datetime.date.today())
1054             self.parse_csv_boards(today)
1055         else:
1056             self.d_input_boards = {}
1057         
1058         # The path of the global xml file
1059         self.xml_dir_path = xml_dir_path
1060         # Initialize the xml files
1061         self.global_name = "global_report"
1062         xml_global_path = os.path.join(self.xml_dir_path,
1063                                        self.global_name + ".xml")
1064         self.xml_global_file = src.xmlManager.XmlLogFile(xml_global_path,
1065                                                          "JobsReport")
1066
1067         # Find history for each job
1068         self.history = {}
1069         self.find_history(l_jobs, l_jobs_not_today)
1070
1071         # The xml files that correspond to the boards.
1072         # {name_board : xml_object}
1073         self.d_xml_board_files = {}
1074
1075         # Create the lines and columns
1076         self.initialize_boards(l_jobs, l_jobs_not_today)
1077         
1078         # Write the xml file
1079         self.update_xml_files(l_jobs)
1080     
1081     def add_xml_board(self, name):
1082         '''Add a board to the board list   
1083         :param name str: the board name
1084         '''
1085         xml_board_path = os.path.join(self.xml_dir_path, name + ".xml")
1086         self.d_xml_board_files[name] =  src.xmlManager.XmlLogFile(
1087                                                     xml_board_path,
1088                                                     "JobsReport")
1089         self.d_xml_board_files[name].add_simple_node("distributions")
1090         self.d_xml_board_files[name].add_simple_node("applications")
1091         self.d_xml_board_files[name].add_simple_node("board", text=name)
1092            
1093     def initialize_boards(self, l_jobs, l_jobs_not_today):
1094         '''Get all the first information needed for each file and write the 
1095            first version of the files   
1096         :param l_jobs List: the list of jobs that run today
1097         :param l_jobs_not_today List: the list of jobs that do not run today
1098         '''
1099         # Get the boards to fill and put it in a dictionary
1100         # {board_name : xml instance corresponding to the board}
1101         for job in l_jobs + l_jobs_not_today:
1102             board = job.board
1103             if (board is not None and 
1104                                 board not in self.d_xml_board_files.keys()):
1105                 self.add_xml_board(board)
1106         
1107         # Verify that the boards given as input are done
1108         for board in list(self.d_input_boards.keys()):
1109             if board not in self.d_xml_board_files:
1110                 self.add_xml_board(board)
1111             root_node = self.d_xml_board_files[board].xmlroot
1112             src.xmlManager.append_node_attrib(root_node, 
1113                                               {"input_file" : self.file_boards})
1114         
1115         # Loop over all jobs in order to get the lines and columns for each 
1116         # xml file
1117         d_dist = {}
1118         d_application = {}
1119         for board in self.d_xml_board_files:
1120             d_dist[board] = []
1121             d_application[board] = []
1122             
1123         l_hosts_ports = []
1124             
1125         for job in l_jobs + l_jobs_not_today:
1126             
1127             if (job.machine.host, job.machine.port) not in l_hosts_ports:
1128                 l_hosts_ports.append((job.machine.host, job.machine.port))
1129                 
1130             distrib = job.machine.distribution
1131             application = job.application
1132             
1133             board_job = job.board
1134             if board_job is None:
1135                 continue
1136             for board in self.d_xml_board_files:
1137                 if board_job == board:
1138                     if distrib is not None and distrib not in d_dist[board]:
1139                         d_dist[board].append(distrib)
1140                         src.xmlManager.add_simple_node(
1141                             self.d_xml_board_files[board].xmlroot.find(
1142                                                             'distributions'),
1143                                                    "dist",
1144                                                    attrib={"name" : distrib})
1145                     
1146                 if board_job == board:
1147                     if (application is not None and 
1148                                     application not in d_application[board]):
1149                         d_application[board].append(application)
1150                         src.xmlManager.add_simple_node(
1151                             self.d_xml_board_files[board].xmlroot.find(
1152                                                                 'applications'),
1153                                                    "application",
1154                                                    attrib={
1155                                                         "name" : application})
1156         
1157         # Verify that there are no missing application or distribution in the
1158         # xml board files (regarding the input boards)
1159         for board in self.d_xml_board_files:
1160             l_dist = d_dist[board]
1161             if board not in self.d_input_boards.keys():
1162                 continue
1163             for dist in self.d_input_boards[board]["rows"]:
1164                 if dist not in l_dist:
1165                     src.xmlManager.add_simple_node(
1166                             self.d_xml_board_files[board].xmlroot.find(
1167                                                             'distributions'),
1168                                                    "dist",
1169                                                    attrib={"name" : dist})
1170             l_appli = d_application[board]
1171             for appli in self.d_input_boards[board]["columns"]:
1172                 if appli not in l_appli:
1173                     src.xmlManager.add_simple_node(
1174                             self.d_xml_board_files[board].xmlroot.find(
1175                                                                 'applications'),
1176                                                    "application",
1177                                                    attrib={"name" : appli})
1178                 
1179         # Initialize the hosts_ports node for the global file
1180         self.xmlhosts_ports = self.xml_global_file.add_simple_node(
1181                                                                 "hosts_ports")
1182         for host, port in l_hosts_ports:
1183             host_port = "%s:%i" % (host, port)
1184             src.xmlManager.add_simple_node(self.xmlhosts_ports,
1185                                            "host_port",
1186                                            attrib={"name" : host_port})
1187         
1188         # Initialize the jobs node in all files
1189         for xml_file in [self.xml_global_file] + list(
1190                                             self.d_xml_board_files.values()):
1191             xml_jobs = xml_file.add_simple_node("jobs")      
1192             # Get the jobs present in the config file but 
1193             # that will not be launched today
1194             self.put_jobs_not_today(l_jobs_not_today, xml_jobs)
1195             
1196             # add also the infos node
1197             xml_file.add_simple_node("infos",
1198                                      attrib={"name" : "last update",
1199                                              "JobsCommandStatus" : "running"})
1200             
1201             # and put the history node
1202             history_node = xml_file.add_simple_node("history")
1203             name_board = os.path.basename(xml_file.logFile)[:-len(".xml")]
1204             # search for board files
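            # (history files are expected to be named
            #  <YYYYMMDD>_<HHMMSS>_<board name>.xml)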
1205             expression = "^[0-9]{8}_+[0-9]{6}_" + name_board + ".xml$"
1206             oExpr = re.compile(expression)
1207             # Get the list of xml board files that are in the log directory
1208             for file_name in os.listdir(self.xml_dir_path):
1209                 if oExpr.search(file_name):
1210                     date = os.path.basename(file_name).split("_")[0]
1211                     file_path = os.path.join(self.xml_dir_path, file_name)
1212                     src.xmlManager.add_simple_node(history_node,
1213                                                    "link",
1214                                                    text=file_path,
1215                                                    attrib={"date" : date})      
1216             
1217                 
1218         # Find in each board the squares that need to be filled regarding the
1219         # input csv files but that are not covered by a today job
1220         for board in self.d_input_boards.keys():
1221             xml_root_board = self.d_xml_board_files[board].xmlroot
1222             # Find the missing jobs for today
1223             xml_missing = src.xmlManager.add_simple_node(xml_root_board,
1224                                                  "missing_jobs")
1225             for row, column in self.d_input_boards[board]["jobs"]:
1226                 found = False
1227                 for job in l_jobs:
1228                     if (job.application == column and 
1229                         job.machine.distribution == row):
1230                         found = True
1231                         break
1232                 if not found:
1233                     src.xmlManager.add_simple_node(xml_missing,
1234                                             "job",
1235                                             attrib={"distribution" : row,
1236                                                     "application" : column })
1237             # Find the missing jobs not today
1238             xml_missing_not_today = src.xmlManager.add_simple_node(
1239                                                  xml_root_board,
1240                                                  "missing_jobs_not_today")
1241             for row, column in self.d_input_boards[board]["jobs_not_today"]:
1242                 found = False
1243                 for job in l_jobs_not_today:
1244                     if (job.application == column and 
1245                         job.machine.distribution == row):
1246                         found = True
1247                         break
1248                 if not found:
1249                     src.xmlManager.add_simple_node(xml_missing_not_today,
1250                                             "job",
1251                                             attrib={"distribution" : row,
1252                                                     "application" : column })
1253
1254     def find_history(self, l_jobs, l_jobs_not_today):
1255         """Find, for each job, the results stored in the existing xml boards.
1256            Store the results in the dictionary self.history = {job_name :
1257            list of (date, result, link)}
1258         
1259         :param l_jobs List: the list of jobs to run today   
1260         :param l_jobs_not_today List: the list of jobs that do not run today
1261         """
1262         # load all the history
1263         expression = "^[0-9]{8}_+[0-9]{6}_" + self.global_name + ".xml$"
1264         oExpr = re.compile(expression)
1265         # Get the list of global xml that are in the log directory
1266         l_globalxml = []
1267         for file_name in os.listdir(self.xml_dir_path):
1268             if oExpr.search(file_name):
1269                 file_path = os.path.join(self.xml_dir_path, file_name)
1270                 global_xml = src.xmlManager.ReadXmlFile(file_path)
1271                 l_globalxml.append(global_xml)
1272
1273         # Construct the dictionary self.history
1274         for job in l_jobs + l_jobs_not_today:
1275             l_links = []
1276             for global_xml in l_globalxml:
1277                 date = os.path.basename(global_xml.filePath).split("_")[0]
1278                 global_root_node = global_xml.xmlroot.find("jobs")
1279                 job_node = src.xmlManager.find_node_by_attrib(
1280                                                               global_root_node,
1281                                                               "job",
1282                                                               "name",
1283                                                               job.name)
1284                 if job_node is not None:
1285                     if job_node.find("remote_log_file_path") is not None:
1286                         link = job_node.find("remote_log_file_path").text
1287                         res_job = job_node.find("res").text
1288                         if link != "nothing":
1289                             l_links.append((date, res_job, link))
1290             l_links = sorted(l_links, reverse=True)
1291             self.history[job.name] = l_links
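        # The resulting structure is, schematically (hypothetical values):
        #   self.history = {
        #       "job_example": [("20240131", "0", "/remote/path/log.xml"), ...],
        #   }
        # i.e. for each job name, a list of (date, result, link) tuples sorted
        # from the most recent board to the oldest one.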
1292   
1293     def put_jobs_not_today(self, l_jobs_not_today, xml_node_jobs):
1294         '''Add to the node xml_node_jobs the jobs that are not launched today,
1295            with their configuration and history
1296
1297         :param xml_node_jobs etree.Element: the "jobs" node to fill
1298         :param l_jobs_not_today List: the list of jobs that do not run today
1299         '''
1300         for job in l_jobs_not_today:
1301             xmlj = src.xmlManager.add_simple_node(xml_node_jobs,
1302                                                  "job",
1303                                                  attrib={"name" : job.name})
1304             src.xmlManager.add_simple_node(xmlj, "application", job.application)
1305             src.xmlManager.add_simple_node(xmlj,
1306                                            "distribution",
1307                                            job.machine.distribution)
1308             src.xmlManager.add_simple_node(xmlj, "board", job.board)
1309             src.xmlManager.add_simple_node(xmlj,
1310                                        "commands", " ; ".join(job.commands))
1311             src.xmlManager.add_simple_node(xmlj, "state", "Not today")
1312             src.xmlManager.add_simple_node(xmlj, "machine", job.machine.name)
1313             src.xmlManager.add_simple_node(xmlj, "host", job.machine.host)
1314             src.xmlManager.add_simple_node(xmlj, "port", str(job.machine.port))
1315             src.xmlManager.add_simple_node(xmlj, "user", job.machine.user)
1316             src.xmlManager.add_simple_node(xmlj, "sat_path",
1317                                                         job.machine.sat_path)
1318             xml_history = src.xmlManager.add_simple_node(xmlj, "history")
1319             for i, (date, res_job, link) in enumerate(self.history[job.name]):
1320                 if i==0:
1321                     # tag the first item (the most recent one)
1322                     src.xmlManager.add_simple_node(xml_history,
1323                                                    "link",
1324                                                    text=link,
1325                                                    attrib={"date" : date,
1326                                                            "res" : res_job,
1327                                                            "last" : "yes"})
1328                 else:
1329                     src.xmlManager.add_simple_node(xml_history,
1330                                                    "link",
1331                                                    text=link,
1332                                                    attrib={"date" : date,
1333                                                            "res" : res_job,
1334                                                            "last" : "no"})
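        # Schematically, each job node added above looks like (hypothetical
        # values, only some of the sub-nodes shown):
        #   <job name="job_example">
        #     <application>APPLI_1</application>
        #     <distribution>CO7</distribution>
        #     <state>Not today</state>
        #     <history>
        #       <link date="20240131" res="0" last="yes">/remote/path/log.xml</link>
        #     </history>
        #   </job>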
1335
1336     def parse_csv_boards(self, today):
1337         """ Parse the csv file that describes the boards to produce and fill
1338             the dictionary d_input_boards with its content
1339         
1340         :param today int: the current day of the week 
1341         """
1342         # open the csv file and read its content
1343         l_read = []
1344         with open(self.file_boards, 'r') as f:
1345             reader = csv.reader(f, delimiter=CSV_DELIMITER)
1346             for row in reader:
1347                 l_read.append(row)
1348         # get the delimiter for the boards (empty line)
1349         boards_delimiter = [''] * len(l_read[0])
1350         # Make the list of boards, by splitting with the delimiter
1351         l_boards = [list(y) for x, y in itertools.groupby(l_read,
1352                                     lambda z: z == boards_delimiter) if not x]
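        # itertools.groupby alternates between runs of delimiter (empty) lines,
        # for which the key x is True, and runs of data lines, for which x is
        # False; keeping only the latter gives one list of csv lines per board.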
1353            
1354         # loop over the csv lists of lines and get the rows, columns and jobs
1355         d_boards = {}
1356         for input_board in l_boards:
1357             # get board name
1358             board_name = input_board[0][0]
1359             
1360             # Get columns list
1361             columns = input_board[0][1:]
1362             
1363             rows = []
1364             jobs = []
1365             jobs_not_today = []
1366             for line in input_board[1:]:
1367                 row = line[0]
1368                 rows.append(row)
1369                 for i, square in enumerate(line[1:]):
1370                     if square=='':
1371                         continue
1372                     days = square.split(DAYS_SEPARATOR)
1373                     days = [int(day) for day in days]
1374                     job = (row, columns[i])
1375                     if today in days:                           
1376                         jobs.append(job)
1377                     else:
1378                         jobs_not_today.append(job)
1379
1380             d_boards[board_name] = {"rows" : rows,
1381                                     "columns" : columns,
1382                                     "jobs" : jobs,
1383                                     "jobs_not_today" : jobs_not_today}
1384         
1385         self.d_input_boards = d_boards
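        # A minimal sketch of the expected csv layout (hypothetical content),
        # with CSV_DELIMITER (";") between cells and an empty line between boards:
        #
        #   my_board;APPLI_1;APPLI_2
        #   CO7;0,1,2,3,4;2
        #   UB20;1,3;
        #
        # With today == 2, this would give for "my_board":
        #   rows           = ["CO7", "UB20"]
        #   columns        = ["APPLI_1", "APPLI_2"]
        #   jobs           = [("CO7", "APPLI_1"), ("CO7", "APPLI_2")]
        #   jobs_not_today = [("UB20", "APPLI_1")]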
1386
1387     def update_xml_files(self, l_jobs):
1388         '''Write all the xml files with updated information about the jobs   
1389
1390         :param l_jobs List: the list of jobs that run today
1391         '''
1392         for xml_file in [self.xml_global_file] + list(
1393                                             self.d_xml_board_files.values()):
1394             self.update_xml_file(l_jobs, xml_file)
1395             
1396         # Write the file
1397         self.write_xml_files()
1398             
1399     def update_xml_file(self, l_jobs, xml_file):      
1400         '''Update the information about the jobs in the file xml_file
1401
1402         :param l_jobs List: the list of jobs that run today
1403         :param xml_file xmlManager.XmlLogFile: the xml instance to update
1404         '''
1405         
1406         xml_node_jobs = xml_file.xmlroot.find('jobs')
1407         # Update the job names and status node
1408         for job in l_jobs:
1409             # Find the node corresponding to the job and delete it
1410             # in order to recreate it
1411             for xmljob in xml_node_jobs.findall('job'):
1412                 if xmljob.attrib['name'] == job.name:
1413                     xml_node_jobs.remove(xmljob)
1414             
1415             T0 = str(job._T0)
1416             if T0 != "-1":
1417                 T0 = time.strftime('%Y-%m-%d %H:%M:%S', 
1418                                        time.localtime(job._T0))
1419             Tf = str(job._Tf)
1420             if Tf != "-1":
1421                 Tf = time.strftime('%Y-%m-%d %H:%M:%S', 
1422                                        time.localtime(job._Tf))
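            # job._T0 and job._Tf presumably hold epoch timestamps (-1 while the
            # job has not started or finished); they are rendered here as, e.g.,
            # "2024-01-31 17:45:00" (hypothetical value).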
1423             
1424             # recreate the job node
1425             xmlj = src.xmlManager.add_simple_node(xml_node_jobs,
1426                                                   "job",
1427                                                   attrib={"name" : job.name})
1428             src.xmlManager.add_simple_node(xmlj, "machine", job.machine.name)
1429             src.xmlManager.add_simple_node(xmlj, "host", job.machine.host)
1430             src.xmlManager.add_simple_node(xmlj, "port", str(job.machine.port))
1431             src.xmlManager.add_simple_node(xmlj, "user", job.machine.user)
1432             xml_history = src.xmlManager.add_simple_node(xmlj, "history")
1433             for date, res_job, link in self.history[job.name]:
1434                 src.xmlManager.add_simple_node(xml_history,
1435                                                "link",
1436                                                text=link,
1437                                                attrib={"date" : date,
1438                                                        "res" : res_job})
1439
1440             src.xmlManager.add_simple_node(xmlj, "sat_path",
1441                                            job.machine.sat_path)
1442             src.xmlManager.add_simple_node(xmlj, "application", job.application)
1443             src.xmlManager.add_simple_node(xmlj, "distribution",
1444                                            job.machine.distribution)
1445             src.xmlManager.add_simple_node(xmlj, "board", job.board)
1446             src.xmlManager.add_simple_node(xmlj, "timeout", str(job.timeout))
1447             src.xmlManager.add_simple_node(xmlj, "commands",
1448                                            " ; ".join(job.commands))
1449             src.xmlManager.add_simple_node(xmlj, "state", job.get_status())
1450             src.xmlManager.add_simple_node(xmlj, "begin", T0)
1451             src.xmlManager.add_simple_node(xmlj, "end", Tf)
1452             src.xmlManager.add_simple_node(xmlj, "out",
1453                                            src.printcolors.cleancolor(job.out))
1454             src.xmlManager.add_simple_node(xmlj, "err",
1455                                            src.printcolors.cleancolor(job.err))
1456             src.xmlManager.add_simple_node(xmlj, "res", str(job.res_job))
1457             if len(job.remote_log_files) > 0:
1458                 src.xmlManager.add_simple_node(xmlj,
1459                                                "remote_log_file_path",
1460                                                job.remote_log_files[0])
1461             else:
1462                 src.xmlManager.add_simple_node(xmlj,
1463                                                "remote_log_file_path",
1464                                                "nothing")           
1465             
1466             xmlafter = src.xmlManager.add_simple_node(xmlj, "after", job.after)
1467             # get the parent job (the one referenced by job.after)
1468             if job.after is not None:
1469                 job_father = None
1470                 for jb in l_jobs:
1471                     if jb.name == job.after:
1472                         job_father = jb
1473                 
1474                 if (job_father is not None and 
1475                         len(job_father.remote_log_files) > 0):
1476                     link = job_father.remote_log_files[0]
1477                 else:
1478                     link = "nothing"
1479                 src.xmlManager.append_node_attrib(xmlafter, {"link" : link})
1480             
1481             # Verify that the job is to be done today regarding the input csv
1482             # files
1483             if job.board and job.board in self.d_input_boards.keys():
1484                 found = False
1485                 for dist, appli in self.d_input_boards[job.board]["jobs"]:
1486                     if (job.machine.distribution == dist 
1487                         and job.application == appli):
1488                         found = True
1489                         src.xmlManager.add_simple_node(xmlj,
1490                                                "extra_job",
1491                                                "no")
1492                         break
1493                 if not found:
1494                     src.xmlManager.add_simple_node(xmlj,
1495                                                "extra_job",
1496                                                "yes")
1497             
1498         
1499         # Update the date
1500         xml_node_infos = xml_file.xmlroot.find('infos')
1501         src.xmlManager.append_node_attrib(xml_node_infos,
1502                     attrib={"value" : 
1503                     datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")})
1504                
1505
1506     
1507     def last_update(self, finish_status = "finished"):
1508         '''Write the final JobsCommandStatus in the infos node of every xml file
1509
1510         :param finish_status str: the status to write in the files
1512         '''
1513         for xml_file in [self.xml_global_file] + list(self.d_xml_board_files.values()):
1514             xml_node_infos = xml_file.xmlroot.find('infos')
1515             src.xmlManager.append_node_attrib(xml_node_infos,
1516                         attrib={"JobsCommandStatus" : finish_status})
1517         # Write the file
1518         self.write_xml_files()
1519
1520     def write_xml_file(self, xml_file, stylesheet):
1521         ''' Write one xml file, plus the same file with self.prefix prepended to its name
1522         '''
1523         xml_file.write_tree(stylesheet)
1524         file_path = xml_file.logFile
1525         file_dir = os.path.dirname(file_path)
1526         file_name = os.path.basename(file_path)
1527         file_name_with_prefix = self.prefix + "_" + file_name
1528         xml_file.write_tree(stylesheet, os.path.join(file_dir,
1529                                                      file_name_with_prefix))
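        # For example, with a hypothetical prefix "20240131_174500", a board
        # written as "my_board.xml" is also written as
        # "20240131_174500_my_board.xml" in the same directory.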
1530         
1531     def write_xml_files(self):
1532         ''' Write the xml files   
1533         '''
1534         self.write_xml_file(self.xml_global_file, STYLESHEET_GLOBAL)
1535         for xml_file in self.d_xml_board_files.values():
1536             self.write_xml_file(xml_file, STYLESHEET_BOARD)
1537
1538
1539 ##
1540 # Describes the command
1541 def description():
1542     return _("The jobs command launches the maintenance jobs that are described"
1543              " in the dedicated jobs configuration file.\n\nexample:\nsat "
1544              "jobs --name my_jobs --publish")
1545
1546 ##
1547 # Runs the command.
1548 def run(args, runner, logger):
1549        
1550     (options, args) = parser.parse_args(args)
1551        
1552     l_cfg_dir = runner.cfg.PATHS.JOBPATH
1553     
1554     # list option : display all the available config files
1555     if options.list:
1556         for cfg_dir in l_cfg_dir:
1557             if not options.no_label:
1558                 logger.write("------ %s\n" % 
1559                                  src.printcolors.printcHeader(cfg_dir))
1560     
1561             for f in sorted(os.listdir(cfg_dir)):
1562                 if not f.endswith('.pyconf'):
1563                     continue
1564                 cfilename = f[:-7]
1565                 logger.write("%s\n" % cfilename)
1566         return 0
1567
1568     # Make sure the --name option has been given
1569     if not options.jobs_cfg:
1570         message = _("The option --name is required\n")
1571         src.printcolors.printcError(message)
1572         return 1
1573     
1574     # Find the file in the directories
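    # e.g. "--name my_jobs" (hypothetical name) is resolved to
    # "<cfg_dir>/my_jobs.pyconf" for each directory of PATHS.JOBPATH.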
1575     found = False
1576     for cfg_dir in l_cfg_dir:
1577         file_jobs_cfg = os.path.join(cfg_dir, options.jobs_cfg)
1578         if not file_jobs_cfg.endswith('.pyconf'):
1579             file_jobs_cfg += '.pyconf'
1580         
1581         if not os.path.exists(file_jobs_cfg):
1582             continue
1583         else:
1584             found = True
1585             break
1586     
1587     if not found:
1588         msg = _("The configuration file %(name_file)s was not found."
1589                 "\nUse the --list option to get the possible files.")
1590         src.printcolors.printcError(msg % {"name_file" : options.jobs_cfg})
1591         return 1
1592     
1593     info = [
1594         (_("Platform"), runner.cfg.VARS.dist),
1595         (_("File containing the jobs configuration"), file_jobs_cfg)
1596     ]    
1597     src.print_info(logger, info)
1598
1599     # Read the config that is in the file
1600     config_jobs = src.read_config_from_a_file(file_jobs_cfg)
1601     if options.only_jobs:
1602         l_jb = src.pyconf.Sequence()
1603         for jb in config_jobs.jobs:
1604             if jb.name in options.only_jobs:
1605                 l_jb.append(jb,
1606                 "Adding a job that was given in only_jobs option parameters")
1607         config_jobs.jobs = l_jb
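        # Only the jobs whose name was passed with --only_jobs are kept;
        # the other jobs of the configuration file are dropped for this run.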
1608      
1609     # Initialization
1610     today_jobs = Jobs(runner,
1611                       logger,
1612                       file_jobs_cfg,
1613                       config_jobs)
1614     # SSH connection to all machines
1615     today_jobs.ssh_connection_all_machines()
1616     if options.test_connection:
1617         return 0
1618     
1619     gui = None
1620     if options.publish:
1621         # Copy the stylesheets in the log directory 
1622         log_dir = runner.cfg.USER.log_dir
1623         xsl_dir = os.path.join(runner.cfg.VARS.srcDir, 'xsl')
1624         files_to_copy = []
1625         files_to_copy.append(os.path.join(xsl_dir, STYLESHEET_GLOBAL))
1626         files_to_copy.append(os.path.join(xsl_dir, STYLESHEET_BOARD))
1627         files_to_copy.append(os.path.join(xsl_dir, "running.gif"))
1628         for file_path in files_to_copy:
1629             shutil.copy2(file_path, log_dir)
1630         
1631         # Instantiate the Gui in order to produce the xml files that contain all
1632         # the boards
1633         gui = Gui(runner.cfg.USER.log_dir,
1634                   today_jobs.ljobs,
1635                   today_jobs.ljobs_not_today,
1636                   runner.cfg.VARS.datehour,
1637                   file_boards = options.input_boards)
1638         
1639         # Display the list of the xml files
1640         logger.write(src.printcolors.printcInfo(("Here is the list of published"
1641                                                  " files:\n")), 4)
1642         logger.write("%s\n" % gui.xml_global_file.logFile, 4)
1643         for board in gui.d_xml_board_files.keys():
1644             file_path = gui.d_xml_board_files[board].logFile
1645             file_name = os.path.basename(file_path)
1646             logger.write("%s\n" % file_path, 4)
1647             logger.add_link(file_name, "board", 0, board)
1648         
1649         logger.write("\n", 4)
1650     
1651     today_jobs.gui = gui
1652     
1653     interrupted = False
1654     try:
1655         # Run all the jobs contained in config_jobs
1656         today_jobs.run_jobs()
1657     except KeyboardInterrupt:
1658         interrupted = True
1659         logger.write("\n\n%s\n\n" % 
1660                 (src.printcolors.printcWarning(_("Forced interruption"))), 1)
1661     finally:
1662         if interrupted:
1663             msg = _("Killing the running jobs and trying"
1664                     " to get the corresponding logs\n")
1665             logger.write(src.printcolors.printcWarning(msg))
1666             
1667         # find the potential not finished jobs and kill them
1668         for jb in today_jobs.ljobs:
1669             if not jb.has_finished():
1670                 try:
1671                     jb.kill_remote_process()
1672                 except Exception as e:
1673                     msg = _("Failed to kill job %s: %s\n") % (jb.name, e)
1674                     logger.write(src.printcolors.printcWarning(msg))
1675         if interrupted:
1676             if today_jobs.gui:
1677                 today_jobs.gui.last_update(_("Forced interruption"))
1678         else:
1679             if today_jobs.gui:
1680                 today_jobs.gui.last_update()
1681         # Output the results
1682         today_jobs.write_all_results()