bug fix for jobs command with multiple users
[tools/sat.git] / commands / jobs.py
1 #!/usr/bin/env python
2 #-*- coding:utf-8 -*-
3 #  Copyright (C) 2010-2013  CEA/DEN
4 #
5 #  This library is free software; you can redistribute it and/or
6 #  modify it under the terms of the GNU Lesser General Public
7 #  License as published by the Free Software Foundation; either
8 #  version 2.1 of the License.
9 #
10 #  This library is distributed in the hope that it will be useful,
11 #  but WITHOUT ANY WARRANTY; without even the implied warranty of
12 #  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13 #  Lesser General Public License for more details.
14 #
15 #  You should have received a copy of the GNU Lesser General Public
16 #  License along with this library; if not, write to the Free Software
17 #  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
18
19 import os
20 import sys
21 import tempfile
22 import traceback
23 import datetime
24 import time
25 import csv
26 import shutil
27 import itertools
28 import re
29 import paramiko
30
31 import src
32 import src.ElementTree as etree
33
34 STYLESHEET_GLOBAL = "jobs_global_report.xsl"
35 STYLESHEET_BOARD = "jobs_board_report.xsl"
36
37 DAYS_SEPARATOR = ","
38 CSV_DELIMITER = ";"
39
40 parser = src.options.Options()
41
42 parser.add_option('n', 'name', 'list2', 'jobs_cfg', 
43                   _('Mandatory: The name of the config file that contains'
44                   ' the jobs configuration. Can be a list.'))
45 parser.add_option('o', 'only_jobs', 'list2', 'only_jobs',
46                   _('Optional: the list of jobs to launch, by their name. '))
47 parser.add_option('l', 'list', 'boolean', 'list', 
48                   _('Optional: list all available config files.'))
49 parser.add_option('t', 'test_connection', 'boolean', 'test_connection',
50                   _("Optional: try to connect to the machines. "
51                     "Not executing the jobs."),
52                   False)
53 parser.add_option('p', 'publish', 'boolean', 'publish',
54                   _("Optional: generate an xml file that can be read in a "
55                     "browser to display the jobs status."),
56                   False)
57 parser.add_option('i', 'input_boards', 'string', 'input_boards', _("Optional: "
58                                 "the path to csv file that contain "
59                                 "the expected boards."),"")
60 parser.add_option('', 'completion', 'boolean', 'no_label',
61                   _("Optional (internal use): do not print labels, Works only "
62                     "with --list."),
63                   False)
64
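# Editorial note: typical invocations of this command, assuming the standard
# "sat" launcher and a hypothetical jobs configuration file named my_jobs:
#
#   sat jobs --name my_jobs --test_connection
#   sat jobs --name my_jobs --publish
#   sat jobs --list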
65 class Machine(object):
66     '''Class to manage a ssh connection on a machine
67     '''
68     def __init__(self,
69                  name,
70                  host,
71                  user,
72                  port=22,
73                  passwd=None,
74                  sat_path="salomeTools"):
75         self.name = name
76         self.host = host
77         self.port = port
78         self.distribution = None # Will be filled after copying SAT on the machine
79         self.user = user
80         self.password = passwd
81         self.sat_path = sat_path
82         self.ssh = paramiko.SSHClient()
83         self._connection_successful = None
84     
85     def connect(self, logger):
86         '''Initiate the ssh connection to the remote machine
87         
88         :param logger src.logger.Logger: The logger instance 
89         :return: Nothing
90         :rtype: N/A
91         '''
92
93         self._connection_successful = False
94         self.ssh.load_system_host_keys()
95         self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
96         try:
97             self.ssh.connect(self.host,
98                              port=self.port,
99                              username=self.user,
100                              password = self.password)
101         except paramiko.AuthenticationException:
102             message = src.KO_STATUS + _("Authentication failed")
103         except paramiko.BadHostKeyException:
104             message = (src.KO_STATUS + 
105                        _("The server's host key could not be verified"))
106         except paramiko.SSHException:
107             message = ( _("SSHException error connecting or "
108                           "establishing an SSH session"))            
109         except:
110             message = ( _("Error connecting or establishing an SSH session"))
111         else:
112             self._connection_successful = True
113             message = ""
114         return message
115     
116     def successfully_connected(self, logger):
117         '''Verify if the connection to the remote machine has succeeded
118         
119         :param logger src.logger.Logger: The logger instance 
120         :return: True if the connection has succeeded, False if not
121         :rtype: bool
122         '''
123         if self._connection_successful is None:
124             message = _("Warning: asking whether the connection to "
125             "(name: %s host: %s, port: %s, user: %s) is OK although no"
126             " connection request was made" % 
127                         (self.name, self.host, self.port, self.user))
128             logger.write( src.printcolors.printcWarning(message))
129         return self._connection_successful
130
131     def copy_sat(self, sat_local_path, job_file):
132         '''Copy salomeTools to the remote machine in self.sat_path
133         '''
134         res = 0
135         try:
136             # open a sftp connection
137             self.sftp = self.ssh.open_sftp()
138             # Create the sat directory on the remote machine if it does not exist
139             self.mkdir(self.sat_path, ignore_existing=True)
140             # Put sat
141             self.put_dir(sat_local_path, self.sat_path, filters = ['.git'])
142             # put the job configuration file in order to make it reachable 
143             # on the remote machine
144             remote_job_file_name = ".%s" % os.path.basename(job_file)
145             self.sftp.put(job_file, os.path.join(self.sat_path,
146                                                  remote_job_file_name))
147         except Exception as e:
148             res = str(e)
149             self._connection_successful = False
150         
151         return res
152         
153     def put_dir(self, source, target, filters = []):
154         ''' Uploads the contents of the source directory to the target path. The
155             target directory needs to exist. All sub-directories in source are 
156             created under target.
157         '''
158         for item in os.listdir(source):
159             if item in filters:
160                 continue
161             source_path = os.path.join(source, item)
162             destination_path = os.path.join(target, item)
163             if os.path.islink(source_path):
164                 linkto = os.readlink(source_path)
165                 try:
166                     self.sftp.symlink(linkto, destination_path)
167                     self.sftp.chmod(destination_path,
168                                     os.stat(source_path).st_mode)
169                 except IOError:
170                     pass
171             else:
172                 if os.path.isfile(source_path):
173                     self.sftp.put(source_path, destination_path)
174                     self.sftp.chmod(destination_path,
175                                     os.stat(source_path).st_mode)
176                 else:
177                     self.mkdir(destination_path, ignore_existing=True)
178                     self.put_dir(source_path, destination_path, filters)
179
180     def mkdir(self, path, mode=511, ignore_existing=False):
181         ''' Augments mkdir by adding an option to not fail 
182             if the folder exists 
183         '''
184         try:
185             self.sftp.mkdir(path, mode)
186         except IOError:
187             if ignore_existing:
188                 pass
189             else:
190                 raise       
191     
192     def exec_command(self, command, logger):
193         '''Execute the command on the remote machine
194         
195         :param command str: The command to be run
196         :param logger src.logger.Logger: The logger instance 
197         :return: the stdin, stdout, and stderr of the executing command,
198                  as a 3-tuple
199         :rtype: (paramiko.channel.ChannelFile, paramiko.channel.ChannelFile,
200                 paramiko.channel.ChannelFile)
201         '''
202         try:        
203             # Does not wait for the end of the command
204             (stdin, stdout, stderr) = self.ssh.exec_command(command)
205         except paramiko.SSHException:
206             message = src.KO_STATUS + _(
207                             ": the server failed to execute the command\n")
208             logger.write( src.printcolors.printcError(message))
209             return (None, None, None)
210         except:
211             logger.write( src.printcolors.printcError(src.KO_STATUS + '\n'))
212             return (None, None, None)
213         else:
214             return (stdin, stdout, stderr)
215
216     def close(self):
217         '''Close the ssh connection
218         
219         :rtype: N/A
220         '''
221         self.ssh.close()
222      
223     def write_info(self, logger):
224         '''Prints the information about the machine in the logger 
225            (terminal traces and log file)
226         
227         :param logger src.logger.Logger: The logger instance
228         :return: Nothing
229         :rtype: N/A
230         '''
231         logger.write("host : " + self.host + "\n")
232         logger.write("port : " + str(self.port) + "\n")
233         logger.write("user : " + str(self.user) + "\n")
234         if self.successfully_connected(logger):
235             status = src.OK_STATUS
236         else:
237             status = src.KO_STATUS
238         logger.write("Connection : " + status + "\n\n") 
239
240
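# Editorial usage sketch (not called anywhere by sat itself): how a Machine
# instance is typically driven. The host, user and sat_path values below are
# hypothetical placeholders.
def _example_machine_usage(logger):
    machine = Machine("build_node",
                      "build-node.example.org",
                      "jdoe",
                      port=22,
                      sat_path="/home/jdoe/salomeTools")
    error_message = machine.connect(logger)  # empty string on success
    if machine.successfully_connected(logger):
        # exec_command returns the (stdin, stdout, stderr) channel files
        (__, out, __) = machine.exec_command("uname -a", logger)
        logger.write(out.read().decode())
    machine.close()
    return error_message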
241 class Job(object):
242     '''Class to manage one job
243     '''
244     def __init__(self,
245                  name,
246                  machine,
247                  application,
248                  board, 
249                  commands,
250                  timeout,
251                  config,
252                  job_file_path,
253                  logger,
254                  after=None,
255                  prefix=None):
256
257         self.name = name
258         self.machine = machine
259         self.after = after
260         self.timeout = timeout
261         self.application = application
262         self.board = board
263         self.config = config
264         self.logger = logger
265         # The list of log files to download from the remote machine 
266         self.remote_log_files = []
267         
268         # The remote command status
269         # -1 means that it has not been launched, 
270         # 0 means success and 1 means fail
271         self.res_job = "-1"
272         self.cancelled = False
273         
274         self._T0 = -1
275         self._Tf = -1
276         self._has_begun = False
277         self._has_finished = False
278         self._has_timouted = False
279         self._stdin = None # Store the command inputs field
280         self._stdout = None # Store the command outputs field
281         self._stderr = None # Store the command errors field
282
283         self.out = ""
284         self.err = ""
285         
286         self.name_remote_jobs_pyconf = ".%s" % os.path.basename(job_file_path)
287         self.commands = commands
288         self.command = (os.path.join(self.machine.sat_path, "sat") +
289                         " -l " +
290                         os.path.join(self.machine.sat_path,
291                                      "list_log_files.txt") +
292                         " job --jobs_config " + 
293                         os.path.join(self.machine.sat_path,
294                                      self.name_remote_jobs_pyconf) +
295                         " --name " +
296                         self.name)
297         if prefix:
298             self.command = prefix + ' "' + self.command +'"'
299     
300     def get_pids(self):
301         """ Get the pid(s) corresponding to the command that has been launched
302             on the remote machine
303         
304         :return: The list of integers corresponding to the found pids
305         :rtype: List
306         """
307         pids = []
308         cmd_pid = 'ps aux | grep "' + self.command + '" | awk \'{print $2}\''
309         (_, out_pid, _) = self.machine.exec_command(cmd_pid, self.logger)
310         pids_cmd = out_pid.readlines()
311         pids_cmd = [str(src.only_numbers(pid)) for pid in pids_cmd]
312         pids+=pids_cmd
313         return pids
314     
315     def kill_remote_process(self, wait=1):
316         '''Kills the process on the remote machine.
317         
318         :return: (the output of the kill, the error of the kill)
319         :rtype: (str, str)
320         '''
321         try:
322             pids = self.get_pids()
323         except:
324             return ("Unable to get the pid of the command.", "")
325             
326         cmd_kill = " ; ".join([("kill -2 " + pid) for pid in pids])
327         (_, out_kill, err_kill) = self.machine.exec_command(cmd_kill, 
328                                                             self.logger)
329         time.sleep(wait)
330         return (out_kill.read().decode(), err_kill.read().decode())
331             
332     def has_begun(self):
333         '''Returns True if the job has already begun
334         
335         :return: True if the job has already begun
336         :rtype: bool
337         '''
338         return self._has_begun
339     
340     def has_finished(self):
341         '''Returns True if the job has already finished 
342            (i.e. all the commands have been executed)
343            If it is finished, the outputs are stored in the fields out and err.
344         
345         :return: True if the job has already finished
346         :rtype: bool
347         '''
348         
349         # If the method has already been called and returned True
350         if self._has_finished:
351             return True
352         
353         # If the job has not begun yet
354         if not self.has_begun():
355             return False
356         
357         if self._stdout.channel.closed:
358             self._has_finished = True
359             # Store the result outputs
360             self.out += self._stdout.read().decode()
361             self.err += self._stderr.read().decode()
362             # Put end time
363             self._Tf = time.time()
364             # And get the remote command status and log files
365             try:
366                 self.get_log_files()
367             except Exception as e:
368                 self.err += _("Unable to get remote log files: %s" % e)
369         
370         return self._has_finished
371           
372     def get_log_files(self):
373         """Get the log files produced by the command launched 
374            on the remote machine, and put them in the log directory of the user,
375            so that they are accessible locally.
376         """
377         # Do not get the files if the command is not finished
378         if not self.has_finished():
379             msg = _("Trying to get log files while the job is not finished.")
380             self.logger.write(src.printcolors.printcWarning(msg))
381             return
382         
383         # First get the file that contains the list of log files to get
384         tmp_file_path = src.get_tmp_filename(self.config, "list_log_files.txt")
385         remote_path = os.path.join(self.machine.sat_path, "list_log_files.txt")
386         self.machine.sftp.get(
387                     remote_path,
388                     tmp_file_path)
389         
390         # Read the file and get the result of the command and all the log files
391         # to get
392         fstream_tmp = open(tmp_file_path, "r")
393         file_lines = fstream_tmp.readlines()
394         file_lines = [line.replace("\n", "") for line in file_lines]
395         fstream_tmp.close()
396         os.remove(tmp_file_path)
397         
398         try :
399             # The first line is the result of the command (0 success or 1 fail)
400             self.res_job = file_lines[0]
401         except Exception as e:
402             self.err += _("Unable to get status from remote file %s: %s" % 
403                                                     (remote_path, str(e)))
404
405         for i, job_path_remote in enumerate(file_lines[1:]):
406             try:
407                 # For each command, there are two files to get :
408                 # 1- The xml file describing the command and giving the 
409                 # internal traces.
410                 # 2- The txt file containing the system command traces (like 
411                 # traces produced by the "make" command)
412                 # 3- In case of the test command, there is another file to get :
413                 # the xml board that contains the test results
414                 dirname = os.path.basename(os.path.dirname(job_path_remote))
415                 if dirname != 'OUT' and dirname != 'TEST':
416                     # Case 1-
417                     local_path = os.path.join(os.path.dirname(
418                                                         self.logger.logFilePath),
419                                               os.path.basename(job_path_remote))
420                     if i==0: # The first is the job command
421                         self.logger.add_link(os.path.basename(job_path_remote),
422                                              "job",
423                                              self.res_job,
424                                              self.command) 
425                 elif dirname == 'OUT':
426                     # Case 2-
427                     local_path = os.path.join(os.path.dirname(
428                                                         self.logger.logFilePath),
429                                               'OUT',
430                                               os.path.basename(job_path_remote))
431                 elif dirname == 'TEST':
432                     # Case 3-
433                     local_path = os.path.join(os.path.dirname(
434                                                         self.logger.logFilePath),
435                                               'TEST',
436                                               os.path.basename(job_path_remote))
437                 
438                 # Get the file
439                 if not os.path.exists(local_path):
440                     self.machine.sftp.get(job_path_remote, local_path)
441                 self.remote_log_files.append(local_path)
442             except Exception as e:
443                 self.err += _("Unable to get %s log file from remote: %s" % 
444                                                     (str(job_path_remote),
445                                                      str(e)))
446
447     def has_failed(self):
448         '''Returns True if the job has failed. 
449            A job is considered as failed if the machine could not be reached,
450            if the remote command failed, 
451            or if the job finished with a time out.
452         
453         :return: True if the job has failed
454         :rtype: bool
455         '''
456         if not self.has_finished():
457             return False
458         if not self.machine.successfully_connected(self.logger):
459             return True
460         if self.is_timeout():
461             return True
462         if self.res_job == "1":
463             return True
464         return False
465     
466     def cancel(self):
467         """In case of a failing job, one has to cancel every job that depends
468            on it. This method marks the job as failed; it will not be executed.
469         """
470         if self.cancelled:
471             return
472         self._has_begun = True
473         self._has_finished = True
474         self.cancelled = True
475         self.out += _("This job was not launched because its father has failed.")
476         self.err += _("This job was not launched because its father has failed.")
477
478     def is_running(self):
479         '''Returns True if the job commands are running 
480         
481         :return: True if the job is running
482         :rtype: bool
483         '''
484         return self.has_begun() and not self.has_finished()
485
486     def is_timeout(self):
487         '''Returns True if the job has finished with a timeout 
488         
489         :return: True if the job has finished with timeout
490         :rtype: bool
491         '''
492         return self._has_timouted
493
494     def time_elapsed(self):
495         """Get the time elapsed since the job was launched
496         
497         :return: The number of seconds
498         :rtype: int
499         """
500         if not self.has_begun():
501             return -1
502         T_now = time.time()
503         return T_now - self._T0
504     
505     def check_time(self):
506         """Verify that the job has not exceeded its timeout.
507            If it has, kill the remote command and consider the job as finished.
508         """
509         if not self.has_begun():
510             return
511         if self.time_elapsed() > self.timeout:
512             self._has_finished = True
513             self._has_timouted = True
514             self._Tf = time.time()
515             (out_kill, __) = self.kill_remote_process()
516             self.out += "TIMEOUT \n" + out_kill
517             self.err += "TIMEOUT : %s seconds elapsed\n" % str(self.timeout)
518             try:
519                 self.get_log_files()
520             except Exception as e:
521                 self.err += _("Unable to get remote log files: %s" % str(e))
533             
534     def total_duration(self):
535         """Give the total duration of the job
536         
537         :return: the total duration of the job in seconds
538         :rtype: int
539         """
540         return self._Tf - self._T0
541         
542     def run(self):
543         """Launch the job by executing the remote command.
544         """
545         
546         # Prevent multiple run
547         if self.has_begun():
548             msg = _("Warning: A job can only be launched once")
549             msg2 = _("Trying to launch the job \"%s\" although it has "
550                      "already been launched." % self.name)
551             self.logger.write(src.printcolors.printcWarning("%s\n%s\n" % (msg,
552                                                                         msg2)))
553             return
554         
555         # Do not execute the command if the machine could not be reached
556         if not self.machine.successfully_connected(self.logger):
557             self._has_finished = True
558             self.out = "N/A"
559             self.err += ("Connection to machine (name : %s, host: %s, port:"
560                         " %s, user: %s) has failed\nUse the log command "
561                         "to get more information."
562                         % (self.machine.name,
563                            self.machine.host,
564                            self.machine.port,
565                            self.machine.user))
566         else:
567             # Usual case : Launch the command on remote machine
568             self._T0 = time.time()
569             self._stdin, self._stdout, self._stderr = self.machine.exec_command(
570                                                                   self.command,
571                                                                   self.logger)
572             # If the results are not initialized, finish the job
573             if (self._stdin, self._stdout, self._stderr) == (None, None, None):
574                 self._has_finished = True
575                 self._Tf = time.time()
576                 self.out += "N/A"
577                 self.err += "The server failed to execute the command"
578         
579         # Put the beginning flag to true.
580         self._has_begun = True
581     
582     def write_results(self):
583         """Display on the terminal all the job's information
584         """
585         self.logger.write("name : " + self.name + "\n")
586         if self.after:
587             self.logger.write("after : %s\n" % self.after)
588         self.logger.write("Time elapsed : %4imin %2is \n" % 
589                      (self.total_duration()//60 , self.total_duration()%60))
590         if self._T0 != -1:
591             self.logger.write("Begin time : %s\n" % 
592                          time.strftime('%Y-%m-%d %H:%M:%S', 
593                                        time.localtime(self._T0)) )
594         if self._Tf != -1:
595             self.logger.write("End time   : %s\n\n" % 
596                          time.strftime('%Y-%m-%d %H:%M:%S', 
597                                        time.localtime(self._Tf)) )
598         
599         machine_head = "Information about the connection :\n"
600         underline = (len(machine_head) - 2) * "-"
601         self.logger.write(src.printcolors.printcInfo(
602                                                 machine_head+underline+"\n"))
603         self.machine.write_info(self.logger)
604         
605         self.logger.write(src.printcolors.printcInfo("out : \n"))
606         if self.out == "":
607             self.logger.write("Unable to get output\n")
608         else:
609             self.logger.write(self.out + "\n")
610         self.logger.write(src.printcolors.printcInfo("err : \n"))
611         self.logger.write(self.err + "\n")
612         
613     def get_status(self):
614         """Get the status of the job (used by the Gui for xml display)
615         
616         :return: The current status of the job
617         :rtype: String
618         """
619         if not self.machine.successfully_connected(self.logger):
620             return "SSH connection KO"
621         if not self.has_begun():
622             return "Not launched"
623         if self.cancelled:
624             return "Cancelled"
625         if self.is_running():
626             return "running since " + time.strftime('%Y-%m-%d %H:%M:%S',
627                                                     time.localtime(self._T0))        
628         if self.has_finished():
629             if self.is_timeout():
630                 return "Timeout since " + time.strftime('%Y-%m-%d %H:%M:%S',
631                                                     time.localtime(self._Tf))
632             return "Finished since " + time.strftime('%Y-%m-%d %H:%M:%S',
633                                                      time.localtime(self._Tf))
634     
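# Editorial sketch (hypothetical helper, not part of sat): the minimal polling
# loop needed to drive a single Job instance, mirroring what Jobs.run_jobs()
# does for the whole job list.
def _example_poll_single_job(job):
    job.run()
    while not job.has_finished():
        job.check_time()   # enforces the timeout, killing the remote command
        time.sleep(0.5)
    job.write_results()
    return not job.has_failed()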
635 class Jobs(object):
636     '''Class to manage the jobs to be run
637     '''
638     def __init__(self,
639                  runner,
640                  logger,
641                  job_file_path,
642                  config_jobs,
643                  lenght_columns = 20):
644         # The jobs configuration
645         self.cfg_jobs = config_jobs
646         self.job_file_path = job_file_path
647         # The machines that will be used today
648         self.lmachines = []
649         # The list of (host, port) pairs that will be used today 
650         # (the same host can have several machine instances since there 
651         # can be several ssh parameters) 
652         self.lhosts = []
653         # The jobs to be launched today 
654         self.ljobs = []
655         # The jobs that will not be launched today
656         self.ljobs_not_today = []
657         self.runner = runner
658         self.logger = logger
659         self.len_columns = lenght_columns
660         
661         # the list of jobs that have not been run yet
662         self._l_jobs_not_started = []
663         # the list of jobs that have already run 
664         self._l_jobs_finished = []
665         # the list of jobs that are running 
666         self._l_jobs_running = [] 
667                 
668         self.determine_jobs_and_machines()
669     
670     def define_job(self, job_def, machine):
671         '''Takes a pyconf job definition and a machine (a Machine instance)
672            and returns the job instance corresponding to the definition.
673         
674         :param job_def src.config.Mapping: a job definition 
675         :param machine machine: the machine on which the job will run
676         :return: The corresponding job in a job class instance
677         :rtype: job
678         '''
679         name = job_def.name
680         cmmnds = job_def.commands
681         if not "timeout" in job_def:
682             timeout = 4*60*60 # default timeout = 4h
683         else:
684             timeout = job_def.timeout
685         after = None
686         if 'after' in job_def:
687             after = job_def.after
688         application = None
689         if 'application' in job_def:
690             application = job_def.application
691         board = None
692         if 'board' in job_def:
693             board = job_def.board
694         prefix = None
695         if "prefix" in job_def:
696             prefix = job_def.prefix
697             
698         return Job(name,
699                    machine,
700                    application,
701                    board,
702                    cmmnds,
703                    timeout,
704                    self.runner.cfg,
705                    self.job_file_path,
706                    self.logger,
707                    after = after,
708                    prefix = prefix)
709     
710     def determine_jobs_and_machines(self):
711         '''Function that reads the pyconf jobs definition and instantiates all
712            the machines and jobs to be done today.
713
714         :return: Nothing
715         :rtype: N/A
716         '''
717         today = datetime.date.weekday(datetime.date.today())
718         host_list = []
719                
720         for job_def in self.cfg_jobs.jobs :
721                 
722             if not "machine" in job_def:
723                 msg = _('WARNING: The job "%s" does not have the key '
724                        '"machine", this job is ignored.\n\n' % job_def.name)
725                 self.logger.write(src.printcolors.printcWarning(msg))
726                 continue
727             name_machine = job_def.machine
728             
729             a_machine = None
730             for mach in self.lmachines:
731                 if mach.name == name_machine:
732                     a_machine = mach
733                     break
734             
735             if a_machine == None:
736                 for machine_def in self.cfg_jobs.machines:
737                     if machine_def.name == name_machine:
738                         if 'host' not in machine_def:
739                             host = self.runner.cfg.VARS.hostname
740                         else:
741                             host = machine_def.host
742
743                         if 'user' not in machine_def:
744                             user = self.runner.cfg.VARS.user
745                         else:
746                             user = machine_def.user
747
748                         if 'port' not in machine_def:
749                             port = 22
750                         else:
751                             port = machine_def.port
752             
753                         if 'password' not in machine_def:
754                             passwd = None
755                         else:
756                             passwd = machine_def.password    
757                             
758                         if 'sat_path' not in machine_def:
759                             sat_path = "salomeTools"
760                         else:
761                             sat_path = machine_def.sat_path
762                         
763                         a_machine = Machine(
764                                             machine_def.name,
765                                             host,
766                                             user,
767                                             port=port,
768                                             passwd=passwd,
769                                             sat_path=sat_path
770                                             )
771                         
772                         self.lmachines.append(a_machine)
773                         if (host, port) not in host_list:
774                             host_list.append((host, port))
775                 
776                 if a_machine == None:
777                     msg = _("WARNING: The job \"%(job_name)s\" requires the "
778                             "machine \"%(machine_name)s\" but this machine "
779                             "is not defined in the configuration file.\n"
780                             "The job will not be launched\n")
781                     self.logger.write(src.printcolors.printcWarning(
782                                         msg % {"job_name" : job_def.name,
783                                                "machine_name" : name_machine}))
784                     continue
785                                   
786             a_job = self.define_job(job_def, a_machine)
787                 
788             if today in job_def.when:    
789                 self.ljobs.append(a_job)
790             else: # today not in job_def.when
791                 self.ljobs_not_today.append(a_job)
792                
793         self.lhosts = host_list
794         
795     def ssh_connection_all_machines(self, pad=50):
796         '''Function that does the ssh connection to every machine 
797            to be used today.
798
799         :return: Nothing
800         :rtype: N/A
801         '''
802         self.logger.write(src.printcolors.printcInfo((
803                         "Establishing connection with all the machines :\n")))
804         for machine in self.lmachines:
805             # little algorithm in order to display traces
806             begin_line = (_("Connection to %s: " % machine.name))
807             if pad - len(begin_line) < 0:
808                 endline = " "
809             else:
810                 endline = (pad - len(begin_line)) * "." + " "
811             
812             step = "SSH connection"
813             self.logger.write( begin_line + endline + step)
814             self.logger.flush()
815             # the call to the method that initiate the ssh connection
816             msg = machine.connect(self.logger)
817             
818             # Copy salomeTools to the remote machine
819             if machine.successfully_connected(self.logger):
820                 step = _("Remove SAT")
821                 self.logger.write('\r%s%s%s' % (begin_line, endline, 20 * " "),3)
822                 self.logger.write('\r%s%s%s' % (begin_line, endline, step), 3)
823                 (__, out_dist, __) = machine.exec_command(
824                                                 "rm -rf %s" % machine.sat_path,
825                                                 self.logger)
826                 out_dist.read()
827                 
828                 self.logger.flush()
829                 step = _("Copy SAT")
830                 self.logger.write('\r%s%s%s' % (begin_line, endline, 20 * " "),3)
831                 self.logger.write('\r%s%s%s' % (begin_line, endline, step), 3)
832                 self.logger.flush()
833                 res_copy = machine.copy_sat(self.runner.cfg.VARS.salometoolsway,
834                                             self.job_file_path)
835
836                 # set the local settings of sat on the remote machine using
837                 # the init command
838                 (__, out_dist, __) = machine.exec_command(
839                                 os.path.join(machine.sat_path,
840                                     "sat init --base unknown --workdir"
841                                     " unknown --log_dir unknown"),
842                                 self.logger)
843                 out_dist.read()    
844                 
845                 # get the remote machine distribution using a sat command
846                 (__, out_dist, __) = machine.exec_command(
847                                 os.path.join(machine.sat_path,
848                                     "sat config --value VARS.dist --no_label"),
849                                 self.logger)
850                 machine.distribution = out_dist.read().decode().replace("\n",
851                                                                         "")
852                 
853                 # Print the status of the copy
854                 if res_copy == 0:
855                     self.logger.write('\r%s' % 
856                                 ((len(begin_line)+len(endline)+20) * " "), 3)
857                     self.logger.write('\r%s%s%s' % 
858                         (begin_line, 
859                          endline, 
860                          src.printcolors.printc(src.OK_STATUS)), 3)
861                 else:
862                     self.logger.write('\r%s' % 
863                             ((len(begin_line)+len(endline)+20) * " "), 3)
864                     self.logger.write('\r%s%s%s %s' % 
865                         (begin_line,
866                          endline,
867                          src.printcolors.printc(src.KO_STATUS),
868                          _("Copy of SAT failed: %s" % res_copy)), 3)
869             else:
870                 self.logger.write('\r%s' % 
871                                   ((len(begin_line)+len(endline)+20) * " "), 3)
872                 self.logger.write('\r%s%s%s %s' % 
873                     (begin_line,
874                      endline,
875                      src.printcolors.printc(src.KO_STATUS),
876                      msg), 3)
877             self.logger.write("\n", 3)
878                 
879         self.logger.write("\n")
880         
881
882     def is_occupied(self, hostname):
883         '''Function that returns the job that is currently running on 
884            the machine defined by its host and its port, if any.
885         
886         :param hostname (str, int): the pair (host, port)
887         :return: the job that is running on the host, 
888                 or false if there is no job running on the host. 
889         :rtype: job / bool
890         '''
891         host = hostname[0]
892         port = hostname[1]
893         for jb in self.ljobs:
894             if jb.machine.host == host and jb.machine.port == port:
895                 if jb.is_running():
896                     return jb
897         return False
898     
899     def update_jobs_states_list(self):
900         '''Function that updates the lists that store the currently
901            running jobs and the jobs that have already finished.
902         
903         :return: Nothing. 
904         :rtype: N/A
905         '''
906         jobs_finished_list = []
907         jobs_running_list = []
908         for jb in self.ljobs:
909             if jb.is_running():
910                 jobs_running_list.append(jb)
911                 jb.check_time()
912             if jb.has_finished():
913                 jobs_finished_list.append(jb)
914         
915         nb_job_finished_before = len(self._l_jobs_finished)
916         self._l_jobs_finished = jobs_finished_list
917         self._l_jobs_running = jobs_running_list
918         
919         nb_job_finished_now = len(self._l_jobs_finished)
920         
921         return nb_job_finished_now > nb_job_finished_before
922     
923     def cancel_dependencies_of_failing_jobs(self):
924         '''Function that cancels all the jobs that depend on a failing one.
925         
926         :return: Nothing. 
927         :rtype: N/A
928         '''
929         
930         for job in self.ljobs:
931             if job.after is None:
932                 continue
933             father_job = self.find_job_that_has_name(job.after)
934             if father_job is not None and father_job.has_failed():
935                 job.cancel()
936     
937     def find_job_that_has_name(self, name):
938         '''Returns the job by its name.
939         
940         :param name str: a job name
941         :return: the job that has the name. 
942         :rtype: job
943         '''
944         for jb in self.ljobs:
945             if jb.name == name:
946                 return jb
947         # the following is executed only if the job was not found
948         return None
949     
950     def str_of_length(self, text, length):
951         '''Takes a string text of any length and returns 
952            a string of exactly the length "length" (truncated or padded).
953         
954         :param text str: any string
955         :param length int: a length for the returned string
956         :return: a string of length "length" built from text
957         :rtype: str
958         '''
959         if len(text) > length:
960             text_out = text[:length-3] + '...'
961         else:
962             diff = length - len(text)
963             before = " " * (diff//2)
964             after = " " * (diff//2 + diff%2)
965             text_out = before + text + after
966             
967         return text_out
968     
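    # Editorial illustration of str_of_length with length 10:
    #   str_of_length("SALOME", 10)            returns "  SALOME  " (padded)
    #   str_of_length("a_very_long_name", 10)  returns "a_very_..." (truncated)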
969     def display_status(self, len_col):
970         '''Takes a length and constructs the display of the current status 
971            of the jobs in an array that has a column for each host.
972            It displays the job that is currently running on the host 
973            of the column.
974         
975         :param len_col int: the size of the column 
976         :return: Nothing
977         :rtype: N/A
978         '''
979         
980         display_line = ""
981         for host_port in self.lhosts:
982             jb = self.is_occupied(host_port)
983             if not jb: # nothing running on the host
984                 empty = self.str_of_length("empty", len_col)
985                 display_line += "|" + empty 
986             else:
987                 display_line += "|" + src.printcolors.printcInfo(
988                                         self.str_of_length(jb.name, len_col))
989         
990         self.logger.write("\r" + display_line + "|")
991         self.logger.flush()
992     
993
994     def run_jobs(self):
995         '''The main method. Runs all the jobs on every host. 
996            For each host, at a given time, only one job can be running.
997            The jobs that have the field "after" (which names the job that
998            has to run before them) are launched after that job has finished.
999            This method stops when all the jobs are finished.
1000         
1001         :return: Nothing
1002         :rtype: N/A
1003         '''
1004
1005         # Print header
1006         self.logger.write(src.printcolors.printcInfo(
1007                                                 _('Executing the jobs :\n')))
1008         text_line = ""
1009         for host_port in self.lhosts:
1010             host = host_port[0]
1011             port = host_port[1]
1012             if port == 22: # default value
1013                 text_line += "|" + self.str_of_length(host, self.len_columns)
1014             else:
1015                 text_line += "|" + self.str_of_length(
1016                                 "("+host+", "+str(port)+")", self.len_columns)
1017         
1018         tiret_line = " " + "-"*(len(text_line)-1) + "\n"
1019         self.logger.write(tiret_line)
1020         self.logger.write(text_line + "|\n")
1021         self.logger.write(tiret_line)
1022         self.logger.flush()
1023         
1024         # The infinite loop that runs the jobs
1025         l_jobs_not_started = src.deepcopy_list(self.ljobs)
1026         while len(self._l_jobs_finished) != len(self.ljobs):
1027             new_job_start = False
1028             for host_port in self.lhosts:
1029                 
1030                 if self.is_occupied(host_port):
1031                     continue
1032              
1033                 for jb in l_jobs_not_started:
1034                     if (jb.machine.host, jb.machine.port) != host_port:
1035                         continue 
1036                     if jb.after == None:
1037                         jb.run()
1038                         l_jobs_not_started.remove(jb)
1039                         new_job_start = True
1040                         break
1041                     else:
1042                         jb_before = self.find_job_that_has_name(jb.after)
1043                         if jb_before is None:
1044                             jb.cancel()
1045                             msg = _("This job was not launched because its "
1046                                     "father is not in the jobs list.")
1047                             jb.out = msg
1048                             jb.err = msg
1049                             break
1050                         if jb_before.has_finished():
1051                             jb.run()
1052                             l_jobs_not_started.remove(jb)
1053                             new_job_start = True
1054                             break
1055             self.cancel_dependencies_of_failing_jobs()
1056             new_job_finished = self.update_jobs_states_list()
1057             
1058             if new_job_start or new_job_finished:
1059                 if self.gui:
1060                     self.gui.update_xml_files(self.ljobs)            
1061                 # Display the current status     
1062                 self.display_status(self.len_columns)
1063             
1064             # Make sure that the proc is not entirely busy
1065             time.sleep(0.001)
1066         
1067         self.logger.write("\n")    
1068         self.logger.write(tiret_line)                   
1069         self.logger.write("\n\n")
1070         
1071         if self.gui:
1072             self.gui.update_xml_files(self.ljobs)
1073             self.gui.last_update()
1074
1075     def write_all_results(self):
1076         '''Display all the jobs outputs.
1077         
1078         :return: Nothing
1079         :rtype: N/A
1080         '''
1081         
1082         for jb in self.ljobs:
1083             self.logger.write(src.printcolors.printcLabel(
1084                         "#------- Results for job %s -------#\n" % jb.name))
1085             jb.write_results()
1086             self.logger.write("\n\n")
1087
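# Editorial sketch (hypothetical driver, not part of sat): the typical calling
# sequence for the Jobs class. Note that run_jobs() reads a "gui" attribute
# that the jobs command normally sets from outside; it must at least exist.
def _example_run_all_jobs(runner, logger, job_file_path, config_jobs):
    jobs = Jobs(runner, logger, job_file_path, config_jobs)
    jobs.ssh_connection_all_machines()
    jobs.gui = None   # or a Gui instance, see the class below
    jobs.run_jobs()
    jobs.write_all_results()
    return [jb for jb in jobs.ljobs if jb.has_failed()]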
1088 class Gui(object):
1089     '''Class to manage the xml data that can be displayed in a browser to
1090        see the jobs states
1091     '''
1092    
1093     def __init__(self,
1094                  xml_dir_path,
1095                  l_jobs,
1096                  l_jobs_not_today,
1097                  prefix,
1098                  logger,
1099                  file_boards=""):
1100         '''Initialization
1101         
1102         :param xml_dir_path str: The path to the directory where the 
1103                                  resulting xml files are put
1104         :param l_jobs List: the list of jobs that run today
1105         :param l_jobs_not_today List: the list of jobs that do not run today
1106         :param file_boards str: the file path from which to read the
1107                                    expected boards
1108         '''
1109         # The logging instance
1110         self.logger = logger
1111         
1112         # The prefix to add to the xml files : date_hour
1113         self.prefix = prefix
1114         
1115         # The path of the csv files to read to fill the expected boards
1116         self.file_boards = file_boards
1117         
1118         if file_boards != "":
1119             today = datetime.date.weekday(datetime.date.today())
1120             self.parse_csv_boards(today)
1121         else:
1122             self.d_input_boards = {}
1123         
1124         # The path of the global xml file
1125         self.xml_dir_path = xml_dir_path
1126         # Initialize the xml files
1127         self.global_name = "global_report"
1128         xml_global_path = os.path.join(self.xml_dir_path,
1129                                        self.global_name + ".xml")
1130         self.xml_global_file = src.xmlManager.XmlLogFile(xml_global_path,
1131                                                          "JobsReport")
1132
1133         # Find history for each job
1134         self.history = {}
1135         self.find_history(l_jobs, l_jobs_not_today)
1136
1137         # The xml files that correspond to the boards.
1138         # {name_board : xml_object}
1139         self.d_xml_board_files = {}
1140
1141         # Create the lines and columns
1142         self.initialize_boards(l_jobs, l_jobs_not_today)
1143
1144         # Write the xml file
1145         self.update_xml_files(l_jobs)
1146     
1147     def add_xml_board(self, name):
1148         '''Add a board to the board list   
1149         :param name str: the board name
1150         '''
1151         xml_board_path = os.path.join(self.xml_dir_path, name + ".xml")
1152         self.d_xml_board_files[name] =  src.xmlManager.XmlLogFile(
1153                                                     xml_board_path,
1154                                                     "JobsReport")
1155         self.d_xml_board_files[name].add_simple_node("distributions")
1156         self.d_xml_board_files[name].add_simple_node("applications")
1157         self.d_xml_board_files[name].add_simple_node("board", text=name)
1158            
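    # Editorial illustration: assuming XmlLogFile creates a root element named
    # after its second argument, a freshly created board file for "MyBoard"
    # presumably starts out roughly as:
    #   <JobsReport>
    #     <distributions/>
    #     <applications/>
    #     <board>MyBoard</board>
    #   </JobsReport>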
1159     def initialize_boards(self, l_jobs, l_jobs_not_today):
1160         '''Get all the first information needed for each file and write the 
1161            first version of the files   
1162         :param l_jobs List: the list of jobs that run today
1163         :param l_jobs_not_today List: the list of jobs that do not run today
1164         '''
1165         # Get the boards to fill and put them in a dictionary
1166         # {board_name : xml instance corresponding to the board}
1167         for job in l_jobs + l_jobs_not_today:
1168             board = job.board
1169             if (board is not None and 
1170                                 board not in self.d_xml_board_files.keys()):
1171                 self.add_xml_board(board)
1172         
1173         # Verify that the boards given as input are done
1174         for board in list(self.d_input_boards.keys()):
1175             if board not in self.d_xml_board_files:
1176                 self.add_xml_board(board)
1177             root_node = self.d_xml_board_files[board].xmlroot
1178             src.xmlManager.append_node_attrib(root_node, 
1179                                               {"input_file" : self.file_boards})
1180         
1181         # Loop over all jobs in order to get the lines and columns for each 
1182         # xml file
1183         d_dist = {}
1184         d_application = {}
1185         for board in self.d_xml_board_files:
1186             d_dist[board] = []
1187             d_application[board] = []
1188             
1189         l_hosts_ports = []
1190             
1191         for job in l_jobs + l_jobs_not_today:
1192             
1193             if (job.machine.host, job.machine.port) not in l_hosts_ports:
1194                 l_hosts_ports.append((job.machine.host, job.machine.port))
1195                 
1196             distrib = job.machine.distribution
1197             application = job.application
1198             
1199             board_job = job.board
1200             if board_job is None:
1201                 continue
1202             for board in self.d_xml_board_files:
1203                 if board_job == board:
1204                     if (distrib not in [None, ''] and 
1205                                             distrib not in d_dist[board]):
1206                         d_dist[board].append(distrib)
1207                         src.xmlManager.add_simple_node(
1208                             self.d_xml_board_files[board].xmlroot.find(
1209                                                             'distributions'),
1210                                                    "dist",
1211                                                    attrib={"name" : distrib})
1212                     
1213                 if board_job == board:
1214                     if (application not in [None, ''] and 
1215                                     application not in d_application[board]):
1216                         d_application[board].append(application)
1217                         src.xmlManager.add_simple_node(
1218                             self.d_xml_board_files[board].xmlroot.find(
1219                                                                 'applications'),
1220                                                    "application",
1221                                                    attrib={
1222                                                         "name" : application})
1223         
1224         # Verify that there are no missing application or distribution in the
1225         # xml board files (regarding the input boards)
1226         for board in self.d_xml_board_files:
1227             l_dist = d_dist[board]
1228             if board not in self.d_input_boards.keys():
1229                 continue
1230             for dist in self.d_input_boards[board]["rows"]:
1231                 if dist not in l_dist:
1232                     src.xmlManager.add_simple_node(
1233                             self.d_xml_board_files[board].xmlroot.find(
1234                                                             'distributions'),
1235                                                    "dist",
1236                                                    attrib={"name" : dist})
1237             l_appli = d_application[board]
1238             for appli in self.d_input_boards[board]["columns"]:
1239                 if appli not in l_appli:
1240                     src.xmlManager.add_simple_node(
1241                             self.d_xml_board_files[board].xmlroot.find(
1242                                                                 'applications'),
1243                                                    "application",
1244                                                    attrib={"name" : appli})
1245                 
1246         # Initialize the hosts_ports node for the global file
1247         self.xmlhosts_ports = self.xml_global_file.add_simple_node(
1248                                                                 "hosts_ports")
1249         for host, port in l_hosts_ports:
1250             host_port = "%s:%i" % (host, port)
1251             src.xmlManager.add_simple_node(self.xmlhosts_ports,
1252                                            "host_port",
1253                                            attrib={"name" : host_port})
1254         
1255         # Initialize the jobs node in all files
1256         for xml_file in [self.xml_global_file] + list(
1257                                             self.d_xml_board_files.values()):
1258             xml_jobs = xml_file.add_simple_node("jobs")      
1259             # Get the jobs present in the config file but 
1260             # that will not be launched today
1261             self.put_jobs_not_today(l_jobs_not_today, xml_jobs)
1262             
1263             # add also the infos node
1264             xml_file.add_simple_node("infos",
1265                                      attrib={"name" : "last update",
1266                                              "JobsCommandStatus" : "running"})
1267             
1268             # and put the history node
1269             history_node = xml_file.add_simple_node("history")
1270             name_board = os.path.basename(xml_file.logFile)[:-len(".xml")]
1271             # search for board files
1272             expression = "^[0-9]{8}_+[0-9]{6}_" + name_board + ".xml$"
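            # e.g. matches "20240101_120000_<name_board>.xml" (illustrative date and time)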
1273             oExpr = re.compile(expression)
1274             # Get the list of xml board files that are in the log directory
1275             for file_name in os.listdir(self.xml_dir_path):
1276                 if oExpr.search(file_name):
1277                     date = os.path.basename(file_name).split("_")[0]
1278                     file_path = os.path.join(self.xml_dir_path, file_name)
1279                     src.xmlManager.add_simple_node(history_node,
1280                                                    "link",
1281                                                    text=file_path,
1282                                                    attrib={"date" : date})      
1283             
1284                 
1285         # Find in each board the squares that need to be filled regarding the
1286         # input csv files but that are not covered by any job today
1287         for board in self.d_input_boards.keys():
1288             xml_root_board = self.d_xml_board_files[board].xmlroot
1289             # Find the missing jobs for today
1290             xml_missing = src.xmlManager.add_simple_node(xml_root_board,
1291                                                  "missing_jobs")
1292             for row, column in self.d_input_boards[board]["jobs"]:
1293                 found = False
1294                 for job in l_jobs:
1295                     if (job.application == column and 
1296                         job.machine.distribution == row):
1297                         found = True
1298                         break
1299                 if not found:
1300                     src.xmlManager.add_simple_node(xml_missing,
1301                                             "job",
1302                                             attrib={"distribution" : row,
1303                                                     "application" : column })
1304             # Find the missing jobs not today
1305             xml_missing_not_today = src.xmlManager.add_simple_node(
1306                                                  xml_root_board,
1307                                                  "missing_jobs_not_today")
1308             for row, column in self.d_input_boards[board]["jobs_not_today"]:
1309                 found = False
1310                 for job in l_jobs_not_today:
1311                     if (job.application == column and 
1312                         job.machine.distribution == row):
1313                         found = True
1314                         break
1315                 if not found:
1316                     src.xmlManager.add_simple_node(xml_missing_not_today,
1317                                             "job",
1318                                             attrib={"distribution" : row,
1319                                                     "application" : column })
1320
1321     def find_history(self, l_jobs, l_jobs_not_today):
1322         """Find, for each job, its results in the existing xml boards.
1323            Store the results in the dictionary self.history = {name_job :
1324            list of (date, status, link)}
1325         
1326         :param l_jobs List: the list of jobs to run today   
1327         :param l_jobs_not_today List: the list of jobs that do not run today
1328         """
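        # Illustrative shape of the result (dates, statuses and paths are
        # hypothetical):
        #   self.history = {"job_1": [("20240102", "0", "/logs/.../job_1.xml"),
        #                             ("20240101", "1", "/logs/.../job_1.xml")]}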
1329         # load all the history
1330         expression = "^[0-9]{8}_+[0-9]{6}_" + self.global_name + ".xml$"
1331         oExpr = re.compile(expression)
1332         # Get the list of global xml that are in the log directory
1333         l_globalxml = []
1334         for file_name in os.listdir(self.xml_dir_path):
1335             if oExpr.search(file_name):
1336                 file_path = os.path.join(self.xml_dir_path, file_name)
1337                 try:
1338                     global_xml = src.xmlManager.ReadXmlFile(file_path)
1339                     l_globalxml.append(global_xml)
1340                 except Exception as e:
1341                     msg = _("\nWARNING: the file %s cannot be read, it will be "
1342                             "ignored\n%s" % (file_path, e))
1343                     self.logger.write("%s\n" % src.printcolors.printcWarning(
1344                                                                         msg), 5)
1345                     
1346         # Construct the dictionary self.history
1347         for job in l_jobs + l_jobs_not_today:
1348             l_links = []
1349             for global_xml in l_globalxml:
1350                 date = os.path.basename(global_xml.filePath).split("_")[0]
1351                 global_root_node = global_xml.xmlroot.find("jobs")
1352                 job_node = src.xmlManager.find_node_by_attrib(
1353                                                               global_root_node,
1354                                                               "job",
1355                                                               "name",
1356                                                               job.name)
1357                 if job_node is not None:
1358                     if job_node.find("remote_log_file_path") is not None:
1359                         link = job_node.find("remote_log_file_path").text
1360                         res_job = job_node.find("res").text
1361                         if link != "nothing":
1362                             l_links.append((date, res_job, link))
1363             l_links = sorted(l_links, reverse=True)
1364             self.history[job.name] = l_links
1365   
1366     def put_jobs_not_today(self, l_jobs_not_today, xml_node_jobs):
1367         '''Add to the xml node "jobs" the jobs that are not scheduled to
1368            run today, with their description and history
1369
1370         :param xml_node_jobs etree.Element: the "jobs" node to fill
1371         :param l_jobs_not_today List: the list of jobs that do not run today
1372         '''
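        # Each job below is written as a node of the following form
        # (illustrative, the values come from the job configuration):
        #   <job name="...">
        #     <application>...</application>
        #     <distribution>...</distribution>
        #     <board>...</board>
        #     <commands>...</commands>
        #     <state>Not today</state>
        #     ...
        #     <history><link date="..." res="..." last="yes">...</link></history>
        #   </job>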
1373         for job in l_jobs_not_today:
1374             xmlj = src.xmlManager.add_simple_node(xml_node_jobs,
1375                                                  "job",
1376                                                  attrib={"name" : job.name})
1377             src.xmlManager.add_simple_node(xmlj, "application", job.application)
1378             src.xmlManager.add_simple_node(xmlj,
1379                                            "distribution",
1380                                            job.machine.distribution)
1381             src.xmlManager.add_simple_node(xmlj, "board", job.board)
1382             src.xmlManager.add_simple_node(xmlj,
1383                                        "commands", " ; ".join(job.commands))
1384             src.xmlManager.add_simple_node(xmlj, "state", "Not today")
1385             src.xmlManager.add_simple_node(xmlj, "machine", job.machine.name)
1386             src.xmlManager.add_simple_node(xmlj, "host", job.machine.host)
1387             src.xmlManager.add_simple_node(xmlj, "port", str(job.machine.port))
1388             src.xmlManager.add_simple_node(xmlj, "user", job.machine.user)
1389             src.xmlManager.add_simple_node(xmlj, "sat_path",
1390                                                         job.machine.sat_path)
1391             xml_history = src.xmlManager.add_simple_node(xmlj, "history")
1392             for i, (date, res_job, link) in enumerate(self.history[job.name]):
1393                 if i==0:
1394                     # tag the first one (the last one)
1395                     src.xmlManager.add_simple_node(xml_history,
1396                                                    "link",
1397                                                    text=link,
1398                                                    attrib={"date" : date,
1399                                                            "res" : res_job,
1400                                                            "last" : "yes"})
1401                 else:
1402                     src.xmlManager.add_simple_node(xml_history,
1403                                                    "link",
1404                                                    text=link,
1405                                                    attrib={"date" : date,
1406                                                            "res" : res_job,
1407                                                            "last" : "no"})
1408
1409     def parse_csv_boards(self, today):
1410         """ Parse the csv file that describes the boards to produce and fill
1411             the dict d_input_boards with its content
1412         
1413         :param today int: the current day of the week 
1414         """
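        # Assumed layout of one board in the csv file (values are illustrative;
        # ';' is CSV_DELIMITER, ',' is DAYS_SEPARATOR and an empty line
        # separates two boards):
        #
        #   my_board;APP_A;APP_B
        #   Ubuntu20;0,2,4;1,3
        #   CentOS7;;0,1,2,3,4
        #
        # -> rows are the distributions, columns are the applications, and a
        #    (row, column) job runs today if 'today' is among its day numbers.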
1415         # open the csv file and read its content
1416         l_read = []
1417         with open(self.file_boards, 'r') as f:
1418             reader = csv.reader(f,delimiter=CSV_DELIMITER)
1419             for row in reader:
1420                 l_read.append(row)
1421         # get the delimiter for the boards (empty line)
1422         boards_delimiter = [''] * len(l_read[0])
1423         # Make the list of boards, by splitting with the delimiter
1424         l_boards = [list(y) for x, y in itertools.groupby(l_read,
1425                                     lambda z: z == boards_delimiter) if not x]
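        # groupby() cuts l_read at the delimiter rows; "if not x" drops the
        # delimiter groups themselves, keeping one list of lines per board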
1426            
1427         # loop over the csv lists of lines and get the rows, columns and jobs
1428         d_boards = {}
1429         for input_board in l_boards:
1430             # get board name
1431             board_name = input_board[0][0]
1432             
1433             # Get columns list
1434             columns = input_board[0][1:]
1435             
1436             rows = []
1437             jobs = []
1438             jobs_not_today = []
1439             for line in input_board[1:]:
1440                 row = line[0]
1441                 rows.append(row)
1442                 for i, square in enumerate(line[1:]):
1443                     if square=='':
1444                         continue
1445                     days = square.split(DAYS_SEPARATOR)
1446                     days = [int(day) for day in days]
1447                     job = (row, columns[i])
1448                     if today in days:                           
1449                         jobs.append(job)
1450                     else:
1451                         jobs_not_today.append(job)
1452
1453             d_boards[board_name] = {"rows" : rows,
1454                                     "columns" : columns,
1455                                     "jobs" : jobs,
1456                                     "jobs_not_today" : jobs_not_today}
1457         
1458         self.d_input_boards = d_boards
1459
1460     def update_xml_files(self, l_jobs):
1461         '''Write all the xml files with updated information about the jobs   
1462
1463         :param l_jobs List: the list of jobs that run today
1464         '''
1465         for xml_file in [self.xml_global_file] + list(
1466                                             self.d_xml_board_files.values()):
1467             self.update_xml_file(l_jobs, xml_file)
1468             
1469         # Write the files
1470         self.write_xml_files()
1471             
1472     def update_xml_file(self, l_jobs, xml_file):      
1473         '''update information about the jobs for the file xml_file   
1474
1475         :param l_jobs List: the list of jobs that run today
1476         :param xml_file xmlManager.XmlLogFile: the xml instance to update
1477         '''
1478         
1479         xml_node_jobs = xml_file.xmlroot.find('jobs')
1480         # Update the job names and status node
1481         for job in l_jobs:
1482             # Find the node corresponding to the job and delete it
1483             # in order to recreate it
1484             for xmljob in xml_node_jobs.findall('job'):
1485                 if xmljob.attrib['name'] == job.name:
1486                     xml_node_jobs.remove(xmljob)
1487             
1488             T0 = str(job._T0)
1489             if T0 != "-1":
1490                 T0 = time.strftime('%Y-%m-%d %H:%M:%S', 
1491                                        time.localtime(job._T0))
1492             Tf = str(job._Tf)
1493             if Tf != "-1":
1494                 Tf = time.strftime('%Y-%m-%d %H:%M:%S', 
1495                                        time.localtime(job._Tf))
1496             
1497             # recreate the job node
1498             xmlj = src.xmlManager.add_simple_node(xml_node_jobs,
1499                                                   "job",
1500                                                   attrib={"name" : job.name})
1501             src.xmlManager.add_simple_node(xmlj, "machine", job.machine.name)
1502             src.xmlManager.add_simple_node(xmlj, "host", job.machine.host)
1503             src.xmlManager.add_simple_node(xmlj, "port", str(job.machine.port))
1504             src.xmlManager.add_simple_node(xmlj, "user", job.machine.user)
1505             xml_history = src.xmlManager.add_simple_node(xmlj, "history")
1506             for date, res_job, link in self.history[job.name]:
1507                 src.xmlManager.add_simple_node(xml_history,
1508                                                "link",
1509                                                text=link,
1510                                                attrib={"date" : date,
1511                                                        "res" : res_job})
1512
1513             src.xmlManager.add_simple_node(xmlj, "sat_path",
1514                                            job.machine.sat_path)
1515             src.xmlManager.add_simple_node(xmlj, "application", job.application)
1516             src.xmlManager.add_simple_node(xmlj, "distribution",
1517                                            job.machine.distribution)
1518             src.xmlManager.add_simple_node(xmlj, "board", job.board)
1519             src.xmlManager.add_simple_node(xmlj, "timeout", str(job.timeout))
1520             src.xmlManager.add_simple_node(xmlj, "commands",
1521                                            " ; ".join(job.commands))
1522             src.xmlManager.add_simple_node(xmlj, "state", job.get_status())
1523             src.xmlManager.add_simple_node(xmlj, "begin", T0)
1524             src.xmlManager.add_simple_node(xmlj, "end", Tf)
1525             src.xmlManager.add_simple_node(xmlj, "out",
1526                                            src.printcolors.cleancolor(job.out))
1527             src.xmlManager.add_simple_node(xmlj, "err",
1528                                            src.printcolors.cleancolor(job.err))
1529             src.xmlManager.add_simple_node(xmlj, "res", str(job.res_job))
1530             if len(job.remote_log_files) > 0:
1531                 src.xmlManager.add_simple_node(xmlj,
1532                                                "remote_log_file_path",
1533                                                job.remote_log_files[0])
1534             else:
1535                 src.xmlManager.add_simple_node(xmlj,
1536                                                "remote_log_file_path",
1537                                                "nothing")           
1538             # Search for the test log if there is any
1539             l_test_log_files = self.find_test_log(job.remote_log_files)
1540             xml_test = src.xmlManager.add_simple_node(xmlj,
1541                                                       "test_log_file_path")
1542             for test_log_path, res_test, nb_fails in l_test_log_files:
1543                 test_path_node = src.xmlManager.add_simple_node(xml_test,
1544                                                "path",
1545                                                test_log_path)
1546                 test_path_node.attrib["res"] = res_test
1547                 test_path_node.attrib["nb_fails"] = str(nb_fails)
1548             
1549             xmlafter = src.xmlManager.add_simple_node(xmlj, "after", job.after)
1550             # get the job father
1551             if job.after is not None:
1552                 job_father = None
1553                 for jb in l_jobs:
1554                     if jb.name == job.after:
1555                         job_father = jb
1556                 
1557                 if (job_father is not None and 
1558                         len(job_father.remote_log_files) > 0):
1559                     link = job_father.remote_log_files[0]
1560                 else:
1561                     link = "nothing"
1562                 src.xmlManager.append_node_attrib(xmlafter, {"link" : link})
1563             
1564             # Verify that the job is to be done today regarding the input csv
1565             # files
1566             if job.board and job.board in self.d_input_boards.keys():
1567                 found = False
1568                 for dist, appli in self.d_input_boards[job.board]["jobs"]:
1569                     if (job.machine.distribution == dist 
1570                         and job.application == appli):
1571                         found = True
1572                         src.xmlManager.add_simple_node(xmlj,
1573                                                "extra_job",
1574                                                "no")
1575                         break
1576                 if not found:
1577                     src.xmlManager.add_simple_node(xmlj,
1578                                                "extra_job",
1579                                                "yes")
1580             
1581         
1582         # Update the date
1583         xml_node_infos = xml_file.xmlroot.find('infos')
1584         src.xmlManager.append_node_attrib(xml_node_infos,
1585                     attrib={"value" : 
1586                     datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")})
1587                
1588
1589     def find_test_log(self, l_remote_log_files):
1590         '''Find if there is a test log (board) in the remote log files and 
1591            the path to it. There can be several test commands, so the result is
1592            a list.
1593
1594         :param l_remote_log_files List: the list of all remote log files
1595         :return: the list of (test log file path, res of the command, number of fails)
1596         :rtype: List
1597         '''
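        # Illustrative return value (the path and values are hypothetical):
        #   [("/logs/.../TEST/<log file>.xml", "<global_res>", 2)]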
1598         res = []
1599         for file_path in l_remote_log_files:
1600             dirname = os.path.basename(os.path.dirname(file_path))
1601             file_name = os.path.basename(file_path)
1602             regex = src.logger.log_all_command_file_expression
1603             oExpr = re.compile(regex)
1604             if dirname == "TEST" and oExpr.search(file_name):
1605                 # find the res of the command
1606                 prod_node = etree.parse(file_path).getroot().find("product")
1607                 res_test = prod_node.attrib["global_res"]
1608                 # find the number of fails
1609                 testbase_node = prod_node.find("tests").find("testbase")
1610                 nb_fails = int(testbase_node.attrib["failed"])
1611                 # put the file path, the res of the test command and the number 
1612                 # of fails in the output
1613                 res.append((file_path, res_test, nb_fails))
1614                 
1615         return res
1616     
1617     def last_update(self, finish_status = "finished"):
1618         '''Set the final status of the jobs command in the "infos" node of
1619            every xml file and write the files
1620
1621         :param finish_status str: the final status to set ("finished" by default)
1622         '''
1623         for xml_file in [self.xml_global_file] + list(self.d_xml_board_files.values()):
1624             xml_node_infos = xml_file.xmlroot.find('infos')
1625             src.xmlManager.append_node_attrib(xml_node_infos,
1626                         attrib={"JobsCommandStatus" : finish_status})
1627         # Write the file
1628         self.write_xml_files()
1629
1630     def write_xml_file(self, xml_file, stylesheet):
1631         ''' Write one xml file and a copy of it whose name is prefixed with self.prefix
1632         '''
1633         xml_file.write_tree(stylesheet)
1634         file_path = xml_file.logFile
1635         file_dir = os.path.dirname(file_path)
1636         file_name = os.path.basename(file_path)
1637         file_name_with_prefix = self.prefix + "_" + file_name
1638         xml_file.write_tree(stylesheet, os.path.join(file_dir,
1639                                                      file_name_with_prefix))
1640         
1641     def write_xml_files(self):
1642         ''' Write the xml files   
1643         '''
1644         self.write_xml_file(self.xml_global_file, STYLESHEET_GLOBAL)
1645         for xml_file in self.d_xml_board_files.values():
1646             self.write_xml_file(xml_file, STYLESHEET_BOARD)
1647
1648 def get_config_file_path(job_config_name, l_cfg_dir):
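    '''Look for the jobs configuration file: return the given path itself if it
       exists and ends with ".pyconf", otherwise search for
       "<job_config_name>.pyconf" in the directories of l_cfg_dir.

    :param job_config_name str: the name (or path) of the jobs configuration file
    :param l_cfg_dir List: the directories in which to search
    :return: (found, file_jobs_cfg) where found is a boolean
    :rtype: (bool, str)
    '''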
1649     found = False
1650     file_jobs_cfg = None
1651     if os.path.exists(job_config_name) and job_config_name.endswith(".pyconf"):
1652         found = True
1653         file_jobs_cfg = job_config_name
1654     else:
1655         for cfg_dir in l_cfg_dir:
1656             file_jobs_cfg = os.path.join(cfg_dir, job_config_name)
1657             if not file_jobs_cfg.endswith('.pyconf'):
1658                 file_jobs_cfg += '.pyconf'
1659             
1660             if not os.path.exists(file_jobs_cfg):
1661                 continue
1662             else:
1663                 found = True
1664                 break
1665     return found, file_jobs_cfg
1666
1667 ##
1668 # Describes the command
1669 def description():
1670     return _("The jobs command launches the maintenance jobs described"
1671              " in the dedicated jobs configuration file.\n\nexample:\nsat "
1672              "jobs --name my_jobs --publish")
1673
1674 ##
1675 # Runs the command.
1676 def run(args, runner, logger):
1677        
1678     (options, args) = parser.parse_args(args)
1679        
1680     l_cfg_dir = runner.cfg.PATHS.JOBPATH
1681     
1682     # list option : display all the available config files
1683     if options.list:
1684         for cfg_dir in l_cfg_dir:
1685             if not options.no_label:
1686                 logger.write("------ %s\n" % 
1687                                  src.printcolors.printcHeader(cfg_dir))
1688             if not os.path.exists(cfg_dir):
1689                 continue
1690             for f in sorted(os.listdir(cfg_dir)):
1691                 if not f.endswith('.pyconf'):
1692                     continue
1693                 cfilename = f[:-7]
1694                 logger.write("%s\n" % cfilename)
1695         return 0
1696
1697     # Make sure the jobs configuration option (--name) has been given
1698     if not options.jobs_cfg:
1699         message = _("The option --name (jobs configuration) is required\n")
1700         src.printcolors.printcError(message)
1701         return 1
1702     
1703     # Find the file in the directories, unless it is a full path
1704     # merge all in a config
1705     merger = src.pyconf.ConfigMerger()
1706     config_jobs = src.pyconf.Config()
1707     l_conf_files_path = []
1708     for config_file in options.jobs_cfg:
1709         found, file_jobs_cfg = get_config_file_path(config_file, l_cfg_dir)
1710         if not found:
1711             msg = _("The configuration file %s was not found."
1712                     "\nUse the --list option to get the "
1713                     "possible files." % config_file)
1714             logger.write("%s\n" % src.printcolors.printcError(msg), 1)
1715             return 1
1716         l_conf_files_path.append(file_jobs_cfg)
1717         # Read the config that is in the file
1718         one_config_jobs = src.read_config_from_a_file(file_jobs_cfg)
1719         merger.merge(config_jobs, one_config_jobs)
1720     
1721     info = [
1722         (_("Platform"), runner.cfg.VARS.dist),
1723         (_("Files containing the jobs configuration"), l_conf_files_path)
1724     ]    
1725     src.print_info(logger, info)
1726
1727     if options.only_jobs:
1728         l_jb = src.pyconf.Sequence()
1729         for jb in config_jobs.jobs:
1730             if jb.name in options.only_jobs:
1731                 l_jb.append(jb,
1732                 "Job that was given in only_jobs option parameters\n")
1733         config_jobs.jobs = l_jb
1734     
1735     # Make a unique file that contains all the jobs in order to use it
1736     # on every machine
1737     name_pyconf = "_".join([os.path.basename(path)[:-len('.pyconf')] 
1738                             for path in l_conf_files_path]) + ".pyconf"
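    # e.g. "nightly.pyconf" + "weekly.pyconf" -> "nightly_weekly.pyconf"
    # (illustrative file names)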
1739     path_pyconf = src.get_tmp_filename(runner.cfg, name_pyconf)
1740     # Save the merged config in a temporary file
1741     with open(path_pyconf, 'w') as f:
1742         config_jobs.__save__(f)
1743     
1744     # log the paramiko problems
1745     log_dir = src.get_log_path(runner.cfg)
1746     paramiko_log_dir_path = os.path.join(log_dir, "JOBS")
1747     src.ensure_path_exists(paramiko_log_dir_path)
1748     paramiko.util.log_to_file(os.path.join(paramiko_log_dir_path,
1749                                            logger.txtFileName))
1750     
1751     # Initialization
1752     today_jobs = Jobs(runner,
1753                       logger,
1754                       path_pyconf,
1755                       config_jobs)
1756     # SSH connection to all machines
1757     today_jobs.ssh_connection_all_machines()
1758     if options.test_connection:
1759         return 0
1760     
1761     gui = None
1762     if options.publish:
1763         logger.write(src.printcolors.printcInfo(
1764                                         _("Initialize the xml boards: ")), 5)
1765         logger.flush()
1766         
1767         # Copy the stylesheets to the log directory
1769         xsl_dir = os.path.join(runner.cfg.VARS.srcDir, 'xsl')
1770         files_to_copy = []
1771         files_to_copy.append(os.path.join(xsl_dir, STYLESHEET_GLOBAL))
1772         files_to_copy.append(os.path.join(xsl_dir, STYLESHEET_BOARD))
1773         files_to_copy.append(os.path.join(xsl_dir, "running.gif"))
1774         for file_path in files_to_copy:
1775             shutil.copy2(file_path, log_dir)
1776         
1777         # Instantiate the Gui in order to produce the xml files that contain all
1778         # the boards
1779         gui = Gui(log_dir,
1780                   today_jobs.ljobs,
1781                   today_jobs.ljobs_not_today,
1782                   runner.cfg.VARS.datehour,
1783                   logger,
1784                   file_boards = options.input_boards)
1785         
1786         logger.write(src.printcolors.printcSuccess("OK"), 5)
1787         logger.write("\n\n", 5)
1788         logger.flush()
1789         
1790         # Display the list of the xml files
1791         logger.write(src.printcolors.printcInfo(("Here is the list of published"
1792                                                  " files:\n")), 4)
1793         logger.write("%s\n" % gui.xml_global_file.logFile, 4)
1794         for board in gui.d_xml_board_files.keys():
1795             file_path = gui.d_xml_board_files[board].logFile
1796             file_name = os.path.basename(file_path)
1797             logger.write("%s\n" % file_path, 4)
1798             logger.add_link(file_name, "board", 0, board)
1799               
1800         logger.write("\n", 4)
1801         
1802     today_jobs.gui = gui
1803     
1804     interrupted = False
1805     try:
1806         # Run all the jobs contained in config_jobs
1807         today_jobs.run_jobs()
1808     except KeyboardInterrupt:
1809         interrupted = True
1810         logger.write("\n\n%s\n\n" % 
1811                 (src.printcolors.printcWarning(_("Forced interruption"))), 1)
1812     except Exception as e:
1813         msg = _("CRITICAL ERROR: The jobs loop has been interrupted\n")
1814         logger.write("\n\n%s\n" % src.printcolors.printcError(msg) )
1815         logger.write("%s\n" % str(e))
1816         # get stack
1817         __, __, exc_traceback = sys.exc_info()
1818         fp = tempfile.TemporaryFile()
1819         traceback.print_tb(exc_traceback, file=fp)
1820         fp.seek(0)
1821         stack = fp.read()
1822         logger.write("\nTRACEBACK: %s\n" % stack.replace('"',"'"), 1)
1823         
1824     finally:
1825         res = 0
1826         if interrupted:
1827             res = 1
1828             msg = _("Killing the running jobs and trying"
1829                     " to get the corresponding logs\n")
1830             logger.write(src.printcolors.printcWarning(msg))
1831             
1832         # find the jobs that may not have finished and kill them
1833         for jb in today_jobs.ljobs:
1834             if not jb.has_finished():
1835                 res = 1
1836                 try:
1837                     jb.kill_remote_process()
1838                 except Exception as e:
1839                     msg = _("Failed to kill job %s: %s\n" % (jb.name, e))
1840                     logger.write(src.printcolors.printcWarning(msg))
1841             if jb.res_job != "0":
1842                 res = 1
1843         if interrupted:
1844             if today_jobs.gui:
1845                 today_jobs.gui.last_update(_("Forced interruption"))
1846         else:
1847             if today_jobs.gui:
1848                 today_jobs.gui.last_update()
1849         # Output the results
1850         today_jobs.write_all_results()
1851         # Remove the temporary pyconf file
1852         if os.path.exists(path_pyconf):
1853             os.remove(path_pyconf)
1854         return res