commands/jobs.py (tools/sat.git)
1 #!/usr/bin/env python
2 #-*- coding:utf-8 -*-
3 #  Copyright (C) 2010-2013  CEA/DEN
4 #
5 #  This library is free software; you can redistribute it and/or
6 #  modify it under the terms of the GNU Lesser General Public
7 #  License as published by the Free Software Foundation; either
8 #  version 2.1 of the License.
9 #
10 #  This library is distributed in the hope that it will be useful,
11 #  but WITHOUT ANY WARRANTY; without even the implied warranty of
12 #  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13 #  Lesser General Public License for more details.
14 #
15 #  You should have received a copy of the GNU Lesser General Public
16 #  License along with this library; if not, write to the Free Software
17 #  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
18
19 import os
20 import sys
21 import tempfile
22 import traceback
23 import datetime
24 import time
25 import csv
26 import shutil
27 import itertools
28 import re
29
30 # paramiko is required for the ssh connections to the remote machines;
31 # keep an error message instead of failing at import time
32 try:
33   import paramiko
34 except ImportError:
35   paramiko = "import paramiko impossible"
36
37 import src
38 import src.ElementTree as etree
39
40 STYLESHEET_GLOBAL = "jobs_global_report.xsl"
41 STYLESHEET_BOARD = "jobs_board_report.xsl"
42
43 DAYS_SEPARATOR = ","
44 CSV_DELIMITER = ";"
45
46 parser = src.options.Options()
47
48 parser.add_option('n', 'name', 'list2', 'jobs_cfg', 
49                   _('Mandatory: The name of the config file that contains'
50                   ' the jobs configuration. Can be a list.'))
51 parser.add_option('o', 'only_jobs', 'list2', 'only_jobs',
52                   _('Optional: the list of jobs to launch, by their name. '))
53 parser.add_option('l', 'list', 'boolean', 'list', 
54                   _('Optional: list all available config files.'))
55 parser.add_option('t', 'test_connection', 'boolean', 'test_connection',
56                   _("Optional: try to connect to the machines "
57                     "without executing the jobs."),
58                   False)
59 parser.add_option('p', 'publish', 'boolean', 'publish',
60                   _("Optional: generate an xml file that can be read in a "
61                     "browser to display the jobs status."),
62                   False)
63 parser.add_option('i', 'input_boards', 'string', 'input_boards', _("Optional: "
64                                 "the path to the csv file that contains "
65                                 "the expected boards."),"")
66 parser.add_option('', 'completion', 'boolean', 'no_label',
67                   _("Optional (internal use): do not print labels. Works only "
68                     "with --list."),
69                   False)
70
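# Typical invocations of this command (illustrative only; "my_jobs_config" is a
# hypothetical jobs configuration name, not one shipped with salomeTools):
#   sat jobs --name my_jobs_config --test_connection
#   sat jobs --name my_jobs_config --publish
#   sat jobs --list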
71 class Machine(object):
72     '''Class to manage an ssh connection to a machine
73     '''
74     def __init__(self,
75                  name,
76                  host,
77                  user,
78                  port=22,
79                  passwd=None,
80                  sat_path="salomeTools"):
81         self.name = name
82         self.host = host
83         self.port = port
84         self.distribution = None # Will be filled after copying SAT on the machine
85         self.user = user
86         self.password = passwd
87         self.sat_path = sat_path
88         self.ssh = paramiko.SSHClient()
89         self._connection_successful = None
90     
91     def connect(self, logger):
92         '''Initiate the ssh connection to the remote machine
93         
94         :param logger src.logger.Logger: The logger instance 
95         :return: an empty string on success, otherwise an error message
96         :rtype: str
97         '''
98
99         self._connection_successful = False
100         self.ssh.load_system_host_keys()
101         self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
102         try:
103             self.ssh.connect(self.host,
104                              port=self.port,
105                              username=self.user,
106                              password = self.password)
107         except paramiko.AuthenticationException:
108             message = src.KO_STATUS + _("Authentication failed")
109         except paramiko.BadHostKeyException:
110             message = (src.KO_STATUS + 
111                        _("The server's host key could not be verified"))
112         except paramiko.SSHException:
113             message = ( _("SSHException error connecting or "
114                           "establishing an SSH session"))            
115         except:
116             message = ( _("Error connecting or establishing an SSH session"))
117         else:
118             self._connection_successful = True
119             message = ""
120         return message
121     
122     def successfully_connected(self, logger):
123         '''Verify whether the connection to the remote machine has succeeded
124         
125         :param logger src.logger.Logger: The logger instance 
126         :return: True if the connection has succeeded, False if not
127         :rtype: bool
128         '''
129         if self._connection_successful is None:
130             message = _("Warning : asking whether the connection to "
131             "(name: %s host: %s, port: %s, user: %s) is OK whereas there was"
132             " no connection request" % 
133                         (self.name, self.host, self.port, self.user))
134             logger.write( src.printcolors.printcWarning(message))
135         return self._connection_successful
136
137     def copy_sat(self, sat_local_path, job_file):
138         '''Copy salomeTools to the remote machine in self.sat_path
139         '''
140         res = 0
141         try:
142             # open a sftp connection
143             self.sftp = self.ssh.open_sftp()
144             # Create the sat directory on the remote machine if it does not exist
145             self.mkdir(self.sat_path, ignore_existing=True)
146             # Put sat
147             self.put_dir(sat_local_path, self.sat_path, filters = ['.git'])
148             # put the job configuration file in order to make it reachable 
149             # on the remote machine
150             remote_job_file_name = ".%s" % os.path.basename(job_file)
151             self.sftp.put(job_file, os.path.join(self.sat_path,
152                                                  remote_job_file_name))
153         except Exception as e:
154             res = str(e)
155             self._connection_successful = False
156         
157         return res
158         
159     def put_dir(self, source, target, filters = []):
160         ''' Uploads the contents of the source directory to the target path. The
161             target directory needs to exist. All sub-directories in source are 
162             created under target.
163         '''
164         for item in os.listdir(source):
165             if item in filters:
166                 continue
167             source_path = os.path.join(source, item)
168             destination_path = os.path.join(target, item)
169             if os.path.islink(source_path):
170                 linkto = os.readlink(source_path)
171                 try:
172                     self.sftp.symlink(linkto, destination_path)
173                     self.sftp.chmod(destination_path,
174                                     os.stat(source_path).st_mode)
175                 except IOError:
176                     pass
177             else:
178                 if os.path.isfile(source_path):
179                     self.sftp.put(source_path, destination_path)
180                     self.sftp.chmod(destination_path,
181                                     os.stat(source_path).st_mode)
182                 else:
183                     self.mkdir(destination_path, ignore_existing=True)
184                     self.put_dir(source_path, destination_path)
185
186     def mkdir(self, path, mode=511, ignore_existing=False):
187         ''' Augments mkdir by adding an option to not fail 
188             if the folder exists 
189         '''
190         try:
191             self.sftp.mkdir(path, mode)
192         except IOError:
193             if ignore_existing:
194                 pass
195             else:
196                 raise       
197     
198     def exec_command(self, command, logger):
199         '''Execute the command on the remote machine
200         
201         :param command str: The command to be run
202         :param logger src.logger.Logger: The logger instance 
203         :return: the stdin, stdout, and stderr of the executing command,
204                  as a 3-tuple
205         :rtype: (paramiko.channel.ChannelFile, paramiko.channel.ChannelFile,
206                 paramiko.channel.ChannelFile)
207         '''
208         try:        
209             # Does not wait for the end of the command
210             (stdin, stdout, stderr) = self.ssh.exec_command(command)
211         except paramiko.SSHException:
212             message = src.KO_STATUS + _(
213                             ": the server failed to execute the command\n")
214             logger.write( src.printcolors.printcError(message))
215             return (None, None, None)
216         except:
217             logger.write( src.printcolors.printcError(src.KO_STATUS + '\n'))
218             return (None, None, None)
219         else:
220             return (stdin, stdout, stderr)
221
222     def close(self):
223         '''Close the ssh connection
224         
225         :rtype: N\A
226         '''
227         self.ssh.close()
228      
229     def write_info(self, logger):
230         '''Prints the information about the machine in the logger 
231            (terminal traces and log file)
232         
233         :param logger src.logger.Logger: The logger instance
234         :return: Nothing
235         :rtype: N\A
236         '''
237         logger.write("host : " + self.host + "\n")
238         logger.write("port : " + str(self.port) + "\n")
239         logger.write("user : " + str(self.user) + "\n")
240         if self.successfully_connected(logger):
241             status = src.OK_STATUS
242         else:
243             status = src.KO_STATUS
244         logger.write("Connection : " + status + "\n\n") 
245
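# Typical lifecycle of a Machine instance (illustrative sketch; the host, user
# and paths are placeholders and "logger" is assumed to be a src.logger.Logger):
#   machine = Machine("m1", "my.host.example", "user", sat_path="/tmp/salomeTools")
#   msg = machine.connect(logger)                  # "" means success
#   if machine.successfully_connected(logger):
#       machine.copy_sat("/local/salomeTools", "/path/to/my_jobs.pyconf")
#       (stdin, stdout, stderr) = machine.exec_command("ls", logger)
#   machine.close()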
246
247 class Job(object):
248     '''Class to manage one job
249     '''
250     def __init__(self,
251                  name,
252                  machine,
253                  application,
254                  board, 
255                  commands,
256                  timeout,
257                  config,
258                  job_file_path,
259                  logger,
260                  after=None,
261                  prefix=None):
262
263         self.name = name
264         self.machine = machine
265         self.after = after
266         self.timeout = timeout
267         self.application = application
268         self.board = board
269         self.config = config
270         self.logger = logger
271         # The list of log files to download from the remote machine 
272         self.remote_log_files = []
273         
274         # The remote command status
275         # -1 means that it has not been launched, 
276         # 0 means success and 1 means fail
277         self.res_job = "-1"
278         self.cancelled = False
279         
280         self._T0 = -1
281         self._Tf = -1
282         self._has_begun = False
283         self._has_finished = False
284         self._has_timouted = False
285         self._stdin = None # Store the command inputs field
286         self._stdout = None # Store the command outputs field
287         self._stderr = None # Store the command errors field
288
289         self.out = ""
290         self.err = ""
291         
292         self.name_remote_jobs_pyconf = ".%s" % os.path.basename(job_file_path)
293         self.commands = commands
294         self.command = (os.path.join(self.machine.sat_path, "sat") +
295                         " -l " +
296                         os.path.join(self.machine.sat_path,
297                                      "list_log_files.txt") +
298                         " job --jobs_config " + 
299                         os.path.join(self.machine.sat_path,
300                                      self.name_remote_jobs_pyconf) +
301                         " --name " + self.name)
302         if prefix:
303             self.command = prefix + ' "' + self.command +'"'
304     
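    # Note: self.command, assembled in __init__ above, typically looks like the
    # following (illustrative only; the remote sat path and the job name are
    # placeholders):
    #   /remote/salomeTools/sat -l /remote/salomeTools/list_log_files.txt \
    #       job --jobs_config /remote/salomeTools/.my_jobs.pyconf --name job_1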
305     def get_pids(self):
306         """ Get the pid(s) corresponding to the command that has been launched
307             on the remote machine
308         
309         :return: The list of integers corresponding to the found pids
310         :rtype: List
311         """
312         pids = []
313         cmd_pid = 'ps aux | grep "' + self.command + '" | awk \'{print $2}\''
314         (_, out_pid, _) = self.machine.exec_command(cmd_pid, self.logger)
315         pids_cmd = out_pid.readlines()
316         pids_cmd = [str(src.only_numbers(pid)) for pid in pids_cmd]
317         pids+=pids_cmd
318         return pids
319     
320     def kill_remote_process(self, wait=1):
321         '''Kills the process on the remote machine.
322         
323         :return: (the output of the kill, the error of the kill)
324         :rtype: (str, str)
325         '''
326         try:
327             pids = self.get_pids()
328         except:
329             return ("Unable to get the pid of the command.", "")
330             
331         cmd_kill = " ; ".join([("kill -2 " + pid) for pid in pids])
332         (_, out_kill, err_kill) = self.machine.exec_command(cmd_kill, 
333                                                             self.logger)
334         time.sleep(wait)
335         return (out_kill.read().decode(), err_kill.read().decode())
336             
337     def has_begun(self):
338         '''Returns True if the job has already begun
339         
340         :return: True if the job has already begun
341         :rtype: bool
342         '''
343         return self._has_begun
344     
345     def has_finished(self):
346         '''Returns True if the job has already finished 
347            (i.e. all the commands have been executed)
348            If it is finished, the outputs are stored in the fields out and err.
349         
350         :return: True if the job has already finished
351         :rtype: bool
352         '''
353         
354         # If the method has already been called and returned True
355         if self._has_finished:
356             return True
357         
358         # If the job has not begun yet
359         if not self.has_begun():
360             return False
361         
362         if self._stdout.channel.closed:
363             self._has_finished = True
364             # Store the result outputs
365             self.out += self._stdout.read().decode()
366             self.err += self._stderr.read().decode()
367             # Put end time
368             self._Tf = time.time()
369             # And get the remote command status and log files
370             try:
371                 self.get_log_files()
372             except Exception as e:
373                 self.err += _("Unable to get remote log files: %s" % e)
374         
375         return self._has_finished
376           
377     def get_log_files(self):
378         """Get the log files produced by the command launched 
379            on the remote machine, and put them in the user's log directory,
380            so they can be accessed from the local machine.
381         """
382         # Do not get the files if the command is not finished
383         if not self.has_finished():
384             msg = _("Trying to get log files whereas the job is not finished.")
385             self.logger.write(src.printcolors.printcWarning(msg))
386             return
387         
388         # First get the file that contains the list of log files to get
389         tmp_file_path = src.get_tmp_filename(self.config, "list_log_files.txt")
390         remote_path = os.path.join(self.machine.sat_path, "list_log_files.txt")
391         self.machine.sftp.get(
392                     remote_path,
393                     tmp_file_path)
394         
395         # Read the file and get the result of the command and all the log files
396         # to get
397         fstream_tmp = open(tmp_file_path, "r")
398         file_lines = fstream_tmp.readlines()
399         file_lines = [line.replace("\n", "") for line in file_lines]
400         fstream_tmp.close()
401         os.remove(tmp_file_path)
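        # Expected layout of list_log_files.txt, as read below: the first line
        # is the overall result of the remote command ("0" for success, "1" for
        # failure) and every following line is the remote path of a log file
        # to retrieve.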
402         
403         try :
404             # The first line is the result of the command (0 success or 1 fail)
405             self.res_job = file_lines[0]
406         except Exception as e:
407             self.err += _("Unable to get status from remote file %s: %s" % 
408                                                     (remote_path, str(e)))
409
410         for i, job_path_remote in enumerate(file_lines[1:]):
411             try:
412                 # For each command, there are two files to get:
413                 # 1- The xml file describing the command and giving the 
414                 # internal traces.
415                 # 2- The txt file containing the system command traces (like 
416                 # traces produced by the "make" command)
417                 # 3- In case of the test command, there is another file to get:
418                 # the xml board that contains the test results
419                 dirname = os.path.basename(os.path.dirname(job_path_remote))
420                 if dirname != 'OUT' and dirname != 'TEST':
421                     # Case 1-
422                     local_path = os.path.join(os.path.dirname(
423                                                         self.logger.logFilePath),
424                                               os.path.basename(job_path_remote))
425                     if i==0: # The first is the job command
426                         self.logger.add_link(os.path.basename(job_path_remote),
427                                              "job",
428                                              self.res_job,
429                                              self.command) 
430                 elif dirname == 'OUT':
431                     # Case 2-
432                     local_path = os.path.join(os.path.dirname(
433                                                         self.logger.logFilePath),
434                                               'OUT',
435                                               os.path.basename(job_path_remote))
436                 elif dirname == 'TEST':
437                     # Case 3-
438                     local_path = os.path.join(os.path.dirname(
439                                                         self.logger.logFilePath),
440                                               'TEST',
441                                               os.path.basename(job_path_remote))
442                 
443                 # Get the file
444                 if not os.path.exists(local_path):
445                     self.machine.sftp.get(job_path_remote, local_path)
446                 self.remote_log_files.append(local_path)
447             except Exception as e:
448                 self.err += _("Unable to get %s log file from remote: %s" % 
449                                                     (str(job_path_remote),
450                                                      str(e)))
451
452     def has_failed(self):
453         '''Returns True if the job has failed. 
454            A job is considered failed if the machine could not be reached,
455            if the remote command failed, 
456            or if the job finished with a timeout.
457         
458         :return: True if the job has failed
459         :rtype: bool
460         '''
461         if not self.has_finished():
462             return False
463         if not self.machine.successfully_connected(self.logger):
464             return True
465         if self.is_timeout():
466             return True
467         if self.res_job == "1":
468             return True
469         return False
470     
471     def cancel(self):
472         """In case of a failing job, one has to cancel every job that depends 
473            on it. This method marks the job as cancelled; it will not be executed.
474         """
475         if self.cancelled:
476             return
477         self._has_begun = True
478         self._has_finished = True
479         self.cancelled = True
480         self.out += _("This job was not launched because its father has failed.")
481         self.err += _("This job was not launched because its father has failed.")
482
483     def is_running(self):
484         '''Returns True if the job commands are running 
485         
486         :return: True if the job is running
487         :rtype: bool
488         '''
489         return self.has_begun() and not self.has_finished()
490
491     def is_timeout(self):
492         '''Returns True if the job commands have finished with a timeout 
493         
494         :return: True if the job has finished with timeout
495         :rtype: bool
496         '''
497         return self._has_timouted
498
499     def time_elapsed(self):
500         """Get the time elapsed since the job launching
501         
502         :return: The number of seconds
503         :rtype: int
504         """
505         if not self.has_begun():
506             return -1
507         T_now = time.time()
508         return T_now - self._T0
509     
510     def check_time(self):
511         """Verify that the job has not exceeded its timeout.
512            If it has, kill the remote command and consider the job as finished.
513         """
514         if not self.has_begun():
515             return
516         if self.time_elapsed() > self.timeout:
517             self._has_finished = True
518             self._has_timouted = True
519             self._Tf = time.time()
520             (out_kill, __) = self.kill_remote_process()
521             self.out += "TIMEOUT \n" + out_kill
522             self.err += "TIMEOUT : %s seconds elapsed\n" % str(self.timeout)
523             try:
524                 self.get_log_files()
525             except Exception as e:
526                 self.err += _("Unable to get remote log files!\n%s\n" % str(e))
527             
528     def total_duration(self):
529         """Give the total duration of the job
530         
531         :return: the total duration of the job in seconds
532         :rtype: int
533         """
534         return self._Tf - self._T0
535         
536     def run(self):
537         """Launch the job by executing the remote command.
538         """
539         
540         # Prevent multiple run
541         if self.has_begun():
542             msg = _("Warning: A job can only be launched once")
543             msg2 = _("Trying to launch the job \"%s\" whereas it has "
544                      "already been launched." % self.name)
545             self.logger.write(src.printcolors.printcWarning("%s\n%s\n" % (msg,
546                                                                         msg2)))
547             return
548         
549         # Do not execute the command if the machine could not be reached
550         if not self.machine.successfully_connected(self.logger):
551             self._has_finished = True
552             self.out = "N\A"
553             self.err += ("Connection to machine (name : %s, host: %s, port:"
554                         " %s, user: %s) has failed\nUse the log command "
555                         "to get more information."
556                         % (self.machine.name,
557                            self.machine.host,
558                            self.machine.port,
559                            self.machine.user))
560         else:
561             # Usual case : Launch the command on remote machine
562             self._T0 = time.time()
563             self._stdin, self._stdout, self._stderr = self.machine.exec_command(
564                                                                   self.command,
565                                                                   self.logger)
566             # If the results are not initialized, finish the job
567             if (self._stdin, self._stdout, self._stderr) == (None, None, None):
568                 self._has_finished = True
569                 self._Tf = time.time()
570                 self.out += "N\A"
571                 self.err += "The server failed to execute the command"
572         
573         # Put the beginning flag to true.
574         self._has_begun = True
575     
576     def write_results(self):
577         """Display on the terminal all the job's information
578         """
579         self.logger.write("name : " + self.name + "\n")
580         if self.after:
581             self.logger.write("after : %s\n" % self.after)
582         self.logger.write("Time elapsed : %4imin %2is \n" % 
583                      (self.total_duration()//60 , self.total_duration()%60))
584         if self._T0 != -1:
585             self.logger.write("Begin time : %s\n" % 
586                          time.strftime('%Y-%m-%d %H:%M:%S', 
587                                        time.localtime(self._T0)) )
588         if self._Tf != -1:
589             self.logger.write("End time   : %s\n\n" % 
590                          time.strftime('%Y-%m-%d %H:%M:%S', 
591                                        time.localtime(self._Tf)) )
592         
593         machine_head = "Information about the connection :\n"
594         underline = (len(machine_head) - 2) * "-"
595         self.logger.write(src.printcolors.printcInfo(
596                                                 machine_head+underline+"\n"))
597         self.machine.write_info(self.logger)
598         
599         self.logger.write(src.printcolors.printcInfo("out : \n"))
600         if self.out == "":
601             self.logger.write("Unable to get output\n")
602         else:
603             self.logger.write(self.out + "\n")
604         self.logger.write(src.printcolors.printcInfo("err : \n"))
605         self.logger.write(self.err + "\n")
606         
607     def get_status(self):
608         """Get the status of the job (used by the Gui for xml display)
609         
610         :return: The current status of the job
611         :rtype: String
612         """
613         if not self.machine.successfully_connected(self.logger):
614             return "SSH connection KO"
615         if not self.has_begun():
616             return "Not launched"
617         if self.cancelled:
618             return "Cancelled"
619         if self.is_running():
620             return "running since " + time.strftime('%Y-%m-%d %H:%M:%S',
621                                                     time.localtime(self._T0))        
622         if self.has_finished():
623             if self.is_timeout():
624                 return "Timeout since " + time.strftime('%Y-%m-%d %H:%M:%S',
625                                                     time.localtime(self._Tf))
626             return "Finished since " + time.strftime('%Y-%m-%d %H:%M:%S',
627                                                      time.localtime(self._Tf))
628     
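# A Job instance is typically driven by the Jobs class below: run() starts the
# remote command, check_time() and has_finished() are polled while it executes,
# and write_results() prints the outcome once everything is done (see run_jobs
# and write_all_results below).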
629 class Jobs(object):
630     '''Class to manage the jobs to be run
631     '''
632     def __init__(self,
633                  runner,
634                  logger,
635                  job_file_path,
636                  config_jobs,
637                  lenght_columns = 20):
638         # The jobs configuration
639         self.cfg_jobs = config_jobs
640         self.job_file_path = job_file_path
641         # The machines that will be used today
642         self.lmachines = []
643         # The list of (host, port) pairs that will be used today 
644         # (the same host can have several machine instances since there 
645         # can be several ssh parameters) 
646         self.lhosts = []
647         # The jobs to be launched today 
648         self.ljobs = []
649         # The jobs that will not be launched today
650         self.ljobs_not_today = []
651         self.runner = runner
652         self.logger = logger
653         self.len_columns = lenght_columns
654         
655         # the list of jobs that have not been run yet
656         self._l_jobs_not_started = []
657         # the list of jobs that have already run 
658         self._l_jobs_finished = []
659         # the list of jobs that are running 
660         self._l_jobs_running = [] 
661                 
662         self.determine_jobs_and_machines()
663     
664     def define_job(self, job_def, machine):
665         '''Takes a pyconf job definition and a machine (from class machine)
666            and returns the job instance corresponding to the definition.
667         
668         :param job_def src.config.Mapping: a job definition 
669         :param machine machine: the machine on which the job will run
670         :return: The corresponding job in a job class instance
671         :rtype: job
672         '''
673         name = job_def.name
674         cmmnds = job_def.commands
675         if "timeout" not in job_def:
676             timeout = 4*60*60 # default timeout = 4h
677         else:
678             timeout = job_def.timeout
679         after = None
680         if 'after' in job_def:
681             after = job_def.after
682         application = None
683         if 'application' in job_def:
684             application = job_def.application
685         board = None
686         if 'board' in job_def:
687             board = job_def.board
688         prefix = None
689         if "prefix" in job_def:
690             prefix = job_def.prefix
691             
692         return Job(name,
693                    machine,
694                    application,
695                    board,
696                    cmmnds,
697                    timeout,
698                    self.runner.cfg,
699                    self.job_file_path,
700                    self.logger,
701                    after = after,
702                    prefix = prefix)
703     
704     def determine_jobs_and_machines(self):
705         '''Function that reads the pyconf jobs definition and instantiates all
706            the machines and jobs to be done today.
707
708         :return: Nothing
709         :rtype: N\A
710         '''
711         today = datetime.date.weekday(datetime.date.today())
712         host_list = []
713                
714         for job_def in self.cfg_jobs.jobs :
715                 
716             if not "machine" in job_def:
717                 msg = _('WARNING: The job "%s" does not have the key '
718                        '"machine", this job is ignored.\n\n' % job_def.name)
719                 self.logger.write(src.printcolors.printcWarning(msg))
720                 continue
721             name_machine = job_def.machine
722             
723             a_machine = None
724             for mach in self.lmachines:
725                 if mach.name == name_machine:
726                     a_machine = mach
727                     break
728             
729             if a_machine is None:
730                 for machine_def in self.cfg_jobs.machines:
731                     if machine_def.name == name_machine:
732                         if 'host' not in machine_def:
733                             host = self.runner.cfg.VARS.hostname
734                         else:
735                             host = machine_def.host
736
737                         if 'user' not in machine_def:
738                             user = self.runner.cfg.VARS.user
739                         else:
740                             user = machine_def.user
741
742                         if 'port' not in machine_def:
743                             port = 22
744                         else:
745                             port = machine_def.port
746             
747                         if 'password' not in machine_def:
748                             passwd = None
749                         else:
750                             passwd = machine_def.password    
751                             
752                         if 'sat_path' not in machine_def:
753                             sat_path = "salomeTools"
754                         else:
755                             sat_path = machine_def.sat_path
756                         
757                         a_machine = Machine(
758                                             machine_def.name,
759                                             host,
760                                             user,
761                                             port=port,
762                                             passwd=passwd,
763                                             sat_path=sat_path
764                                             )
765                         
766                         self.lmachines.append(a_machine)
767                         if (host, port) not in host_list:
768                             host_list.append((host, port))
769                 
770                 if a_machine is None:
771                     msg = _("WARNING: The job \"%(job_name)s\" requires the "
772                             "machine \"%(machine_name)s\" but this machine "
773                             "is not defined in the configuration file.\n"
774                             "The job will not be launched\n")
775                     self.logger.write(src.printcolors.printcWarning(
776                                         msg % {"job_name" : job_def.name,
777                                                "machine_name" : name_machine}))
778                     continue
779                                   
780             a_job = self.define_job(job_def, a_machine)
781                 
782             if today in job_def.when:    
783                 self.ljobs.append(a_job)
784             else: # today not in job_def.when
785                 self.ljobs_not_today.append(a_job)
786                
787         self.lhosts = host_list
788         
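    # Sketch of the configuration read by the two methods above (illustrative
    # only; the key names come from this file, every value is a placeholder):
    #
    #   machines : [ {name : "m1", host : "my.host", user : "me", port : 22,
    #                 password : "xxx", sat_path : "/tmp/salomeTools"} ]
    #   jobs : [ {name : "job_1", machine : "m1", application : "MY_APPLI",
    #             board : "board_1", when : [0, 1, 2, 3, 4], timeout : 14400,
    #             after : "job_0", prefix : "nice", commands : ["compile"]} ]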
789     def ssh_connection_all_machines(self, pad=50):
790         '''Function that does the ssh connection to every machine 
791            to be used today.
792
793         :return: Nothing
794         :rtype: N\A
795         '''
796         self.logger.write(src.printcolors.printcInfo((
797                         "Establishing connection with all the machines :\n")))
798         for machine in self.lmachines:
799             # little algorithm in order to display traces
800             begin_line = (_("Connection to %s: " % machine.name))
801             if pad - len(begin_line) < 0:
802                 endline = " "
803             else:
804                 endline = (pad - len(begin_line)) * "." + " "
805             
806             step = "SSH connection"
807             self.logger.write( begin_line + endline + step)
808             self.logger.flush()
809             # the call to the method that initiate the ssh connection
810             msg = machine.connect(self.logger)
811             
812             # Copy salomeTools to the remote machine
813             if machine.successfully_connected(self.logger):
814                 step = _("Remove SAT")
815                 self.logger.write('\r%s%s%s' % (begin_line, endline, 20 * " "),3)
816                 self.logger.write('\r%s%s%s' % (begin_line, endline, step), 3)
817                 (__, out_dist, __) = machine.exec_command(
818                                                 "rm -rf %s" % machine.sat_path,
819                                                 self.logger)
820                 out_dist.read()
821                 
822                 self.logger.flush()
823                 step = _("Copy SAT")
824                 self.logger.write('\r%s%s%s' % (begin_line, endline, 20 * " "),3)
825                 self.logger.write('\r%s%s%s' % (begin_line, endline, step), 3)
826                 self.logger.flush()
827                 res_copy = machine.copy_sat(self.runner.cfg.VARS.salometoolsway,
828                                             self.job_file_path)
829
830                 # set the local settings of sat on the remote machine using
831                 # the init command
832                 (__, out_dist, __) = machine.exec_command(
833                                 os.path.join(machine.sat_path,
834                                     "sat init --base default --workdir"
835                                     " default --log_dir default"),
836                                 self.logger)
837                 out_dist.read()    
838                 
839                 # get the remote machine distribution using a sat command
840                 (__, out_dist, __) = machine.exec_command(
841                                 os.path.join(machine.sat_path,
842                                     "sat config --value VARS.dist --no_label"),
843                                 self.logger)
844                 machine.distribution = out_dist.read().decode().replace("\n",
845                                                                         "")
846                 
847                 # Print the status of the copy
848                 if res_copy == 0:
849                     self.logger.write('\r%s' % 
850                                 ((len(begin_line)+len(endline)+20) * " "), 3)
851                     self.logger.write('\r%s%s%s' % 
852                         (begin_line, 
853                          endline, 
854                          src.printcolors.printc(src.OK_STATUS)), 3)
855                 else:
856                     self.logger.write('\r%s' % 
857                             ((len(begin_line)+len(endline)+20) * " "), 3)
858                     self.logger.write('\r%s%s%s %s' % 
859                         (begin_line,
860                          endline,
861                          src.printcolors.printc(src.KO_STATUS),
862                          _("Copy of SAT failed: %s" % res_copy)), 3)
863             else:
864                 self.logger.write('\r%s' % 
865                                   ((len(begin_line)+len(endline)+20) * " "), 3)
866                 self.logger.write('\r%s%s%s %s' % 
867                     (begin_line,
868                      endline,
869                      src.printcolors.printc(src.KO_STATUS),
870                      msg), 3)
871             self.logger.write("\n", 3)
872                 
873         self.logger.write("\n")
874         
875
876     def is_occupied(self, hostname):
877         '''Function that returns the job that is running on 
878            the machine defined by its host and its port, if any.
879         
880         :param hostname (str, int): the pair (host, port)
881         :return: the job that is running on the host, 
882                 or false if there is no job running on the host. 
883         :rtype: job / bool
884         '''
885         host = hostname[0]
886         port = hostname[1]
887         for jb in self.ljobs:
888             if jb.machine.host == host and jb.machine.port == port:
889                 if jb.is_running():
890                     return jb
891         return False
892     
893     def update_jobs_states_list(self):
894         '''Function that updates the lists that store the currently
895            running jobs and the jobs that have already finished.
896         
897         :return: True if a job has finished since the last call 
898         :rtype: bool
899         '''
900         jobs_finished_list = []
901         jobs_running_list = []
902         for jb in self.ljobs:
903             if jb.is_running():
904                 jobs_running_list.append(jb)
905                 jb.check_time()
906             if jb.has_finished():
907                 jobs_finished_list.append(jb)
908         
909         nb_job_finished_before = len(self._l_jobs_finished)
910         self._l_jobs_finished = jobs_finished_list
911         self._l_jobs_running = jobs_running_list
912         
913         nb_job_finished_now = len(self._l_jobs_finished)
914         
915         return nb_job_finished_now > nb_job_finished_before
916     
917     def cancel_dependencies_of_failing_jobs(self):
918         '''Function that cancels all the jobs that depend on a failing one.
919         
920         :return: Nothing. 
921         :rtype: N\A
922         '''
923         
924         for job in self.ljobs:
925             if job.after is None:
926                 continue
927             father_job = self.find_job_that_has_name(job.after)
928             if father_job is not None and father_job.has_failed():
929                 job.cancel()
930     
931     def find_job_that_has_name(self, name):
932         '''Returns the job by its name.
933         
934         :param name str: a job name
935         :return: the job that has the name. 
936         :rtype: job
937         '''
938         for jb in self.ljobs:
939             if jb.name == name:
940                 return jb
941         # the following is executed only if the job was not found
942         return None
943     
944     def str_of_length(self, text, length):
945         '''Takes a string text of any length and returns 
946            the closest string of length "length" (truncated or padded).
947         
948         :param text str: any string
949         :param length int: a length for the returned string
950         :return: the closest string of length "length"
951         :rtype: str
952         '''
953         if len(text) > length:
954             text_out = text[:length-3] + '...'
955         else:
956             diff = length - len(text)
957             before = " " * (diff//2)
958             after = " " * (diff//2 + diff%2)
959             text_out = before + text + after
960             
961         return text_out
962     
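    # For example (illustrative):
    #   str_of_length("hello", 11)                 returns "   hello   "
    #   str_of_length("a_very_long_job_name", 10)  returns "a_very_..."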
963     def display_status(self, len_col):
964         '''Takes a length and constructs the display of the current status 
965            of the jobs in an array that has a column for each host.
966            It displays the job that is currently running on the host 
967            of the column.
968         
969         :param len_col int: the size of the column 
970         :return: Nothing
971         :rtype: N\A
972         '''
973         
974         display_line = ""
975         for host_port in self.lhosts:
976             jb = self.is_occupied(host_port)
977             if not jb: # nothing running on the host
978                 empty = self.str_of_length("empty", len_col)
979                 display_line += "|" + empty 
980             else:
981                 display_line += "|" + src.printcolors.printcInfo(
982                                         self.str_of_length(jb.name, len_col))
983         
984         self.logger.write("\r" + display_line + "|")
985         self.logger.flush()
986     
987
988     def run_jobs(self):
989         '''The main method. Runs all the jobs on every host. 
990            For each host, at a given time, only one job can be running.
991            The jobs that have the field "after" (which contains the name of the
992            job that has to run before them) are run after that job has finished.
993            This method stops when all the jobs are finished.
994         
995         :return: Nothing
996         :rtype: N\A
997         '''
998
999         # Print header
1000         self.logger.write(src.printcolors.printcInfo(
1001                                                 _('Executing the jobs :\n')))
1002         text_line = ""
1003         for host_port in self.lhosts:
1004             host = host_port[0]
1005             port = host_port[1]
1006             if port == 22: # default value
1007                 text_line += "|" + self.str_of_length(host, self.len_columns)
1008             else:
1009                 text_line += "|" + self.str_of_length(
1010                                 "("+host+", "+str(port)+")", self.len_columns)
1011         
1012         tiret_line = " " + "-"*(len(text_line)-1) + "\n"
1013         self.logger.write(tiret_line)
1014         self.logger.write(text_line + "|\n")
1015         self.logger.write(tiret_line)
1016         self.logger.flush()
1017         
1018         # The main loop that runs the jobs until all of them have finished
1019         l_jobs_not_started = src.deepcopy_list(self.ljobs)
1020         while len(self._l_jobs_finished) != len(self.ljobs):
1021             new_job_start = False
1022             for host_port in self.lhosts:
1023                 
1024                 if self.is_occupied(host_port):
1025                     continue
1026              
1027                 for jb in l_jobs_not_started:
1028                     if (jb.machine.host, jb.machine.port) != host_port:
1029                         continue 
1030                     if jb.after is None:
1031                         jb.run()
1032                         l_jobs_not_started.remove(jb)
1033                         new_job_start = True
1034                         break
1035                     else:
1036                         jb_before = self.find_job_that_has_name(jb.after)
1037                         if jb_before is None:
1038                             jb.cancel()
1039                             msg = _("This job was not launched because its "
1040                                     "father is not in the jobs list.")
1041                             jb.out = msg
1042                             jb.err = msg
1043                             break
1044                         if jb_before.has_finished():
1045                             jb.run()
1046                             l_jobs_not_started.remove(jb)
1047                             new_job_start = True
1048                             break
1049             self.cancel_dependencies_of_failing_jobs()
1050             new_job_finished = self.update_jobs_states_list()
1051             
1052             if new_job_start or new_job_finished:
1053                 if self.gui:
1054                     self.gui.update_xml_files(self.ljobs)            
1055                 # Display the current status     
1056                 self.display_status(self.len_columns)
1057             
1058             # Make sure that the proc is not entirely busy
1059             time.sleep(0.001)
1060         
1061         self.logger.write("\n")    
1062         self.logger.write(tiret_line)                   
1063         self.logger.write("\n\n")
1064         
1065         if self.gui:
1066             self.gui.update_xml_files(self.ljobs)
1067             self.gui.last_update()
1068
1069     def write_all_results(self):
1070         '''Display all the jobs outputs.
1071         
1072         :return: Nothing
1073         :rtype: N\A
1074         '''
1075         
1076         for jb in self.ljobs:
1077             self.logger.write(src.printcolors.printcLabel(
1078                         "#------- Results for job %s -------#\n" % jb.name))
1079             jb.write_results()
1080             self.logger.write("\n\n")
1081
1082 class Gui(object):
1083     '''Class to manage the xml data that can be displayed in a browser to
1084        see the jobs states
1085     '''
1086    
1087     def __init__(self,
1088                  xml_dir_path,
1089                  l_jobs,
1090                  l_jobs_not_today,
1091                  prefix,
1092                  logger,
1093                  file_boards=""):
1094         '''Initialization
1095         
1096         :param xml_dir_path str: The path to the directory where to put 
1097                                  the xml resulting files
1098         :param l_jobs List: the list of jobs that run today
1099         :param l_jobs_not_today List: the list of jobs that do not run today
1100         :param file_boards str: the file path from which to read the
1101                                    expected boards
1102         '''
1103         # The logging instance
1104         self.logger = logger
1105         
1106         # The prefix to add to the xml files : date_hour
1107         self.prefix = prefix
1108         
1109         # The path of the csv files to read to fill the expected boards
1110         self.file_boards = file_boards
1111         
1112         if file_boards != "":
1113             today = datetime.date.weekday(datetime.date.today())
1114             self.parse_csv_boards(today)
1115         else:
1116             self.d_input_boards = {}
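        # d_input_boards is filled by parse_csv_boards; as used in
        # initialize_boards it maps each board name to a dictionary with the
        # keys "rows" (distributions), "columns" (applications), "jobs" and
        # "jobs_not_today" (lists of (distribution, application) pairs).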
1117         
1118         # The path of the global xml file
1119         self.xml_dir_path = xml_dir_path
1120         # Initialize the xml files
1121         self.global_name = "global_report"
1122         xml_global_path = os.path.join(self.xml_dir_path,
1123                                        self.global_name + ".xml")
1124         self.xml_global_file = src.xmlManager.XmlLogFile(xml_global_path,
1125                                                          "JobsReport")
1126
1127         # Find history for each job
1128         self.history = {}
1129         self.find_history(l_jobs, l_jobs_not_today)
1130
1131         # The xml files that correspond to the boards.
1132         # {name_board : xml_object}
1133         self.d_xml_board_files = {}
1134
1135         # Create the lines and columns
1136         self.initialize_boards(l_jobs, l_jobs_not_today)
1137
1138         # Write the xml file
1139         self.update_xml_files(l_jobs)
1140     
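    # The Gui object maintains one global_report.xml file plus one xml file per
    # board in xml_dir_path; the history nodes point to older, date-prefixed
    # copies of those files (e.g. "20180101_120000_global_report.xml", an
    # illustrative name matching the regular expressions used below).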
1141     def add_xml_board(self, name):
1142         '''Add a board to the board list   
1143         :param name str: the board name
1144         '''
1145         xml_board_path = os.path.join(self.xml_dir_path, name + ".xml")
1146         self.d_xml_board_files[name] =  src.xmlManager.XmlLogFile(
1147                                                     xml_board_path,
1148                                                     "JobsReport")
1149         self.d_xml_board_files[name].add_simple_node("distributions")
1150         self.d_xml_board_files[name].add_simple_node("applications")
1151         self.d_xml_board_files[name].add_simple_node("board", text=name)
1152            
1153     def initialize_boards(self, l_jobs, l_jobs_not_today):
1154         '''Get all the first information needed for each file and write the 
1155            first version of the files   
1156         :param l_jobs List: the list of jobs that run today
1157         :param l_jobs_not_today List: the list of jobs that do not run today
1158         '''
1159         # Get the boards to fill and put it in a dictionary
1160         # {board_name : xml instance corresponding to the board}
1161         for job in l_jobs + l_jobs_not_today:
1162             board = job.board
1163             if (board is not None and 
1164                                 board not in self.d_xml_board_files.keys()):
1165                 self.add_xml_board(board)
1166         
1167         # Make sure that every board given as input has its xml file
1168         for board in list(self.d_input_boards.keys()):
1169             if board not in self.d_xml_board_files:
1170                 self.add_xml_board(board)
1171             root_node = self.d_xml_board_files[board].xmlroot
1172             src.xmlManager.append_node_attrib(root_node, 
1173                                               {"input_file" : self.file_boards})
1174         
1175         # Loop over all jobs in order to get the lines and columns for each 
1176         # xml file
1177         d_dist = {}
1178         d_application = {}
1179         for board in self.d_xml_board_files:
1180             d_dist[board] = []
1181             d_application[board] = []
1182             
1183         l_hosts_ports = []
1184             
1185         for job in l_jobs + l_jobs_not_today:
1186             
1187             if (job.machine.host, job.machine.port) not in l_hosts_ports:
1188                 l_hosts_ports.append((job.machine.host, job.machine.port))
1189                 
1190             distrib = job.machine.distribution
1191             application = job.application
1192             
1193             board_job = job.board
1194             if board_job is None:
1195                 continue
1196             for board in self.d_xml_board_files:
1197                 if board_job == board:
1198                     if (distrib not in [None, ''] and 
1199                                             distrib not in d_dist[board]):
1200                         d_dist[board].append(distrib)
1201                         src.xmlManager.add_simple_node(
1202                             self.d_xml_board_files[board].xmlroot.find(
1203                                                             'distributions'),
1204                                                    "dist",
1205                                                    attrib={"name" : distrib})
1206                     
1207                 if board_job == board:
1208                     if (application not in [None, ''] and 
1209                                     application not in d_application[board]):
1210                         d_application[board].append(application)
1211                         src.xmlManager.add_simple_node(
1212                             self.d_xml_board_files[board].xmlroot.find(
1213                                                                 'applications'),
1214                                                    "application",
1215                                                    attrib={
1216                                                         "name" : application})
1217         
1218         # Verify that there are no missing application or distribution in the
1219         # xml board files (regarding the input boards)
1220         for board in self.d_xml_board_files:
1221             l_dist = d_dist[board]
1222             if board not in self.d_input_boards.keys():
1223                 continue
1224             for dist in self.d_input_boards[board]["rows"]:
1225                 if dist not in l_dist:
1226                     src.xmlManager.add_simple_node(
1227                             self.d_xml_board_files[board].xmlroot.find(
1228                                                             'distributions'),
1229                                                    "dist",
1230                                                    attrib={"name" : dist})
1231             l_appli = d_application[board]
1232             for appli in self.d_input_boards[board]["columns"]:
1233                 if appli not in l_appli:
1234                     src.xmlManager.add_simple_node(
1235                             self.d_xml_board_files[board].xmlroot.find(
1236                                                                 'applications'),
1237                                                    "application",
1238                                                    attrib={"name" : appli})
1239                 
1240         # Initialize the hosts_ports node for the global file
1241         self.xmlhosts_ports = self.xml_global_file.add_simple_node(
1242                                                                 "hosts_ports")
1243         for host, port in l_hosts_ports:
1244             host_port = "%s:%i" % (host, port)
1245             src.xmlManager.add_simple_node(self.xmlhosts_ports,
1246                                            "host_port",
1247                                            attrib={"name" : host_port})
1248         
1249         # Initialize the jobs node in all files
1250         for xml_file in [self.xml_global_file] + list(
1251                                             self.d_xml_board_files.values()):
1252             xml_jobs = xml_file.add_simple_node("jobs")      
1253             # Get the jobs present in the config file but 
1254             # that will not be launched today
1255             self.put_jobs_not_today(l_jobs_not_today, xml_jobs)
1256             
1257             # add also the infos node
1258             xml_file.add_simple_node("infos",
1259                                      attrib={"name" : "last update",
1260                                              "JobsCommandStatus" : "running"})
1261             
1262             # and put the history node
1263             history_node = xml_file.add_simple_node("history")
1264             name_board = os.path.basename(xml_file.logFile)[:-len(".xml")]
1265             # search for board files
1266             expression = "^[0-9]{8}_+[0-9]{6}_" + name_board + ".xml$"
1267             oExpr = re.compile(expression)
1268             # Get the list of xml board files that are in the log directory
1269             for file_name in os.listdir(self.xml_dir_path):
1270                 if oExpr.search(file_name):
1271                     date = os.path.basename(file_name).split("_")[0]
1272                     file_path = os.path.join(self.xml_dir_path, file_name)
1273                     src.xmlManager.add_simple_node(history_node,
1274                                                    "link",
1275                                                    text=file_path,
1276                                                    attrib={"date" : date})      
1277             
1278                 
1279         # Find in each board the squares that need to be filled according to
1280         # the input csv files but that are not covered by a job today
1281         for board in self.d_input_boards.keys():
1282             xml_root_board = self.d_xml_board_files[board].xmlroot
1283             # Find the missing jobs for today
1284             xml_missing = src.xmlManager.add_simple_node(xml_root_board,
1285                                                  "missing_jobs")
1286             for row, column in self.d_input_boards[board]["jobs"]:
1287                 found = False
1288                 for job in l_jobs:
1289                     if (job.application == column and 
1290                         job.machine.distribution == row):
1291                         found = True
1292                         break
1293                 if not found:
1294                     src.xmlManager.add_simple_node(xml_missing,
1295                                             "job",
1296                                             attrib={"distribution" : row,
1297                                                     "application" : column })
1298             # Find the missing jobs not today
1299             xml_missing_not_today = src.xmlManager.add_simple_node(
1300                                                  xml_root_board,
1301                                                  "missing_jobs_not_today")
1302             for row, column in self.d_input_boards[board]["jobs_not_today"]:
1303                 found = False
1304                 for job in l_jobs_not_today:
1305                     if (job.application == column and 
1306                         job.machine.distribution == row):
1307                         found = True
1308                         break
1309                 if not found:
1310                     src.xmlManager.add_simple_node(xml_missing_not_today,
1311                                             "job",
1312                                             attrib={"distribution" : row,
1313                                                     "application" : column })
1314
1315     def find_history(self, l_jobs, l_jobs_not_today):
1316         """find, for each job, in the existent xml boards the results for the 
1317            job. Store the results in the dictionnary self.history = {name_job : 
1318            list of (date, status, list links)}
1319         
1320         :param l_jobs List: the list of jobs to run today   
1321         :param l_jobs_not_today List: the list of jobs that do not run today
1322         """
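             # Sketch of the structure being built (names, dates and values are
             # hypothetical):
             #   self.history["my_appli / CO7"] = [
             #       ("20180417", "1", "/logs/20180417_..._remote_log.xml"),
             #       ("20180416", "0", "/logs/20180416_..._remote_log.xml")]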
1323         # load all the history
1324         expression = "^[0-9]{8}_+[0-9]{6}_" + self.global_name + ".xml$"
1325         oExpr = re.compile(expression)
1326         # Get the list of global xml that are in the log directory
1327         l_globalxml = []
1328         for file_name in os.listdir(self.xml_dir_path):
1329             if oExpr.search(file_name):
1330                 file_path = os.path.join(self.xml_dir_path, file_name)
1331                 try:
1332                     global_xml = src.xmlManager.ReadXmlFile(file_path)
1333                     l_globalxml.append(global_xml)
1334                 except Exception as e:
1335                     msg = _("\nWARNING: the file %s can not be read, it will be "
1336                             "ignored\n%s" % (file_path, e))
1337                     self.logger.write("%s\n" % src.printcolors.printcWarning(
1338                                                                         msg), 5)
1339                     
1340         # Construct the dictionary self.history
1341         for job in l_jobs + l_jobs_not_today:
1342             l_links = []
1343             for global_xml in l_globalxml:
1344                 date = os.path.basename(global_xml.filePath).split("_")[0]
1345                 global_root_node = global_xml.xmlroot.find("jobs")
1346                 job_node = src.xmlManager.find_node_by_attrib(
1347                                                               global_root_node,
1348                                                               "job",
1349                                                               "name",
1350                                                               job.name)
1351                 if job_node:
1352                     if job_node.find("remote_log_file_path") is not None:
1353                         link = job_node.find("remote_log_file_path").text
1354                         res_job = job_node.find("res").text
1355                         if link != "nothing":
1356                             l_links.append((date, res_job, link))
1357             l_links = sorted(l_links, reverse=True)
1358             self.history[job.name] = l_links
1359   
1360     def put_jobs_not_today(self, l_jobs_not_today, xml_node_jobs):
1361         '''Add to the jobs node the jobs that will not be launched today,
1362            together with their description and history
1363
1364         :param xml_node_jobs etree.Element: the "jobs" node to fill
1365         :param l_jobs_not_today List: the list of jobs that do not run today
1366         '''
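             # Sketch of the node added for one job (names and commands are
             # hypothetical):
             #   <job name="my_job / CO7">
             #     <application>my_appli</application>
             #     <distribution>CO7</distribution>
             #     <board>my_board</board>
             #     <commands>sat prepare my_appli ; sat compile my_appli</commands>
             #     <state>Not today</state>
             #     ...
             #   </job>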
1367         for job in l_jobs_not_today:
1368             xmlj = src.xmlManager.add_simple_node(xml_node_jobs,
1369                                                  "job",
1370                                                  attrib={"name" : job.name})
1371             src.xmlManager.add_simple_node(xmlj, "application", job.application)
1372             src.xmlManager.add_simple_node(xmlj,
1373                                            "distribution",
1374                                            job.machine.distribution)
1375             src.xmlManager.add_simple_node(xmlj, "board", job.board)
1376             src.xmlManager.add_simple_node(xmlj,
1377                                        "commands", " ; ".join(job.commands))
1378             src.xmlManager.add_simple_node(xmlj, "state", "Not today")
1379             src.xmlManager.add_simple_node(xmlj, "machine", job.machine.name)
1380             src.xmlManager.add_simple_node(xmlj, "host", job.machine.host)
1381             src.xmlManager.add_simple_node(xmlj, "port", str(job.machine.port))
1382             src.xmlManager.add_simple_node(xmlj, "user", job.machine.user)
1383             src.xmlManager.add_simple_node(xmlj, "sat_path",
1384                                                         job.machine.sat_path)
1385             xml_history = src.xmlManager.add_simple_node(xmlj, "history")
1386             for i, (date, res_job, link) in enumerate(self.history[job.name]):
1387                 if i==0:
1388                     # tag the first one (the last one)
1389                     src.xmlManager.add_simple_node(xml_history,
1390                                                    "link",
1391                                                    text=link,
1392                                                    attrib={"date" : date,
1393                                                            "res" : res_job,
1394                                                            "last" : "yes"})
1395                 else:
1396                     src.xmlManager.add_simple_node(xml_history,
1397                                                    "link",
1398                                                    text=link,
1399                                                    attrib={"date" : date,
1400                                                            "res" : res_job,
1401                                                            "last" : "no"})
1402
1403     def parse_csv_boards(self, today):
1404         """ Parse the csv file that describes the boards to produce and fill 
1405             the dict d_input_boards that contain the csv file contain
1406         
1407         :param today int: the current day of the week 
1408         """
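             # Illustrative board description (board, application and
             # distribution names are hypothetical); fields are separated by
             # CSV_DELIMITER (";") and days by DAYS_SEPARATOR (","):
             #   my_board;APPLI_1;APPLI_2
             #   CO7;0,1,2,3,4;5
             #   FD22;;0,1,2,3,4
             # An empty line separates two boards. With today == 5 this gives:
             #   d_input_boards["my_board"] = {
             #       "rows": ["CO7", "FD22"],
             #       "columns": ["APPLI_1", "APPLI_2"],
             #       "jobs": [("CO7", "APPLI_2")],
             #       "jobs_not_today": [("CO7", "APPLI_1"), ("FD22", "APPLI_2")]}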
1409         # open the csv file and read its content
1410         l_read = []
1411         with open(self.file_boards, 'r') as f:
1412             reader = csv.reader(f,delimiter=CSV_DELIMITER)
1413             for row in reader:
1414                 l_read.append(row)
1415         # get the delimiter for the boards (empty line)
1416         boards_delimiter = [''] * len(l_read[0])
1417         # Make the list of boards, by splitting with the delimiter
1418         l_boards = [list(y) for x, y in itertools.groupby(l_read,
1419                                     lambda z: z == boards_delimiter) if not x]
1420            
1421         # loop over the csv lists of lines and get the rows, columns and jobs
1422         d_boards = {}
1423         for input_board in l_boards:
1424             # get board name
1425             board_name = input_board[0][0]
1426             
1427             # Get columns list
1428             columns = input_board[0][1:]
1429             
1430             rows = []
1431             jobs = []
1432             jobs_not_today = []
1433             for line in input_board[1:]:
1434                 row = line[0]
1435                 rows.append(row)
1436                 for i, square in enumerate(line[1:]):
1437                     if square=='':
1438                         continue
1439                     days = square.split(DAYS_SEPARATOR)
1440                     days = [int(day) for day in days]
1441                     job = (row, columns[i])
1442                     if today in days:                           
1443                         jobs.append(job)
1444                     else:
1445                         jobs_not_today.append(job)
1446
1447             d_boards[board_name] = {"rows" : rows,
1448                                     "columns" : columns,
1449                                     "jobs" : jobs,
1450                                     "jobs_not_today" : jobs_not_today}
1451         
1452         self.d_input_boards = d_boards
1453
1454     def update_xml_files(self, l_jobs):
1455         '''Write all the xml files with updated information about the jobs   
1456
1457         :param l_jobs List: the list of jobs that run today
1458         '''
1459         for xml_file in [self.xml_global_file] + list(
1460                                             self.d_xml_board_files.values()):
1461             self.update_xml_file(l_jobs, xml_file)
1462             
1463         # Write the file
1464         self.write_xml_files()
1465             
1466     def update_xml_file(self, l_jobs, xml_file):      
1467         '''update information about the jobs for the file xml_file   
1468
1469         :param l_jobs List: the list of jobs that run today
1470         :param xml_file xmlManager.XmlLogFile: the xml instance to update
1471         '''
1472         
1473         xml_node_jobs = xml_file.xmlroot.find('jobs')
1474         # Update the job names and status node
1475         for job in l_jobs:
1476             # Find the node corresponding to the job and delete it
1477             # in order to recreate it
1478             for xmljob in xml_node_jobs.findall('job'):
1479                 if xmljob.attrib['name'] == job.name:
1480                     xml_node_jobs.remove(xmljob)
1481             
1482             T0 = str(job._T0)
1483             if T0 != "-1":
1484                 T0 = time.strftime('%Y-%m-%d %H:%M:%S', 
1485                                        time.localtime(job._T0))
1486             Tf = str(job._Tf)
1487             if Tf != "-1":
1488                 Tf = time.strftime('%Y-%m-%d %H:%M:%S', 
1489                                        time.localtime(job._Tf))
1490             
1491             # recreate the job node
1492             xmlj = src.xmlManager.add_simple_node(xml_node_jobs,
1493                                                   "job",
1494                                                   attrib={"name" : job.name})
1495             src.xmlManager.add_simple_node(xmlj, "machine", job.machine.name)
1496             src.xmlManager.add_simple_node(xmlj, "host", job.machine.host)
1497             src.xmlManager.add_simple_node(xmlj, "port", str(job.machine.port))
1498             src.xmlManager.add_simple_node(xmlj, "user", job.machine.user)
1499             xml_history = src.xmlManager.add_simple_node(xmlj, "history")
1500             for date, res_job, link in self.history[job.name]:
1501                 src.xmlManager.add_simple_node(xml_history,
1502                                                "link",
1503                                                text=link,
1504                                                attrib={"date" : date,
1505                                                        "res" : res_job})
1506
1507             src.xmlManager.add_simple_node(xmlj, "sat_path",
1508                                            job.machine.sat_path)
1509             src.xmlManager.add_simple_node(xmlj, "application", job.application)
1510             src.xmlManager.add_simple_node(xmlj, "distribution",
1511                                            job.machine.distribution)
1512             src.xmlManager.add_simple_node(xmlj, "board", job.board)
1513             src.xmlManager.add_simple_node(xmlj, "timeout", str(job.timeout))
1514             src.xmlManager.add_simple_node(xmlj, "commands",
1515                                            " ; ".join(job.commands))
1516             src.xmlManager.add_simple_node(xmlj, "state", job.get_status())
1517             src.xmlManager.add_simple_node(xmlj, "begin", T0)
1518             src.xmlManager.add_simple_node(xmlj, "end", Tf)
1519             src.xmlManager.add_simple_node(xmlj, "out",
1520                                            src.printcolors.cleancolor(job.out))
1521             src.xmlManager.add_simple_node(xmlj, "err",
1522                                            src.printcolors.cleancolor(job.err))
1523             src.xmlManager.add_simple_node(xmlj, "res", str(job.res_job))
1524             if len(job.remote_log_files) > 0:
1525                 src.xmlManager.add_simple_node(xmlj,
1526                                                "remote_log_file_path",
1527                                                job.remote_log_files[0])
1528             else:
1529                 src.xmlManager.add_simple_node(xmlj,
1530                                                "remote_log_file_path",
1531                                                "nothing")           
1532             # Search for the test log if there is any
1533             l_test_log_files = self.find_test_log(job.remote_log_files)
1534             xml_test = src.xmlManager.add_simple_node(xmlj,
1535                                                       "test_log_file_path")
1536             for test_log_path, res_test, nb_fails in l_test_log_files:
1537                 test_path_node = src.xmlManager.add_simple_node(xml_test,
1538                                                "path",
1539                                                test_log_path)
1540                 test_path_node.attrib["res"] = res_test
1541                 test_path_node.attrib["nb_fails"] = nb_fails
1542             
1543             xmlafter = src.xmlManager.add_simple_node(xmlj, "after", job.after)
1544             # get the job father
1545             if job.after is not None:
1546                 job_father = None
1547                 for jb in l_jobs:
1548                     if jb.name == job.after:
1549                         job_father = jb
1550                 
1551                 if (job_father is not None and 
1552                         len(job_father.remote_log_files) > 0):
1553                     link = job_father.remote_log_files[0]
1554                 else:
1555                     link = "nothing"
1556                 src.xmlManager.append_node_attrib(xmlafter, {"link" : link})
1557             
1558             # Verify that the job is to be done today regarding the input csv
1559             # files
1560             if job.board and job.board in self.d_input_boards.keys():
1561                 found = False
1562                 for dist, appli in self.d_input_boards[job.board]["jobs"]:
1563                     if (job.machine.distribution == dist 
1564                         and job.application == appli):
1565                         found = True
1566                         src.xmlManager.add_simple_node(xmlj,
1567                                                "extra_job",
1568                                                "no")
1569                         break
1570                 if not found:
1571                     src.xmlManager.add_simple_node(xmlj,
1572                                                "extra_job",
1573                                                "yes")
1574             
1575         
1576         # Update the date
1577         xml_node_infos = xml_file.xmlroot.find('infos')
1578         src.xmlManager.append_node_attrib(xml_node_infos,
1579                     attrib={"value" : 
1580                     datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")})
1581                
1582
1583     def find_test_log(self, l_remote_log_files):
1584         '''Find if there is a test log (board) in the remote log files and 
1585            the path to it. There can be several test commands, so the result is
1586            a list.
1587
1588         :param l_remote_log_files List: the list of all remote log files
1589         :return: the list of (test log file path, res of the command, number of fails)
1590         :rtype: List
1591         '''
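             # Sketch of a possible result (path and values are hypothetical):
             #   [("/logs/TEST/20180416_173512_test.xml", "OK", 0)]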
1592         res = []
1593         for file_path in l_remote_log_files:
1594             dirname = os.path.basename(os.path.dirname(file_path))
1595             file_name = os.path.basename(file_path)
1596             regex = src.logger.log_all_command_file_expression
1597             oExpr = re.compile(regex)
1598             if dirname == "TEST" and oExpr.search(file_name):
1599                 # find the res of the command
1600                 prod_node = etree.parse(file_path).getroot().find("product")
1601                 res_test = prod_node.attrib["global_res"]
1602                 # find the number of fails
1603                 testbase_node = prod_node.find("tests").find("testbase")
1604                 nb_fails = int(testbase_node.attrib["failed"])
1605                 # put the file path, the res of the test command and the number 
1606                 # of fails in the output
1607                 res.append((file_path, res_test, nb_fails))
1608                 
1609         return res
1610     
1611     def last_update(self, finish_status = "finished"):
1612         '''Write the final status of the jobs command in every xml file
1613
1614         :param finish_status str: the status to write in the "infos" node
1615            of each file (default "finished")
1616         '''
1617         for xml_file in [self.xml_global_file] + list(self.d_xml_board_files.values()):
1618             xml_node_infos = xml_file.xmlroot.find('infos')
1619             src.xmlManager.append_node_attrib(xml_node_infos,
1620                         attrib={"JobsCommandStatus" : finish_status})
1621         # Write the file
1622         self.write_xml_files()
1623
1624     def write_xml_file(self, xml_file, stylesheet):
1625         ''' Write one xml file and the same file with prefix
1626         '''
1627         xml_file.write_tree(stylesheet)
1628         file_path = xml_file.logFile
1629         file_dir = os.path.dirname(file_path)
1630         file_name = os.path.basename(file_path)
1631         file_name_with_prefix = self.prefix + "_" + file_name
1632         xml_file.write_tree(stylesheet, os.path.join(file_dir,
1633                                                      file_name_with_prefix))
1634         
1635     def write_xml_files(self):
1636         ''' Write the xml files   
1637         '''
1638         self.write_xml_file(self.xml_global_file, STYLESHEET_GLOBAL)
1639         for xml_file in self.d_xml_board_files.values():
1640             self.write_xml_file(xml_file, STYLESHEET_BOARD)
1641
1642 def get_config_file_path(job_config_name, l_cfg_dir):
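     '''Find the path to the jobs configuration file. The name can be a full
        path to a .pyconf file, or a file name (with or without the .pyconf
        extension) searched for in the directories of l_cfg_dir.

     :param job_config_name str: the name or path of the jobs config file
     :param l_cfg_dir List: the directories in which to look for the file
     :return: (found, path) where found tells whether the file exists
     :rtype: (bool, str)
     '''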
1643     found = False
1644     file_jobs_cfg = None
1645     if os.path.exists(job_config_name) and job_config_name.endswith(".pyconf"):
1646         found = True
1647         file_jobs_cfg = job_config_name
1648     else:
1649         for cfg_dir in l_cfg_dir:
1650             file_jobs_cfg = os.path.join(cfg_dir, job_config_name)
1651             if not file_jobs_cfg.endswith('.pyconf'):
1652                 file_jobs_cfg += '.pyconf'
1653             
1654             if not os.path.exists(file_jobs_cfg):
1655                 continue
1656             else:
1657                 found = True
1658                 break
1659     return found, file_jobs_cfg
1660
1661 def develop_factorized_jobs(config_jobs):
1662     '''Expand the factorized jobs (a job declared on a list of machines)
1663        into individual jobs, one per machine
1664     :param config_jobs Config: the config corresponding to the jobs description
1665     '''
1666     developed_jobs_list = []
1667     for jb in config_jobs.jobs:
1668         # case where the jobs are not developed
1669         if type(jb.machine) == type(""):
1670             developed_jobs_list.append(jb)
1671             continue
1672         # Case where the jobs must be developed
1673         # Example:
1674         # machine : ["CO7.2 physique", ["CO6.4 physique", $MONDAY, $TUESDAY ], "FD22"]
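             # This example (machine names are hypothetical) is developed into
             # three jobs named:
             #   "<job name> / CO7.2 physique"  (runs on the days of the original job)
             #   "<job name> / CO6.4 physique"  (when redefined to [$MONDAY, $TUESDAY])
             #   "<job name> / FD22"            (runs on the days of the original job)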
1675         name_job = jb.name
1676         for machine in jb.machine:
1677             new_job = src.pyconf.deepCopyMapping(jb)
1678             # case where there is a job on the machine that runs on all the
1679             # days given in the "when" variable of the original job
1680             if type(machine) == type(""):
1681                 new_job.machine = machine
1682                 new_job.name = name_job + " / " + machine
1683             else:
1684                 # case where the days are redefined
1685                 new_job.machine = machine[0]
1686                 new_job.name = name_job + " / " + machine[0]
1687                 new_job.when = machine[1:]
1688             developed_jobs_list.append(new_job)
1689     
1690     config_jobs.jobs = developed_jobs_list
1691             
1692
1693 ##
1694 # Describes the command
1695 def description():
1696     return _("The jobs command launches maintenances that are described"
1697              " in the dedicated jobs configuration file.\n\nexample:\nsat "
1698              "jobs --name my_jobs --publish")
1699
1700 ##
1701 # Runs the command.
1702 def run(args, runner, logger):
1703        
1704     (options, args) = parser.parse_args(args)
1705        
1706     l_cfg_dir = runner.cfg.PATHS.JOBPATH
1707     
1708     # list option : display all the available config files
1709     if options.list:
1710         for cfg_dir in l_cfg_dir:
1711             if not options.no_label:
1712                 logger.write("------ %s\n" % 
1713                                  src.printcolors.printcHeader(cfg_dir))
1714             if not os.path.exists(cfg_dir):
1715                 continue
1716             for f in sorted(os.listdir(cfg_dir)):
1717                 if not f.endswith('.pyconf'):
1718                     continue
1719                 cfilename = f[:-7]
1720                 logger.write("%s\n" % cfilename)
1721         return 0
1722
1723     # Make sure the jobs configuration option (--name) has been given
1724     if not options.jobs_cfg:
1725         message = _("The option --name (jobs config file) is required\n")
1726         src.printcolors.printcError(message)
1727         return 1
1728     
1729     # Find the files in the directories (unless full paths are given)
1730     # and merge them all into one config
1731     merger = src.pyconf.ConfigMerger()
1732     config_jobs = src.pyconf.Config()
1733     l_conf_files_path = []
1734     for config_file in options.jobs_cfg:
1735         found, file_jobs_cfg = get_config_file_path(config_file, l_cfg_dir)
1736         if not found:
1737             msg = _("The file configuration %s was not found."
1738                     "\nUse the --list option to get the "
1739                     "possible files." % config_file)
1740             logger.write("%s\n" % src.printcolors.printcError(msg), 1)
1741             return 1
1742         l_conf_files_path.append(file_jobs_cfg)
1743         # Read the config that is in the file
1744         one_config_jobs = src.read_config_from_a_file(file_jobs_cfg)
1745         merger.merge(config_jobs, one_config_jobs)
1746     
1747     info = [
1748         (_("Platform"), runner.cfg.VARS.dist),
1749         (_("Files containing the jobs configuration"), l_conf_files_path)
1750     ]    
1751     src.print_info(logger, info)
1752
1753     if options.only_jobs:
1754         l_jb = src.pyconf.Sequence()
1755         for jb in config_jobs.jobs:
1756             if jb.name in options.only_jobs:
1757                 l_jb.append(jb,
1758                 "Job that was given in only_jobs option parameters\n")
1759         config_jobs.jobs = l_jb
1760     
1761     # Parse the config jobs in order to develop all the factorized jobs
1762     develop_factorized_jobs(config_jobs)
1763     
1764     # Make a unique file that contains all the jobs in order to use it 
1765     # on every machine
1766     name_pyconf = "_".join([os.path.basename(path)[:-len('.pyconf')] 
1767                             for path in l_conf_files_path]) + ".pyconf"
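         # e.g. (hypothetical names) merging "jobs_A.pyconf" and "jobs_B.pyconf"
         #      gives "jobs_A_jobs_B.pyconf"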
1768     path_pyconf = src.get_tmp_filename(runner.cfg, name_pyconf)
1769     # Save the merged jobs config in the temporary file
1770     f = open(path_pyconf, 'w')
1771     config_jobs.__save__(f)
1772     f.close()
1773     # log the paramiko problems
1774     log_dir = src.get_log_path(runner.cfg)
1775     paramiko_log_dir_path = os.path.join(log_dir, "JOBS")
1776     src.ensure_path_exists(paramiko_log_dir_path)
1777     paramiko.util.log_to_file(os.path.join(paramiko_log_dir_path,
1778                                            logger.txtFileName))
1779     
1780     # Initialization
1781     today_jobs = Jobs(runner,
1782                       logger,
1783                       path_pyconf,
1784                       config_jobs)
1785     
1786     # SSH connection to all machines
1787     today_jobs.ssh_connection_all_machines()
1788     if options.test_connection:
1789         return 0
1790     
1791     gui = None
1792     if options.publish:
1793         logger.write(src.printcolors.printcInfo(
1794                                         _("Initialize the xml boards : ")), 5)
1795         logger.flush()
1796         
1797         # Copy the stylesheets in the log directory 
1798         log_dir = log_dir
1799         xsl_dir = os.path.join(runner.cfg.VARS.srcDir, 'xsl')
1800         files_to_copy = []
1801         files_to_copy.append(os.path.join(xsl_dir, STYLESHEET_GLOBAL))
1802         files_to_copy.append(os.path.join(xsl_dir, STYLESHEET_BOARD))
1803         files_to_copy.append(os.path.join(xsl_dir, "command.xsl"))
1804         files_to_copy.append(os.path.join(xsl_dir, "running.gif"))
1805         for file_path in files_to_copy:
1806             # OP We use copy instead of copy2 to update the creation date
1807             #    So we can clean the LOGS directories easily
1808             shutil.copy(file_path, log_dir)
1809         
1810         # Instantiate the Gui in order to produce the xml files that contain
1811         # all the boards
1812         gui = Gui(log_dir,
1813                   today_jobs.ljobs,
1814                   today_jobs.ljobs_not_today,
1815                   runner.cfg.VARS.datehour,
1816                   logger,
1817                   file_boards = options.input_boards)
1818         
1819         logger.write(src.printcolors.printcSuccess("OK"), 5)
1820         logger.write("\n\n", 5)
1821         logger.flush()
1822         
1823         # Display the list of the xml files
1824         logger.write(src.printcolors.printcInfo(("Here is the list of published"
1825                                                  " files :\n")), 4)
1826         logger.write("%s\n" % gui.xml_global_file.logFile, 4)
1827         for board in gui.d_xml_board_files.keys():
1828             file_path = gui.d_xml_board_files[board].logFile
1829             file_name = os.path.basename(file_path)
1830             logger.write("%s\n" % file_path, 4)
1831             logger.add_link(file_name, "board", 0, board)
1832               
1833         logger.write("\n", 4)
1834         
1835     today_jobs.gui = gui
1836     
1837     interrupted = False
1838     try:
1839         # Run all the jobs contained in config_jobs
1840         today_jobs.run_jobs()
1841     except KeyboardInterrupt:
1842         interrupted = True
1843         logger.write("\n\n%s\n\n" % 
1844                 (src.printcolors.printcWarning(_("Forced interruption"))), 1)
1845     except Exception as e:
1846         msg = _("CRITICAL ERROR: The jobs loop has been interrupted\n")
1847         logger.write("\n\n%s\n" % src.printcolors.printcError(msg) )
1848         logger.write("%s\n" % str(e))
1849         # get stack
1850         __, __, exc_traceback = sys.exc_info()
1851         fp = tempfile.TemporaryFile()
1852         traceback.print_tb(exc_traceback, file=fp)
1853         fp.seek(0)
1854         stack = fp.read()
1855         logger.write("\nTRACEBACK: %s\n" % stack.replace('"',"'"), 1)
1856         
1857     finally:
1858         res = 0
1859         if interrupted:
1860             res = 1
1861             msg = _("Killing the running jobs and trying"
1862                     " to get the corresponding logs\n")
1863             logger.write(src.printcolors.printcWarning(msg))
1864             
1865         # find the jobs that may not have finished and kill them
1866         for jb in today_jobs.ljobs:
1867             if not jb.has_finished():
1868                 res = 1
1869                 try:
1870                     jb.kill_remote_process()
1871                 except Exception as e:
1872                     msg = _("Failed to kill job %s: %s\n" % (jb.name, e))
1873                     logger.write(src.printcolors.printcWarning(msg))
1874             if jb.res_job != "0":
1875                 res = 1
1876         if interrupted:
1877             if today_jobs.gui:
1878                 today_jobs.gui.last_update(_("Forced interruption"))
1879         else:
1880             if today_jobs.gui:
1881                 today_jobs.gui.last_update()
1882         # Output the results
1883         today_jobs.write_all_results()
1884         # Remove the temporary pyconf file
1885         if os.path.exists(path_pyconf):
1886             os.remove(path_pyconf)
1887         return res