Salome HOME
sat jobs : add an option that reads csv files and fills the boards with expected...
[tools/sat.git] / commands / jobs.py
1 #!/usr/bin/env python
2 #-*- coding:utf-8 -*-
3 #  Copyright (C) 2010-2013  CEA/DEN
4 #
5 #  This library is free software; you can redistribute it and/or
6 #  modify it under the terms of the GNU Lesser General Public
7 #  License as published by the Free Software Foundation; either
8 #  version 2.1 of the License.
9 #
10 #  This library is distributed in the hope that it will be useful,
11 #  but WITHOUT ANY WARRANTY; without even the implied warranty of
12 #  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
13 #  Lesser General Public License for more details.
14 #
15 #  You should have received a copy of the GNU Lesser General Public
16 #  License along with this library; if not, write to the Free Software
17 #  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
18
19 import os
20 import datetime
21 import time
22 import csv
23 import shutil
24 import paramiko
25
26 import src
27
28 STYLESHEET_GLOBAL = "jobs_global_report.xsl"
29 STYLESHEET_BOARD = "jobs_board_report.xsl"
30 d_INT_DAY = {0 : "monday",
31              1 : "tuesday",
32              2 : "wednesday",
33              3 : "thursday",
34              4 : "friday",
35              5 : "saturday",
36              6 : "sunday"}
37
38 parser = src.options.Options()
39
40 parser.add_option('j', 'jobs_config', 'string', 'jobs_cfg', 
41                   _('The name of the config file that contains'
42                   ' the jobs configuration'))
43 parser.add_option('o', 'only_jobs', 'list2', 'only_jobs',
44                   _('Optional: the list of jobs to launch, by their name. '))
45 parser.add_option('l', 'list', 'boolean', 'list', 
46                   _('Optional: list all available config files.'))
47 parser.add_option('t', 'test_connection', 'boolean', 'test_connection',
48                   _("Optional: try to connect to the machines. "
49                     "Not executing the jobs."),
50                   False)
51 parser.add_option('p', 'publish', 'boolean', 'publish',
52                   _("Optional: generate an xml file that can be read in a "
53                     "browser to display the jobs status."),
54                   False)
55 parser.add_option('i', 'input_boards', 'list2', 'input_boards', _("Optional: "
56                                 "the list of path to csv files that contain "
57                                 "the expected boards."),[])
58 parser.add_option('n', 'completion', 'boolean', 'no_label',
59                   _("Optional (internal use): do not print labels, Works only "
60                     "with --list."),
61                   False)
62
63 class Machine(object):
64     '''Class to manage a ssh connection on a machine
65     '''
66     def __init__(self,
67                  name,
68                  host,
69                  user,
70                  port=22,
71                  passwd=None,
72                  sat_path="salomeTools"):
73         self.name = name
74         self.host = host
75         self.port = port
76         self.distribution = None # Will be filled after copying SAT on the machine
77         self.user = user
78         self.password = passwd
79         self.sat_path = sat_path
80         self.ssh = paramiko.SSHClient()
81         self._connection_successful = None
82     
83     def connect(self, logger):
84         '''Initiate the ssh connection to the remote machine
85         
86         :param logger src.logger.Logger: The logger instance 
87         :return: Nothing
88         :rtype: N\A
89         '''
90
91         self._connection_successful = False
92         self.ssh.load_system_host_keys()
93         self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
94         try:
95             self.ssh.connect(self.host,
96                              port=self.port,
97                              username=self.user,
98                              password = self.password)
99         except paramiko.AuthenticationException:
100             message = src.KO_STATUS + _("Authentication failed")
101         except paramiko.BadHostKeyException:
102             message = (src.KO_STATUS + 
103                        _("The server's host key could not be verified"))
104         except paramiko.SSHException:
105             message = ( _("SSHException error connecting or "
106                           "establishing an SSH session"))            
107         except:
108             message = ( _("Error connecting or establishing an SSH session"))
109         else:
110             self._connection_successful = True
111             message = ""
112         return message
113     
114     def successfully_connected(self, logger):
115         '''Verify if the connection to the remote machine has succeed
116         
117         :param logger src.logger.Logger: The logger instance 
118         :return: True if the connection has succeed, False if not
119         :rtype: bool
120         '''
121         if self._connection_successful == None:
122             message = _("Warning : trying to ask if the connection to "
123             "(name: %s host: %s, port: %s, user: %s) is OK whereas there were"
124             " no connection request" % 
125                         (self.name, self.host, self.port, self.user))
126             logger.write( src.printcolors.printcWarning(message))
127         return self._connection_successful
128
129     def copy_sat(self, sat_local_path, job_file):
130         '''Copy salomeTools to the remote machine in self.sat_path
131         '''
132         res = 0
133         try:
134             # open a sftp connection
135             self.sftp = self.ssh.open_sftp()
136             # Create the sat directory on remote machine if it is not existing
137             self.mkdir(self.sat_path, ignore_existing=True)
138             # Put sat
139             self.put_dir(sat_local_path, self.sat_path, filters = ['.git'])
140             # put the job configuration file in order to make it reachable 
141             # on the remote machine
142             job_file_name = os.path.basename(job_file)
143             self.sftp.put(job_file, os.path.join(self.sat_path,
144                                                  "data",
145                                                  "jobs",
146                                                  job_file_name))
147         except Exception as e:
148             res = str(e)
149             self._connection_successful = False
150         
151         return res
152         
153     def put_dir(self, source, target, filters = []):
154         ''' Uploads the contents of the source directory to the target path. The
155             target directory needs to exists. All sub-directories in source are 
156             created under target.
157         '''
158         for item in os.listdir(source):
159             if item in filters:
160                 continue
161             source_path = os.path.join(source, item)
162             destination_path = os.path.join(target, item)
163             if os.path.islink(source_path):
164                 linkto = os.readlink(source_path)
165                 try:
166                     self.sftp.symlink(linkto, destination_path)
167                     self.sftp.chmod(destination_path,
168                                     os.stat(source_path).st_mode)
169                 except IOError:
170                     pass
171             else:
172                 if os.path.isfile(source_path):
173                     self.sftp.put(source_path, destination_path)
174                     self.sftp.chmod(destination_path,
175                                     os.stat(source_path).st_mode)
176                 else:
177                     self.mkdir(destination_path, ignore_existing=True)
178                     self.put_dir(source_path, destination_path)
179
180     def mkdir(self, path, mode=511, ignore_existing=False):
181         ''' Augments mkdir by adding an option to not fail 
182             if the folder exists 
183         '''
184         try:
185             self.sftp.mkdir(path, mode)
186         except IOError:
187             if ignore_existing:
188                 pass
189             else:
190                 raise       
191     
192     def exec_command(self, command, logger):
193         '''Execute the command on the remote machine
194         
195         :param command str: The command to be run
196         :param logger src.logger.Logger: The logger instance 
197         :return: the stdin, stdout, and stderr of the executing command,
198                  as a 3-tuple
199         :rtype: (paramiko.channel.ChannelFile, paramiko.channel.ChannelFile,
200                 paramiko.channel.ChannelFile)
201         '''
202         try:        
203             # Does not wait the end of the command
204             (stdin, stdout, stderr) = self.ssh.exec_command(command)
205         except paramiko.SSHException:
206             message = src.KO_STATUS + _(
207                             ": the server failed to execute the command\n")
208             logger.write( src.printcolors.printcError(message))
209             return (None, None, None)
210         except:
211             logger.write( src.printcolors.printcError(src.KO_STATUS + '\n'))
212             return (None, None, None)
213         else:
214             return (stdin, stdout, stderr)
215
216     def close(self):
217         '''Close the ssh connection
218         
219         :rtype: N\A
220         '''
221         self.ssh.close()
222      
223     def write_info(self, logger):
224         '''Prints the informations relative to the machine in the logger 
225            (terminal traces and log file)
226         
227         :param logger src.logger.Logger: The logger instance
228         :return: Nothing
229         :rtype: N\A
230         '''
231         logger.write("host : " + self.host + "\n")
232         logger.write("port : " + str(self.port) + "\n")
233         logger.write("user : " + str(self.user) + "\n")
234         if self.successfully_connected(logger):
235             status = src.OK_STATUS
236         else:
237             status = src.KO_STATUS
238         logger.write("Connection : " + status + "\n\n") 
239
240
241 class Job(object):
242     '''Class to manage one job
243     '''
244     def __init__(self, name, machine, application, board, 
245                  commands, timeout, config, logger, job_file, after=None):
246
247         self.name = name
248         self.machine = machine
249         self.after = after
250         self.timeout = timeout
251         self.application = application
252         self.board = board
253         self.config = config
254         self.logger = logger
255         # The list of log files to download from the remote machine 
256         self.remote_log_files = []
257         
258         # The remote command status
259         # -1 means that it has not been launched, 
260         # 0 means success and 1 means fail
261         self.res_job = "-1"
262         self.cancelled = False
263         
264         self._T0 = -1
265         self._Tf = -1
266         self._has_begun = False
267         self._has_finished = False
268         self._has_timouted = False
269         self._stdin = None # Store the command inputs field
270         self._stdout = None # Store the command outputs field
271         self._stderr = None # Store the command errors field
272
273         self.out = None # Contains something only if the job is finished
274         self.err = None # Contains something only if the job is finished    
275                
276         self.commands = commands
277         self.command = (os.path.join(self.machine.sat_path, "sat") +
278                         " -l " +
279                         os.path.join(self.machine.sat_path,
280                                      "list_log_files.txt") +
281                         " job --jobs_config " +
282                         job_file +
283                         " --name " +
284                         self.name)
285     
286     def get_pids(self):
287         """ Get the pid(s) corresponding to the command that have been launched
288             On the remote machine
289         
290         :return: The list of integers corresponding to the found pids
291         :rtype: List
292         """
293         pids = []
294         cmd_pid = 'ps aux | grep "' + self.command + '" | awk \'{print $2}\''
295         (_, out_pid, _) = self.machine.exec_command(cmd_pid, self.logger)
296         pids_cmd = out_pid.readlines()
297         pids_cmd = [str(src.only_numbers(pid)) for pid in pids_cmd]
298         pids+=pids_cmd
299         return pids
300     
301     def kill_remote_process(self, wait=1):
302         '''Kills the process on the remote machine.
303         
304         :return: (the output of the kill, the error of the kill)
305         :rtype: (str, str)
306         '''
307         
308         pids = self.get_pids()
309         cmd_kill = " ; ".join([("kill -2 " + pid) for pid in pids])
310         (_, out_kill, err_kill) = self.machine.exec_command(cmd_kill, 
311                                                             self.logger)
312         time.sleep(wait)
313         return (out_kill, err_kill)
314             
315     def has_begun(self):
316         '''Returns True if the job has already begun
317         
318         :return: True if the job has already begun
319         :rtype: bool
320         '''
321         return self._has_begun
322     
323     def has_finished(self):
324         '''Returns True if the job has already finished 
325            (i.e. all the commands have been executed)
326            If it is finished, the outputs are stored in the fields out and err.
327         
328         :return: True if the job has already finished
329         :rtype: bool
330         '''
331         
332         # If the method has already been called and returned True
333         if self._has_finished:
334             return True
335         
336         # If the job has not begun yet
337         if not self.has_begun():
338             return False
339         
340         if self._stdout.channel.closed:
341             self._has_finished = True
342             # Store the result outputs
343             self.out = self._stdout.read().decode()
344             self.err = self._stderr.read().decode()
345             # Put end time
346             self._Tf = time.time()
347             # And get the remote command status and log files
348             self.get_log_files()
349         
350         return self._has_finished
351           
352     def get_log_files(self):
353         """Get the log files produced by the command launched 
354            on the remote machine.
355         """
356         # Do not get the files if the command is not finished
357         if not self.has_finished():
358             msg = _("Trying to get log files whereas the job is not finished.")
359             self.logger.write(src.printcolors.printcWarning(msg))
360             return
361         
362         # First get the file that contains the list of log files to get
363         tmp_file_path = src.get_tmp_filename(self.config, "list_log_files.txt")
364         self.machine.sftp.get(
365                     os.path.join(self.machine.sat_path, "list_log_files.txt"),
366                     tmp_file_path)
367         
368         # Read the file and get the result of the command and all the log files
369         # to get
370         fstream_tmp = open(tmp_file_path, "r")
371         file_lines = fstream_tmp.readlines()
372         file_lines = [line.replace("\n", "") for line in file_lines]
373         fstream_tmp.close()
374         os.remove(tmp_file_path)
375         # The first line is the result of the command (0 success or 1 fail)
376         self.res_job = file_lines[0]
377
378         for i, job_path_remote in enumerate(file_lines[1:]):
379             try:
380                 # For each command, there is two files to get :
381                 # 1- The xml file describing the command and giving the 
382                 # internal traces.
383                 # 2- The txt file containing the system command traces (like 
384                 # traces produced by the "make" command)
385                 if os.path.basename(os.path.dirname(job_path_remote)) != 'OUT':
386                     # Case 1-
387                     local_path = os.path.join(os.path.dirname(
388                                                         self.logger.logFilePath),
389                                               os.path.basename(job_path_remote))
390                     if i==0: # The first is the job command
391                         self.logger.add_link(os.path.basename(job_path_remote),
392                                              "job",
393                                              self.res_job,
394                                              self.command) 
395                 else:
396                     # Case 2-
397                     local_path = os.path.join(os.path.dirname(
398                                                         self.logger.logFilePath),
399                                               'OUT',
400                                               os.path.basename(job_path_remote))
401                 # Get the file
402                 if not os.path.exists(local_path):
403                     self.machine.sftp.get(job_path_remote, local_path)
404                 self.remote_log_files.append(local_path)
405             except Exception as e:
406                 self.err += _("Unable to get %s log file from remote: %s" % 
407                                                     (job_path_remote, str(e)))
408
409     def has_failed(self):
410         '''Returns True if the job has failed. 
411            A job is considered as failed if the machine could not be reached,
412            if the remote command failed, 
413            or if the job finished with a time out.
414         
415         :return: True if the job has failed
416         :rtype: bool
417         '''
418         if not self.has_finished():
419             return False
420         if not self.machine.successfully_connected(self.logger):
421             return True
422         if self.is_timeout():
423             return True
424         if self.res_job == "1":
425             return True
426         return False
427     
428     def cancel(self):
429         """In case of a failing job, one has to cancel every job that depend 
430            on it. This method put the job as failed and will not be executed.
431         """
432         self._has_begun = True
433         self._has_finished = True
434         self.cancelled = True
435         self.out = _("This job was not launched because its father has failed.")
436         self.err = _("This job was not launched because its father has failed.")
437
438     def is_running(self):
439         '''Returns True if the job commands are running 
440         
441         :return: True if the job is running
442         :rtype: bool
443         '''
444         return self.has_begun() and not self.has_finished()
445
446     def is_timeout(self):
447         '''Returns True if the job commands has finished with timeout 
448         
449         :return: True if the job has finished with timeout
450         :rtype: bool
451         '''
452         return self._has_timouted
453
454     def time_elapsed(self):
455         """Get the time elapsed since the job launching
456         
457         :return: The number of seconds
458         :rtype: int
459         """
460         if not self.has_begun():
461             return -1
462         T_now = time.time()
463         return T_now - self._T0
464     
465     def check_time(self):
466         """Verify that the job has not exceeded its timeout.
467            If it has, kill the remote command and consider the job as finished.
468         """
469         if not self.has_begun():
470             return
471         if self.time_elapsed() > self.timeout:
472             self._has_finished = True
473             self._has_timouted = True
474             self._Tf = time.time()
475             self.get_pids()
476             (out_kill, _) = self.kill_remote_process()
477             self.out = "TIMEOUT \n" + out_kill.read().decode()
478             self.err = "TIMEOUT : %s seconds elapsed\n" % str(self.timeout)
479             try:
480                 self.get_log_files()
481             except Exception as e:
482                 self.err += _("Unable to get remote log files: %s" % e)
483             
484     def total_duration(self):
485         """Give the total duration of the job
486         
487         :return: the total duration of the job in seconds
488         :rtype: int
489         """
490         return self._Tf - self._T0
491         
492     def run(self):
493         """Launch the job by executing the remote command.
494         """
495         
496         # Prevent multiple run
497         if self.has_begun():
498             msg = _("Warning: A job can only be launched one time")
499             msg2 = _("Trying to launch the job \"%s\" whereas it has "
500                      "already been launched." % self.name)
501             self.logger.write(src.printcolors.printcWarning("%s\n%s\n" % (msg,
502                                                                         msg2)))
503             return
504         
505         # Do not execute the command if the machine could not be reached
506         if not self.machine.successfully_connected(self.logger):
507             self._has_finished = True
508             self.out = "N\A"
509             self.err = ("Connection to machine (name : %s, host: %s, port:"
510                         " %s, user: %s) has failed\nUse the log command "
511                         "to get more information."
512                         % (self.machine.name,
513                            self.machine.host,
514                            self.machine.port,
515                            self.machine.user))
516         else:
517             # Usual case : Launch the command on remote machine
518             self._T0 = time.time()
519             self._stdin, self._stdout, self._stderr = self.machine.exec_command(
520                                                                   self.command,
521                                                                   self.logger)
522             # If the results are not initialized, finish the job
523             if (self._stdin, self._stdout, self._stderr) == (None, None, None):
524                 self._has_finished = True
525                 self._Tf = time.time()
526                 self.out = "N\A"
527                 self.err = "The server failed to execute the command"
528         
529         # Put the beginning flag to true.
530         self._has_begun = True
531     
532     def write_results(self):
533         """Display on the terminal all the job's information
534         """
535         self.logger.write("name : " + self.name + "\n")
536         if self.after:
537             self.logger.write("after : %s\n" % self.after)
538         self.logger.write("Time elapsed : %4imin %2is \n" % 
539                      (self.total_duration()//60 , self.total_duration()%60))
540         if self._T0 != -1:
541             self.logger.write("Begin time : %s\n" % 
542                          time.strftime('%Y-%m-%d %H:%M:%S', 
543                                        time.localtime(self._T0)) )
544         if self._Tf != -1:
545             self.logger.write("End time   : %s\n\n" % 
546                          time.strftime('%Y-%m-%d %H:%M:%S', 
547                                        time.localtime(self._Tf)) )
548         
549         machine_head = "Informations about connection :\n"
550         underline = (len(machine_head) - 2) * "-"
551         self.logger.write(src.printcolors.printcInfo(
552                                                 machine_head+underline+"\n"))
553         self.machine.write_info(self.logger)
554         
555         self.logger.write(src.printcolors.printcInfo("out : \n"))
556         if self.out is None:
557             self.logger.write("Unable to get output\n")
558         else:
559             self.logger.write(self.out + "\n")
560         self.logger.write(src.printcolors.printcInfo("err : \n"))
561         if self.err is None:
562             self.logger.write("Unable to get error\n")
563         else:
564             self.logger.write(self.err + "\n")
565         
566     def get_status(self):
567         """Get the status of the job (used by the Gui for xml display)
568         
569         :return: The current status of the job
570         :rtype: String
571         """
572         if not self.machine.successfully_connected(self.logger):
573             return "SSH connection KO"
574         if not self.has_begun():
575             return "Not launched"
576         if self.cancelled:
577             return "Cancelled"
578         if self.is_running():
579             return "running since " + time.strftime('%Y-%m-%d %H:%M:%S',
580                                                     time.localtime(self._T0))        
581         if self.has_finished():
582             if self.is_timeout():
583                 return "Timeout since " + time.strftime('%Y-%m-%d %H:%M:%S',
584                                                     time.localtime(self._Tf))
585             return "Finished since " + time.strftime('%Y-%m-%d %H:%M:%S',
586                                                      time.localtime(self._Tf))
587     
588 class Jobs(object):
589     '''Class to manage the jobs to be run
590     '''
591     def __init__(self,
592                  runner,
593                  logger,
594                  job_file,
595                  job_file_path,
596                  config_jobs,
597                  lenght_columns = 20):
598         # The jobs configuration
599         self.cfg_jobs = config_jobs
600         self.job_file = job_file
601         self.job_file_path = job_file_path
602         # The machine that will be used today
603         self.lmachines = []
604         # The list of machine (hosts, port) that will be used today 
605         # (a same host can have several machine instances since there 
606         # can be several ssh parameters) 
607         self.lhosts = []
608         # The jobs to be launched today 
609         self.ljobs = []
610         # The jobs that will not be launched today
611         self.ljobs_not_today = []
612         self.runner = runner
613         self.logger = logger
614         self.len_columns = lenght_columns
615         
616         # the list of jobs that have not been run yet
617         self._l_jobs_not_started = []
618         # the list of jobs that have already ran 
619         self._l_jobs_finished = []
620         # the list of jobs that are running 
621         self._l_jobs_running = [] 
622                 
623         self.determine_jobs_and_machines()
624     
625     def define_job(self, job_def, machine):
626         '''Takes a pyconf job definition and a machine (from class machine)
627            and returns the job instance corresponding to the definition.
628         
629         :param job_def src.config.Mapping: a job definition 
630         :param machine machine: the machine on which the job will run
631         :return: The corresponding job in a job class instance
632         :rtype: job
633         '''
634         name = job_def.name
635         cmmnds = job_def.commands
636         timeout = job_def.timeout
637         after = None
638         if 'after' in job_def:
639             after = job_def.after
640         application = None
641         if 'application' in job_def:
642             application = job_def.application
643         board = None
644         if 'board' in job_def:
645             board = job_def.board
646             
647         return Job(name,
648                    machine,
649                    application,
650                    board,
651                    cmmnds,
652                    timeout,
653                    self.runner.cfg,
654                    self.logger,
655                    self.job_file,
656                    after = after)
657     
658     def determine_jobs_and_machines(self):
659         '''Function that reads the pyconf jobs definition and instantiates all
660            the machines and jobs to be done today.
661
662         :return: Nothing
663         :rtype: N\A
664         '''
665         today = datetime.date.weekday(datetime.date.today())
666         host_list = []
667                
668         for job_def in self.cfg_jobs.jobs :
669                 
670             if not "machine" in job_def:
671                 msg = _('WARNING: The job "%s" do not have the key '
672                        '"machine", this job is ignored.\n\n' % job_def.name)
673                 self.logger.write(src.printcolors.printcWarning(msg))
674                 continue
675             name_machine = job_def.machine
676             
677             a_machine = None
678             for mach in self.lmachines:
679                 if mach.name == name_machine:
680                     a_machine = mach
681                     break
682             
683             if a_machine == None:
684                 for machine_def in self.cfg_jobs.machines:
685                     if machine_def.name == name_machine:
686                         if 'host' not in machine_def:
687                             host = self.runner.cfg.VARS.hostname
688                         else:
689                             host = machine_def.host
690
691                         if 'user' not in machine_def:
692                             user = self.runner.cfg.VARS.user
693                         else:
694                             user = machine_def.user
695
696                         if 'port' not in machine_def:
697                             port = 22
698                         else:
699                             port = machine_def.port
700             
701                         if 'password' not in machine_def:
702                             passwd = None
703                         else:
704                             passwd = machine_def.password    
705                             
706                         if 'sat_path' not in machine_def:
707                             sat_path = "salomeTools"
708                         else:
709                             sat_path = machine_def.sat_path
710                         
711                         a_machine = Machine(
712                                             machine_def.name,
713                                             host,
714                                             user,
715                                             port=port,
716                                             passwd=passwd,
717                                             sat_path=sat_path
718                                             )
719                         
720                         self.lmachines.append(a_machine)
721                         if (host, port) not in host_list:
722                             host_list.append((host, port))
723                 
724                 if a_machine == None:
725                     msg = _("WARNING: The job \"%(job_name)s\" requires the "
726                             "machine \"%(machine_name)s\" but this machine "
727                             "is not defined in the configuration file.\n"
728                             "The job will not be launched")
729                     self.logger.write(src.printcolors.printcWarning(msg))
730                                   
731             a_job = self.define_job(job_def, a_machine)
732                 
733             if today in job_def.when:    
734                 self.ljobs.append(a_job)
735             else: # today in job_def.when
736                 self.ljobs_not_today.append(a_job)
737                
738         self.lhosts = host_list
739         
740     def ssh_connection_all_machines(self, pad=50):
741         '''Function that do the ssh connection to every machine 
742            to be used today.
743
744         :return: Nothing
745         :rtype: N\A
746         '''
747         self.logger.write(src.printcolors.printcInfo((
748                         "Establishing connection with all the machines :\n")))
749         for machine in self.lmachines:
750             # little algorithm in order to display traces
751             begin_line = (_("Connection to %s: " % machine.name))
752             if pad - len(begin_line) < 0:
753                 endline = " "
754             else:
755                 endline = (pad - len(begin_line)) * "." + " "
756             
757             step = "SSH connection"
758             self.logger.write( begin_line + endline + step)
759             self.logger.flush()
760             # the call to the method that initiate the ssh connection
761             msg = machine.connect(self.logger)
762             
763             # Copy salomeTools to the remote machine
764             if machine.successfully_connected(self.logger):
765                 step = _("Copy SAT")
766                 self.logger.write('\r%s%s%s' % (begin_line, endline, 20 * " "),3)
767                 self.logger.write('\r%s%s%s' % (begin_line, endline, step), 3)
768                 self.logger.flush()
769                 res_copy = machine.copy_sat(self.runner.cfg.VARS.salometoolsway,
770                                             self.job_file_path)
771                 # get the remote machine distribution using a sat command
772                 (__, out_dist, __) = machine.exec_command(
773                                 os.path.join(machine.sat_path,
774                                     "sat config --value VARS.dist --no_label"),
775                                 self.logger)
776                 machine.distribution = out_dist.read().decode().replace("\n",
777                                                                         "")
778                 # Print the status of the copy
779                 if res_copy == 0:
780                     self.logger.write('\r%s' % 
781                                 ((len(begin_line)+len(endline)+20) * " "), 3)
782                     self.logger.write('\r%s%s%s' % 
783                         (begin_line, 
784                          endline, 
785                          src.printcolors.printc(src.OK_STATUS)), 3)
786                 else:
787                     self.logger.write('\r%s' % 
788                             ((len(begin_line)+len(endline)+20) * " "), 3)
789                     self.logger.write('\r%s%s%s %s' % 
790                         (begin_line,
791                          endline,
792                          src.printcolors.printc(src.OK_STATUS),
793                          _("Copy of SAT failed")), 3)
794             else:
795                 self.logger.write('\r%s' % 
796                                   ((len(begin_line)+len(endline)+20) * " "), 3)
797                 self.logger.write('\r%s%s%s %s' % 
798                     (begin_line,
799                      endline,
800                      src.printcolors.printc(src.KO_STATUS),
801                      msg), 3)
802             self.logger.write("\n", 3)
803                 
804         self.logger.write("\n")
805         
806
807     def is_occupied(self, hostname):
808         '''Function that returns True if a job is running on 
809            the machine defined by its host and its port.
810         
811         :param hostname (str, int): the pair (host, port)
812         :return: the job that is running on the host, 
813                 or false if there is no job running on the host. 
814         :rtype: job / bool
815         '''
816         host = hostname[0]
817         port = hostname[1]
818         for jb in self.ljobs:
819             if jb.machine.host == host and jb.machine.port == port:
820                 if jb.is_running():
821                     return jb
822         return False
823     
824     def update_jobs_states_list(self):
825         '''Function that updates the lists that store the currently
826            running jobs and the jobs that have already finished.
827         
828         :return: Nothing. 
829         :rtype: N\A
830         '''
831         jobs_finished_list = []
832         jobs_running_list = []
833         for jb in self.ljobs:
834             if jb.is_running():
835                 jobs_running_list.append(jb)
836                 jb.check_time()
837             if jb.has_finished():
838                 jobs_finished_list.append(jb)
839         
840         nb_job_finished_before = len(self._l_jobs_finished)
841         self._l_jobs_finished = jobs_finished_list
842         self._l_jobs_running = jobs_running_list
843         
844         nb_job_finished_now = len(self._l_jobs_finished)
845         
846         return nb_job_finished_now > nb_job_finished_before
847     
848     def cancel_dependencies_of_failing_jobs(self):
849         '''Function that cancels all the jobs that depend on a failing one.
850         
851         :return: Nothing. 
852         :rtype: N\A
853         '''
854         
855         for job in self.ljobs:
856             if job.after is None:
857                 continue
858             father_job = self.find_job_that_has_name(job.after)
859             if father_job is not None and father_job.has_failed():
860                 job.cancel()
861     
862     def find_job_that_has_name(self, name):
863         '''Returns the job by its name.
864         
865         :param name str: a job name
866         :return: the job that has the name. 
867         :rtype: job
868         '''
869         for jb in self.ljobs:
870             if jb.name == name:
871                 return jb
872         # the following is executed only if the job was not found
873         return None
874     
875     def str_of_length(self, text, length):
876         '''Takes a string text of any length and returns 
877            the most close string of length "length".
878         
879         :param text str: any string
880         :param length int: a length for the returned string
881         :return: the most close string of length "length"
882         :rtype: str
883         '''
884         if len(text) > length:
885             text_out = text[:length-3] + '...'
886         else:
887             diff = length - len(text)
888             before = " " * (diff//2)
889             after = " " * (diff//2 + diff%2)
890             text_out = before + text + after
891             
892         return text_out
893     
894     def display_status(self, len_col):
895         '''Takes a lenght and construct the display of the current status 
896            of the jobs in an array that has a column for each host.
897            It displays the job that is currently running on the host 
898            of the column.
899         
900         :param len_col int: the size of the column 
901         :return: Nothing
902         :rtype: N\A
903         '''
904         
905         display_line = ""
906         for host_port in self.lhosts:
907             jb = self.is_occupied(host_port)
908             if not jb: # nothing running on the host
909                 empty = self.str_of_length("empty", len_col)
910                 display_line += "|" + empty 
911             else:
912                 display_line += "|" + src.printcolors.printcInfo(
913                                         self.str_of_length(jb.name, len_col))
914         
915         self.logger.write("\r" + display_line + "|")
916         self.logger.flush()
917     
918
919     def run_jobs(self):
920         '''The main method. Runs all the jobs on every host. 
921            For each host, at a given time, only one job can be running.
922            The jobs that have the field after (that contain the job that has
923            to be run before it) are run after the previous job.
924            This method stops when all the jobs are finished.
925         
926         :return: Nothing
927         :rtype: N\A
928         '''
929
930         # Print header
931         self.logger.write(src.printcolors.printcInfo(
932                                                 _('Executing the jobs :\n')))
933         text_line = ""
934         for host_port in self.lhosts:
935             host = host_port[0]
936             port = host_port[1]
937             if port == 22: # default value
938                 text_line += "|" + self.str_of_length(host, self.len_columns)
939             else:
940                 text_line += "|" + self.str_of_length(
941                                 "("+host+", "+str(port)+")", self.len_columns)
942         
943         tiret_line = " " + "-"*(len(text_line)-1) + "\n"
944         self.logger.write(tiret_line)
945         self.logger.write(text_line + "|\n")
946         self.logger.write(tiret_line)
947         self.logger.flush()
948         
949         # The infinite loop that runs the jobs
950         l_jobs_not_started = src.deepcopy_list(self.ljobs)
951         while len(self._l_jobs_finished) != len(self.ljobs):
952             new_job_start = False
953             for host_port in self.lhosts:
954                 
955                 if self.is_occupied(host_port):
956                     continue
957              
958                 for jb in l_jobs_not_started:
959                     if (jb.machine.host, jb.machine.port) != host_port:
960                         continue 
961                     if jb.after == None:
962                         jb.run()
963                         l_jobs_not_started.remove(jb)
964                         new_job_start = True
965                         break
966                     else:
967                         jb_before = self.find_job_that_has_name(jb.after)
968                         if jb_before is None:
969                             jb.cancel()
970                             msg = _("This job was not launched because its "
971                                     "father is not in the jobs list.")
972                             jb.out = msg
973                             jb.err = msg
974                             break
975                         if jb_before.has_finished():
976                             jb.run()
977                             l_jobs_not_started.remove(jb)
978                             new_job_start = True
979                             break
980             self.cancel_dependencies_of_failing_jobs()
981             new_job_finished = self.update_jobs_states_list()
982             
983             if new_job_start or new_job_finished:
984                 if self.gui:
985                     self.gui.update_xml_files(self.ljobs)            
986                 # Display the current status     
987                 self.display_status(self.len_columns)
988             
989             # Make sure that the proc is not entirely busy
990             time.sleep(0.001)
991         
992         self.logger.write("\n")    
993         self.logger.write(tiret_line)                   
994         self.logger.write("\n\n")
995         
996         if self.gui:
997             self.gui.update_xml_files(self.ljobs)
998             self.gui.last_update()
999
1000     def write_all_results(self):
1001         '''Display all the jobs outputs.
1002         
1003         :return: Nothing
1004         :rtype: N\A
1005         '''
1006         
1007         for jb in self.ljobs:
1008             self.logger.write(src.printcolors.printcLabel(
1009                         "#------- Results for job %s -------#\n" % jb.name))
1010             jb.write_results()
1011             self.logger.write("\n\n")
1012
1013 class Gui(object):
1014     '''Class to manage the the xml data that can be displayed in a browser to
1015        see the jobs states
1016     '''
1017    
1018     def __init__(self, xml_dir_path, l_jobs, l_jobs_not_today, l_file_boards = []):
1019         '''Initialization
1020         
1021         :param xml_dir_path str: The path to the directory where to put 
1022                                  the xml resulting files
1023         :param l_jobs List: the list of jobs that run today
1024         :param l_jobs_not_today List: the list of jobs that do not run today
1025         :param l_file_boards List: the list of file path from which to read the
1026                                    expected boards
1027         '''
1028         # The path of the csv files to read to fill the expected boards
1029         self.l_file_boards = l_file_boards
1030         
1031         today = d_INT_DAY[datetime.date.weekday(datetime.date.today())]
1032         self.parse_csv_boards(today)
1033         
1034         # The path of the global xml file
1035         self.xml_dir_path = xml_dir_path
1036         # Initialize the xml files
1037         xml_global_path = os.path.join(self.xml_dir_path, "global_report.xml")
1038         self.xml_global_file = src.xmlManager.XmlLogFile(xml_global_path,
1039                                                          "JobsReport")
1040         # The xml files that corresponds to the boards.
1041         # {name_board : xml_object}}
1042         self.d_xml_board_files = {}
1043         # Create the lines and columns
1044         self.initialize_boards(l_jobs, l_jobs_not_today)
1045         
1046         # Write the xml file
1047         self.update_xml_files(l_jobs)
1048     
1049     def add_xml_board(self, name):
1050         xml_board_path = os.path.join(self.xml_dir_path, name + ".xml")
1051         self.d_xml_board_files[name] =  src.xmlManager.XmlLogFile(
1052                                                     xml_board_path,
1053                                                     "JobsReport")
1054         self.d_xml_board_files[name].add_simple_node("distributions")
1055         self.d_xml_board_files[name].add_simple_node("applications")
1056         self.d_xml_board_files[name].add_simple_node("board", text=name)
1057            
1058     def initialize_boards(self, l_jobs, l_jobs_not_today):
1059         '''Get all the first information needed for each file and write the 
1060            first version of the files   
1061         :param l_jobs List: the list of jobs that run today
1062         :param l_jobs_not_today List: the list of jobs that do not run today
1063         '''
1064         # Get the boards to fill and put it in a dictionary
1065         # {board_name : xml instance corresponding to the board}
1066         for job in l_jobs + l_jobs_not_today:
1067             board = job.board
1068             if (board is not None and 
1069                                 board not in self.d_xml_board_files.keys()):
1070                 self.add_xml_board(board)
1071         
1072         # Verify that the boards given as input are done
1073         for board in list(self.d_input_boards.keys()):
1074             if board not in self.d_xml_board_files:
1075                 self.add_xml_board(board)
1076         
1077         # Loop over all jobs in order to get the lines and columns for each 
1078         # xml file
1079         d_dist = {}
1080         d_application = {}
1081         for board in self.d_xml_board_files:
1082             d_dist[board] = []
1083             d_application[board] = []
1084             
1085         l_hosts_ports = []
1086             
1087         for job in l_jobs + l_jobs_not_today:
1088             
1089             if (job.machine.host, job.machine.port) not in l_hosts_ports:
1090                 l_hosts_ports.append((job.machine.host, job.machine.port))
1091                 
1092             distrib = job.machine.distribution
1093             application = job.application
1094             
1095             board_job = job.board
1096             if board is None:
1097                 continue
1098             for board in self.d_xml_board_files:
1099                 if board_job == board:
1100                     if distrib is not None and distrib not in d_dist[board]:
1101                         d_dist[board].append(distrib)
1102                         src.xmlManager.add_simple_node(
1103                             self.d_xml_board_files[board].xmlroot.find(
1104                                                             'distributions'),
1105                                                    "dist",
1106                                                    attrib={"name" : distrib})
1107                     
1108                 if board_job == board:
1109                     if (application is not None and 
1110                                     application not in d_application[board]):
1111                         d_application[board].append(application)
1112                         src.xmlManager.add_simple_node(
1113                             self.d_xml_board_files[board].xmlroot.find(
1114                                                                 'applications'),
1115                                                    "application",
1116                                                    attrib={
1117                                                         "name" : application})
1118         
1119         # Verify that there are no missing application or distribution in the
1120         # xml board files (regarding the input boards)
1121         for board in self.d_xml_board_files:
1122             l_dist = d_dist[board]
1123             if board not in self.d_input_boards.keys():
1124                 continue
1125             for dist in self.d_input_boards[board]["rows"]:
1126                 if dist not in l_dist:
1127                     src.xmlManager.add_simple_node(
1128                             self.d_xml_board_files[board].xmlroot.find(
1129                                                             'distributions'),
1130                                                    "dist",
1131                                                    attrib={"name" : dist})
1132             l_appli = d_application[board]
1133             for appli in self.d_input_boards[board]["columns"]:
1134                 if appli not in l_appli:
1135                     src.xmlManager.add_simple_node(
1136                             self.d_xml_board_files[board].xmlroot.find(
1137                                                                 'applications'),
1138                                                    "application",
1139                                                    attrib={"name" : appli})
1140                 
1141         # Initialize the hosts_ports node for the global file
1142         self.xmlhosts_ports = self.xml_global_file.add_simple_node(
1143                                                                 "hosts_ports")
1144         for host, port in l_hosts_ports:
1145             host_port = "%s:%i" % (host, port)
1146             src.xmlManager.add_simple_node(self.xmlhosts_ports,
1147                                            "host_port",
1148                                            attrib={"name" : host_port})
1149         
1150         # Initialize the jobs node in all files
1151         for xml_file in [self.xml_global_file] + list(
1152                                             self.d_xml_board_files.values()):
1153             xml_jobs = xml_file.add_simple_node("jobs")      
1154             # Get the jobs present in the config file but 
1155             # that will not be launched today
1156             self.put_jobs_not_today(l_jobs_not_today, xml_jobs)
1157             
1158             xml_file.add_simple_node("infos",
1159                                      attrib={"name" : "last update",
1160                                              "JobsCommandStatus" : "running"})
1161         
1162         # Find in each board the squares that needs to be filled regarding the
1163         # input csv files but that are not covered by a today job
1164         for board in self.d_input_boards.keys():
1165             xml_root_board = self.d_xml_board_files[board].xmlroot
1166             xml_missing = src.xmlManager.add_simple_node(xml_root_board,
1167                                                  "missing_jobs")
1168             for row, column in self.d_input_boards[board]["jobs"]:
1169                 found = False
1170                 for job in l_jobs:
1171                     if (job.application == column and 
1172                         job.machine.distribution == row):
1173                         found = True
1174                         break
1175                 if not found:
1176                     src.xmlManager.add_simple_node(xml_missing,
1177                                             "job",
1178                                             attrib={"distribution" : row,
1179                                                     "application" : column })
1180     
1181     def put_jobs_not_today(self, l_jobs_not_today, xml_node_jobs):
1182         '''Get all the first information needed for each file and write the 
1183            first version of the files   
1184
1185         :param xml_node_jobs etree.Element: the node corresponding to a job
1186         :param l_jobs_not_today List: the list of jobs that do not run today
1187         '''
1188         for job in l_jobs_not_today:
1189             xmlj = src.xmlManager.add_simple_node(xml_node_jobs,
1190                                                  "job",
1191                                                  attrib={"name" : job.name})
1192             src.xmlManager.add_simple_node(xmlj, "application", job.application)
1193             src.xmlManager.add_simple_node(xmlj,
1194                                            "distribution",
1195                                            job.machine.distribution)
1196             src.xmlManager.add_simple_node(xmlj, "board", job.board)
1197             src.xmlManager.add_simple_node(xmlj,
1198                                        "commands", " ; ".join(job.commands))
1199             src.xmlManager.add_simple_node(xmlj, "state", "Not today")
1200             src.xmlManager.add_simple_node(xmlj, "machine", job.machine.name)
1201             src.xmlManager.add_simple_node(xmlj, "host", job.machine.host)
1202             src.xmlManager.add_simple_node(xmlj, "port", str(job.machine.port))
1203             src.xmlManager.add_simple_node(xmlj, "user", job.machine.user)
1204             src.xmlManager.add_simple_node(xmlj, "sat_path",
1205                                                         job.machine.sat_path)
1206
1207     def parse_csv_boards(self, today):
1208         """ Parse the csv files that describes the boards to produce and fill 
1209             the dict d_input_boards that contain the csv file contain
1210         
1211         :param today str: the current day of the week 
1212         """
1213         # loop over each csv file and read its content
1214         l_boards = []
1215         for file_path in self.l_file_boards:
1216             l_read = []
1217             with open(file_path, 'r') as f:
1218                 reader = csv.reader(f)
1219                 for row in reader:
1220                     l_read.append(row)
1221             l_boards.append(l_read)
1222     
1223         # loop over the csv lists of lines and get the rows, columns and jobs
1224         d_boards = {}
1225         for input_board in l_boards:
1226             # get board name
1227             board_name = input_board[0][0]
1228             
1229             # Get columns list
1230             columns = input_board[0][1:]
1231             
1232             rows = []
1233             columns_out = []
1234             jobs = []
1235             for line in input_board[1:]:
1236                 row = line[0]
1237                 for i, square in enumerate(line[1:]):
1238                     if today in square:
1239                         if row not in rows:
1240                             rows.append(row)
1241                         if columns[i] not in columns_out:
1242                             columns_out.append(columns[i])
1243                         job = (row, columns[i])
1244                         jobs.append(job)
1245             
1246             d_boards[board_name] = {"rows" : rows,
1247                                     "columns" : columns_out,
1248                                     "jobs" : jobs}
1249         
1250         self.d_input_boards = d_boards
1251
1252     def update_xml_files(self, l_jobs):
1253         '''Write all the xml files with updated information about the jobs   
1254
1255         :param l_jobs List: the list of jobs that run today
1256         '''
1257         for xml_file in [self.xml_global_file] + list(
1258                                             self.d_xml_board_files.values()):
1259             self.update_xml_file(l_jobs, xml_file)
1260             
1261         # Write the file
1262         self.write_xml_files()
1263             
1264     def update_xml_file(self, l_jobs, xml_file):      
1265         '''update information about the jobs for the file xml_file   
1266
1267         :param l_jobs List: the list of jobs that run today
1268         :param xml_file xmlManager.XmlLogFile: the xml instance to update
1269         '''
1270         
1271         xml_node_jobs = xml_file.xmlroot.find('jobs')
1272         # Update the job names and status node
1273         for job in l_jobs:
1274             # Find the node corresponding to the job and delete it
1275             # in order to recreate it
1276             for xmljob in xml_node_jobs.findall('job'):
1277                 if xmljob.attrib['name'] == job.name:
1278                     xml_node_jobs.remove(xmljob)
1279             
1280             T0 = str(job._T0)
1281             if T0 != "-1":
1282                 T0 = time.strftime('%Y-%m-%d %H:%M:%S', 
1283                                        time.localtime(job._T0))
1284             Tf = str(job._Tf)
1285             if Tf != "-1":
1286                 Tf = time.strftime('%Y-%m-%d %H:%M:%S', 
1287                                        time.localtime(job._Tf))
1288             
1289             # recreate the job node
1290             xmlj = src.xmlManager.add_simple_node(xml_node_jobs,
1291                                                   "job",
1292                                                   attrib={"name" : job.name})
1293             src.xmlManager.add_simple_node(xmlj, "machine", job.machine.name)
1294             src.xmlManager.add_simple_node(xmlj, "host", job.machine.host)
1295             src.xmlManager.add_simple_node(xmlj, "port", str(job.machine.port))
1296             src.xmlManager.add_simple_node(xmlj, "user", job.machine.user)
1297             src.xmlManager.add_simple_node(xmlj, "sat_path",
1298                                            job.machine.sat_path)
1299             src.xmlManager.add_simple_node(xmlj, "application", job.application)
1300             src.xmlManager.add_simple_node(xmlj, "distribution",
1301                                            job.machine.distribution)
1302             src.xmlManager.add_simple_node(xmlj, "board", job.board)
1303             src.xmlManager.add_simple_node(xmlj, "timeout", str(job.timeout))
1304             src.xmlManager.add_simple_node(xmlj, "commands",
1305                                            " ; ".join(job.commands))
1306             src.xmlManager.add_simple_node(xmlj, "state", job.get_status())
1307             src.xmlManager.add_simple_node(xmlj, "begin", T0)
1308             src.xmlManager.add_simple_node(xmlj, "end", Tf)
1309             src.xmlManager.add_simple_node(xmlj, "out",
1310                                            src.printcolors.cleancolor(job.out))
1311             src.xmlManager.add_simple_node(xmlj, "err",
1312                                            src.printcolors.cleancolor(job.err))
1313             src.xmlManager.add_simple_node(xmlj, "res", str(job.res_job))
1314             if len(job.remote_log_files) > 0:
1315                 src.xmlManager.add_simple_node(xmlj,
1316                                                "remote_log_file_path",
1317                                                job.remote_log_files[0])
1318             else:
1319                 src.xmlManager.add_simple_node(xmlj,
1320                                                "remote_log_file_path",
1321                                                "nothing")           
1322             
1323             xmlafter = src.xmlManager.add_simple_node(xmlj, "after", job.after)
1324             # get the job father
1325             if job.after is not None:
1326                 job_father = None
1327                 for jb in l_jobs:
1328                     if jb.name == job.after:
1329                         job_father = jb
1330                 
1331                 if (job_father is not None and 
1332                         len(job_father.remote_log_files) > 0):
1333                     link = job_father.remote_log_files[0]
1334                 else:
1335                     link = "nothing"
1336                 src.xmlManager.append_node_attrib(xmlafter, {"link" : link})
1337             
1338             # Verify that the job is to be done today regarding the input csv
1339             # files
1340             if job.board and job.board in self.d_input_boards.keys():
1341                 found = False
1342                 for dist, appli in self.d_input_boards[job.board]["jobs"]:
1343                     if (job.machine.distribution == dist 
1344                         and job.application == appli):
1345                         found = True
1346                         src.xmlManager.add_simple_node(xmlj,
1347                                                "extra_job",
1348                                                "no")
1349                         break
1350                 if not found:
1351                     src.xmlManager.add_simple_node(xmlj,
1352                                                "extra_job",
1353                                                "yes")
1354             
1355         
1356         # Update the date
1357         xml_node_infos = xml_file.xmlroot.find('infos')
1358         src.xmlManager.append_node_attrib(xml_node_infos,
1359                     attrib={"value" : 
1360                     datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")})
1361                
1362
1363     
1364     def last_update(self, finish_status = "finished"):
1365         '''update information about the jobs for the file xml_file   
1366
1367         :param l_jobs List: the list of jobs that run today
1368         :param xml_file xmlManager.XmlLogFile: the xml instance to update
1369         '''
1370         for xml_file in [self.xml_global_file] + list(self.d_xml_board_files.values()):
1371             xml_node_infos = xml_file.xmlroot.find('infos')
1372             src.xmlManager.append_node_attrib(xml_node_infos,
1373                         attrib={"JobsCommandStatus" : finish_status})
1374         # Write the file
1375         self.write_xml_files()
1376     
1377     def write_xml_files(self):
1378         ''' Write the xml files   
1379         '''
1380         self.xml_global_file.write_tree(STYLESHEET_GLOBAL)
1381         for xml_file in self.d_xml_board_files.values():
1382             xml_file.write_tree(STYLESHEET_BOARD)
1383         
1384 ##
1385 # Describes the command
1386 def description():
1387     return _("The jobs command launches maintenances that are described"
1388              " in the dedicated jobs configuration file.")
1389
1390 ##
1391 # Runs the command.
1392 def run(args, runner, logger):
1393        
1394     (options, args) = parser.parse_args(args)
1395     
1396     jobs_cfg_files_dir = runner.cfg.SITE.jobs.config_path
1397     
1398     l_cfg_dir = [os.path.join(runner.cfg.VARS.datadir, "jobs"),
1399                  jobs_cfg_files_dir]
1400     
1401     # Make sure the path to the jobs config files directory exists 
1402     src.ensure_path_exists(jobs_cfg_files_dir)   
1403     
1404     # list option : display all the available config files
1405     if options.list:
1406         for cfg_dir in l_cfg_dir:
1407             if not options.no_label:
1408                 logger.write("------ %s\n" % 
1409                                  src.printcolors.printcHeader(cfg_dir))
1410     
1411             for f in sorted(os.listdir(cfg_dir)):
1412                 if not f.endswith('.pyconf'):
1413                     continue
1414                 cfilename = f[:-7]
1415                 logger.write("%s\n" % cfilename)
1416         return 0
1417
1418     # Make sure the jobs_config option has been called
1419     if not options.jobs_cfg:
1420         message = _("The option --jobs_config is required\n")      
1421         src.printcolors.printcError(message)
1422         return 1
1423     
1424     # Find the file in the directories
1425     found = False
1426     for cfg_dir in l_cfg_dir:
1427         file_jobs_cfg = os.path.join(cfg_dir, options.jobs_cfg)
1428         if not file_jobs_cfg.endswith('.pyconf'):
1429             file_jobs_cfg += '.pyconf'
1430         
1431         if not os.path.exists(file_jobs_cfg):
1432             continue
1433         else:
1434             found = True
1435             break
1436     
1437     if not found:
1438         msg = _("The file configuration %(name_file)s was not found."
1439                 "\nUse the --list option to get the possible files.")
1440         src.printcolors.printcError(msg)
1441         return 1
1442     
1443     info = [
1444         (_("Platform"), runner.cfg.VARS.dist),
1445         (_("File containing the jobs configuration"), file_jobs_cfg)
1446     ]    
1447     src.print_info(logger, info)
1448     
1449     # Read the config that is in the file
1450     config_jobs = src.read_config_from_a_file(file_jobs_cfg)
1451     if options.only_jobs:
1452         l_jb = src.pyconf.Sequence()
1453         for jb in config_jobs.jobs:
1454             if jb.name in options.only_jobs:
1455                 l_jb.append(jb,
1456                 "Adding a job that was given in only_jobs option parameters")
1457         config_jobs.jobs = l_jb
1458      
1459     # Initialization
1460     today_jobs = Jobs(runner,
1461                       logger,
1462                       options.jobs_cfg,
1463                       file_jobs_cfg,
1464                       config_jobs)
1465     # SSH connection to all machines
1466     today_jobs.ssh_connection_all_machines()
1467     if options.test_connection:
1468         return 0
1469     
1470     gui = None
1471     if options.publish:
1472         # Copy the stylesheets in the log directory 
1473         log_dir = runner.cfg.SITE.log.log_dir
1474         xsl_dir = os.path.join(runner.cfg.VARS.srcDir, 'xsl')
1475         files_to_copy = []
1476         files_to_copy.append(os.path.join(xsl_dir, STYLESHEET_GLOBAL))
1477         files_to_copy.append(os.path.join(xsl_dir, STYLESHEET_BOARD))
1478         files_to_copy.append(os.path.join(xsl_dir, "running.gif"))
1479         for file_path in files_to_copy:
1480             shutil.copy2(file_path, log_dir)
1481         
1482         # Instanciate the Gui in order to produce the xml files that contain all
1483         # the boards
1484         gui = Gui(runner.cfg.SITE.log.log_dir,
1485                   today_jobs.ljobs,
1486                   today_jobs.ljobs_not_today,
1487                   l_file_boards = options.input_boards)
1488     
1489     today_jobs.gui = gui
1490     
1491     interruped = False
1492     try:
1493         # Run all the jobs contained in config_jobs
1494         today_jobs.run_jobs()
1495     except KeyboardInterrupt:
1496         interruped = True
1497         logger.write("\n\n%s\n\n" % 
1498                 (src.printcolors.printcWarning(_("Forced interruption"))), 1)
1499     finally:
1500         if interruped:
1501             msg = _("Killing the running jobs and trying"
1502                     " to get the corresponding logs\n")
1503             logger.write(src.printcolors.printcWarning(msg))
1504             
1505         # find the potential not finished jobs and kill them
1506         for jb in today_jobs.ljobs:
1507             if not jb.has_finished():
1508                 jb.kill_remote_process()
1509         if interruped:
1510             if today_jobs.gui:
1511                 today_jobs.gui.last_update(_("Forced interruption"))
1512         else:
1513             if today_jobs.gui:
1514                 today_jobs.gui.last_update()
1515         # Output the results
1516         today_jobs.write_all_results()