Salome HOME
ajout option d'impression des graphes de dépendance
[tools/sat.git] / commands / jobs.py
index 80a6730717e8a7dc896a7eb960ea0a39bf18fa1c..c043d7048e094d8463f6f40b6f9f26b9a6c3c7dc 100644 (file)
 #  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
 
 import os
+import sys
+import tempfile
+import traceback
 import datetime
 import time
-import paramiko
+import csv
+import shutil
+import itertools
+import re
+
+# paramiko may not be installed; degrade gracefully instead of crashing
+try:
+  import paramiko
+except:
+  paramiko = "import paramiko impossible"
+  pass
 
 import src
 
+
+import src.ElementTree as etree
+
 STYLESHEET_GLOBAL = "jobs_global_report.xsl"
-STYLESHEET_TABLE = "jobs_table_report.xsl"
+STYLESHEET_BOARD = "jobs_board_report.xsl"
+
+DAYS_SEPARATOR = ","
+CSV_DELIMITER = ";"
 
 parser = src.options.Options()
 
-parser.add_option('j', 'jobs_config', 'string', 'jobs_cfg', 
-                  _('The name of the config file that contains'
-                  ' the jobs configuration'))
+parser.add_option('n', 'name', 'list2', 'jobs_cfg', 
+                  _('Mandatory: The name of the config file that contains'
+                  ' the jobs configuration. Can be a list.'))
 parser.add_option('o', 'only_jobs', 'list2', 'only_jobs',
-                  _('The list of jobs to launch, by their name. '))
+                  _('Optional: the list of jobs to launch, by their name. '))
 parser.add_option('l', 'list', 'boolean', 'list', 
-                  _('list all available config files.'))
-parser.add_option('n', 'no_label', 'boolean', 'no_label',
-                  _("do not print labels, Works only with --list."), False)
+                  _('Optional: list all available config files.'))
 parser.add_option('t', 'test_connection', 'boolean', 'test_connection',
-                  _("Try to connect to the machines. Not executing the jobs."),
+                  _("Optional: try to connect to the machines. "
+                    "Not executing the jobs."),
                   False)
 parser.add_option('p', 'publish', 'boolean', 'publish',
-                  _("Generate an xml file that can be read in a browser to "
-                    "display the jobs status."),
+                  _("Optional: generate an xml file that can be read in a "
+                    "browser to display the jobs status."),
+                  False)
+parser.add_option('i', 'input_boards', 'string', 'input_boards', _("Optional: "
+                                "the path to csv file that contain "
+                                "the expected boards."),"")
+parser.add_option('', 'completion', 'boolean', 'no_label',
+                  _("Optional (internal use): do not print labels, Works only "
+                    "with --list."),
                   False)
 
 class Machine(object):
@@ -58,6 +83,7 @@ class Machine(object):
         self.name = name
         self.host = host
         self.port = port
+        self.distribution = None # Will be filled after copying SAT on the machine
         self.user = user
         self.password = passwd
         self.sat_path = sat_path
@@ -115,14 +141,17 @@ class Machine(object):
         '''
         res = 0
         try:
+            # open a sftp connection
             self.sftp = self.ssh.open_sftp()
+            # Create the sat directory on the remote machine if it does not exist
             self.mkdir(self.sat_path, ignore_existing=True)
+            # Put sat
             self.put_dir(sat_local_path, self.sat_path, filters = ['.git'])
-            job_file_name = os.path.basename(job_file)
+            # put the job configuration file in order to make it reachable 
+            # on the remote machine
+            remote_job_file_name = ".%s" % os.path.basename(job_file)
             self.sftp.put(job_file, os.path.join(self.sat_path,
-                                                 "data",
-                                                 "jobs",
-                                                 job_file_name))
+                                                 remote_job_file_name))
         except Exception as e:
             res = str(e)
             self._connection_successful = False
@@ -131,7 +160,7 @@ class Machine(object):
         
     def put_dir(self, source, target, filters = []):
         ''' Uploads the contents of the source directory to the target path. The
-            target directory needs to exists. All subdirectories in source are 
+            target directory needs to exist. All sub-directories in source are 
             created under target.
         '''
         for item in os.listdir(source):
@@ -220,16 +249,25 @@ class Machine(object):
 class Job(object):
     '''Class to manage one job
     '''
-    def __init__(self, name, machine, application, distribution, table, 
-                 commands, timeout, config, logger, job_file, after=None):
+    def __init__(self,
+                 name,
+                 machine,
+                 application,
+                 board, 
+                 commands,
+                 timeout,
+                 config,
+                 job_file_path,
+                 logger,
+                 after=None,
+                 prefix=None):
 
         self.name = name
         self.machine = machine
         self.after = after
         self.timeout = timeout
         self.application = application
-        self.distribution = distribution
-        self.table = table
+        self.board = board
         self.config = config
         self.logger = logger
         # The list of log files to download from the remote machine 
@@ -250,20 +288,29 @@ class Job(object):
         self._stdout = None # Store the command outputs field
         self._stderr = None # Store the command errors field
 
-        self.out = None # Contains something only if the job is finished
-        self.err = None # Contains something only if the job is finished    
-               
+        self.out = ""
+        self.err = ""
+        
+        self.name_remote_jobs_pyconf = ".%s" % os.path.basename(job_file_path)
         self.commands = commands
         self.command = (os.path.join(self.machine.sat_path, "sat") +
                         " -l " +
                         os.path.join(self.machine.sat_path,
                                      "list_log_files.txt") +
-                        " job --jobs_config " +
-                        job_file +
-                        " --job " +
-                        self.name)
+                        " job --jobs_config " + 
+                        os.path.join(self.machine.sat_path,
+                                     self.name_remote_jobs_pyconf) +
+                        " --name " + self.name)
+        if prefix:
+            self.command = prefix + ' "' + self.command +'"'
     
     def get_pids(self):
+        """ Get the pid(s) corresponding to the command that have been launched
+            On the remote machine
+        
+        :return: The list of integers corresponding to the found pids
+        :rtype: List
+        """
         pids = []
         cmd_pid = 'ps aux | grep "' + self.command + '" | awk \'{print $2}\''
         (_, out_pid, _) = self.machine.exec_command(cmd_pid, self.logger)
@@ -278,13 +325,16 @@ class Job(object):
         :return: (the output of the kill, the error of the kill)
         :rtype: (str, str)
         '''
-        
-        pids = self.get_pids()
+        try:
+            pids = self.get_pids()
+        except:
+            return ("Unable to get the pid of the command.", "")
+            
         cmd_kill = " ; ".join([("kill -2 " + pid) for pid in pids])
         (_, out_kill, err_kill) = self.machine.exec_command(cmd_kill, 
                                                             self.logger)
         time.sleep(wait)
-        return (out_kill, err_kill)
+        return (out_kill.read().decode(), err_kill.read().decode())
             
     def has_begun(self):
         '''Returns True if the job has already begun
@@ -314,50 +364,92 @@ class Job(object):
         if self._stdout.channel.closed:
             self._has_finished = True
             # Store the result outputs
-            self.out = self._stdout.read()
-            self.err = self._stderr.read()
+            self.out += self._stdout.read().decode()
+            self.err += self._stderr.read().decode()
             # Put end time
             self._Tf = time.time()
             # And get the remote command status and log files
-            self.get_log_files()
+            try:
+                self.get_log_files()
+            except Exception as e:
+                self.err += _("Unable to get remote log files: %s" % e)
         
         return self._has_finished
           
     def get_log_files(self):
+        """Get the log files produced by the command launched 
+           on the remote machine, and put it in the log directory of the user,
+           so they can be accessible from 
+        """
+        # Do not get the files if the command is not finished
         if not self.has_finished():
             msg = _("Trying to get log files whereas the job is not finished.")
             self.logger.write(src.printcolors.printcWarning(msg))
             return
         
+        # First get the file that contains the list of log files to get
         tmp_file_path = src.get_tmp_filename(self.config, "list_log_files.txt")
+        remote_path = os.path.join(self.machine.sat_path, "list_log_files.txt")
         self.machine.sftp.get(
-                    os.path.join(self.machine.sat_path, "list_log_files.txt"),
+                    remote_path,
                     tmp_file_path)
         
+        # Read the file and get the result of the command and all the log files
+        # to get
         fstream_tmp = open(tmp_file_path, "r")
         file_lines = fstream_tmp.readlines()
         file_lines = [line.replace("\n", "") for line in file_lines]
         fstream_tmp.close()
         os.remove(tmp_file_path)
-        self.res_job = file_lines[0]
-        for job_path_remote in file_lines[1:]:
+        
+        try :
+            # The first line is the result of the command (0 success or 1 fail)
+            self.res_job = file_lines[0]
+        except Exception as e:
+            self.err += _("Unable to get status from remote file %s: %s" % 
+                                                    (remote_path, str(e)))
+
+        for i, job_path_remote in enumerate(file_lines[1:]):
             try:
-                if os.path.basename(os.path.dirname(job_path_remote)) != 'OUT':
+                # For each command, there are two files to get:
+                # 1- The xml file describing the command and giving the 
+                # internal traces.
+                # 2- The txt file containing the system command traces (like 
+                # traces produced by the "make" command)
+                # 3- In case of the test command, there is another file to get :
+                # the xml board that contain the test results
+                dirname = os.path.basename(os.path.dirname(job_path_remote))
+                if dirname != 'OUT' and dirname != 'TEST':
+                    # Case 1-
                     local_path = os.path.join(os.path.dirname(
                                                         self.logger.logFilePath),
                                               os.path.basename(job_path_remote))
-                    if not os.path.exists(local_path):
-                        self.machine.sftp.get(job_path_remote, local_path)
-                else:
+                    if i==0: # The first is the job command
+                        self.logger.add_link(os.path.basename(job_path_remote),
+                                             "job",
+                                             self.res_job,
+                                             self.command) 
+                elif dirname == 'OUT':
+                    # Case 2-
                     local_path = os.path.join(os.path.dirname(
                                                         self.logger.logFilePath),
                                               'OUT',
                                               os.path.basename(job_path_remote))
-                    if not os.path.exists(local_path):
-                        self.machine.sftp.get(job_path_remote, local_path)
+                elif dirname == 'TEST':
+                    # Case 3-
+                    local_path = os.path.join(os.path.dirname(
+                                                        self.logger.logFilePath),
+                                              'TEST',
+                                              os.path.basename(job_path_remote))
+                
+                # Get the file
+                if not os.path.exists(local_path):
+                    self.machine.sftp.get(job_path_remote, local_path)
                 self.remote_log_files.append(local_path)
-            except:
-                self.err += _("Unable to get %s log file from remote.") % job_path_remote
+            except Exception as e:
+                self.err += _("Unable to get %s log file from remote: %s" % 
+                                                    (str(job_path_remote),
+                                                     str(e)))
 
     def has_failed(self):
         '''Returns True if the job has failed. 
@@ -382,11 +474,13 @@ class Job(object):
         """In case of a failing job, one has to cancel every job that depend 
            on it. This method put the job as failed and will not be executed.
         """
+        if self.cancelled:
+            return
         self._has_begun = True
         self._has_finished = True
         self.cancelled = True
-        self.out = _("This job was not launched because its father has failed.")
-        self.err = _("This job was not launched because its father has failed.")
+        self.out += _("This job was not launched because its father has failed.")
+        self.err += _("This job was not launched because its father has failed.")
 
     def is_running(self):
         '''Returns True if the job commands are running 
@@ -405,39 +499,60 @@ class Job(object):
         return self._has_timouted
 
     def time_elapsed(self):
+        """Get the time elapsed since the job launching
+        
+        :return: The number of seconds
+        :rtype: int
+        """
         if not self.has_begun():
             return -1
         T_now = time.time()
         return T_now - self._T0
     
     def check_time(self):
+        """Verify that the job has not exceeded its timeout.
+           If it has, kill the remote command and consider the job as finished.
+        """
         if not self.has_begun():
             return
         if self.time_elapsed() > self.timeout:
             self._has_finished = True
             self._has_timouted = True
             self._Tf = time.time()
-            self.get_pids()
-            (out_kill, _) = self.kill_remote_process()
-            self.out = "TIMEOUT \n" + out_kill.read()
-            self.err = "TIMEOUT : %s seconds elapsed\n" % str(self.timeout)
+            (out_kill, __) = self.kill_remote_process()
+            self.out += "TIMEOUT \n" + out_kill
+            self.err += "TIMEOUT : %s seconds elapsed\n" % str(self.timeout)
             try:
                 self.get_log_files()
-            except:
-                self.err += _("Unable to get remote log files")
+            except Exception as e:
+                self.err += _("Unable to get remote log files!\n%s\n" % str(e))
             
     def total_duration(self):
+        """Give the total duration of the job
+        
+        :return: the total duration of the job in seconds
+        :rtype: int
+        """
         return self._Tf - self._T0
         
-    def run(self, logger):
+    def run(self):
+        """Launch the job by executing the remote command.
+        """
+        
+        # Prevent multiple runs of the same job
         if self.has_begun():
-            print("Warn the user that a job can only be launched one time")
+            msg = _("Warning: A job can only be launched one time")
+            msg2 = _("Trying to launch the job \"%s\" whereas it has "
+                     "already been launched." % self.name)
+            self.logger.write(src.printcolors.printcWarning("%s\n%s\n" % (msg,
+                                                                        msg2)))
             return
         
-        if not self.machine.successfully_connected(logger):
+        # Do not execute the command if the machine could not be reached
+        if not self.machine.successfully_connected(self.logger):
             self._has_finished = True
             self.out = "N\A"
-            self.err = ("Connection to machine (name : %s, host: %s, port:"
+            self.err += ("Connection to machine (name : %s, host: %s, port:"
                         " %s, user: %s) has failed\nUse the log command "
                         "to get more information."
                         % (self.machine.name,
@@ -445,49 +560,58 @@ class Job(object):
                            self.machine.port,
                            self.machine.user))
         else:
+            # Usual case : Launch the command on remote machine
             self._T0 = time.time()
             self._stdin, self._stdout, self._stderr = self.machine.exec_command(
-                                                        self.command, logger)
+                                                                  self.command,
+                                                                  self.logger)
+            # If the results are not initialized, finish the job
             if (self._stdin, self._stdout, self._stderr) == (None, None, None):
                 self._has_finished = True
                 self._Tf = time.time()
-                self.out = "N\A"
-                self.err = "The server failed to execute the command"
+                self.out += "N\A"
+                self.err += "The server failed to execute the command"
         
+        # Put the beginning flag to true.
         self._has_begun = True
     
-    def write_results(self, logger):
-        logger.write("name : " + self.name + "\n")
+    def write_results(self):
+        """Display on the terminal all the job's information
+        """
+        self.logger.write("name : " + self.name + "\n")
         if self.after:
-            logger.write("after : %s\n" % self.after)
-        logger.write("Time elapsed : %4imin %2is \n" % 
-                     (self.total_duration()/60 , self.total_duration()%60))
+            self.logger.write("after : %s\n" % self.after)
+        self.logger.write("Time elapsed : %4imin %2is \n" % 
+                     (self.total_duration()//60 , self.total_duration()%60))
         if self._T0 != -1:
-            logger.write("Begin time : %s\n" % 
+            self.logger.write("Begin time : %s\n" % 
                          time.strftime('%Y-%m-%d %H:%M:%S', 
                                        time.localtime(self._T0)) )
         if self._Tf != -1:
-            logger.write("End time   : %s\n\n" % 
+            self.logger.write("End time   : %s\n\n" % 
                          time.strftime('%Y-%m-%d %H:%M:%S', 
                                        time.localtime(self._Tf)) )
         
         machine_head = "Informations about connection :\n"
         underline = (len(machine_head) - 2) * "-"
-        logger.write(src.printcolors.printcInfo(machine_head+underline+"\n"))
-        self.machine.write_info(logger)
+        self.logger.write(src.printcolors.printcInfo(
+                                                machine_head+underline+"\n"))
+        self.machine.write_info(self.logger)
         
-        logger.write(src.printcolors.printcInfo("out : \n"))
-        if self.out is None:
-            logger.write("Unable to get output\n")
-        else:
-            logger.write(self.out + "\n")
-        logger.write(src.printcolors.printcInfo("err : \n"))
-        if self.err is None:
-            logger.write("Unable to get error\n")
+        self.logger.write(src.printcolors.printcInfo("out : \n"))
+        if self.out == "":
+            self.logger.write("Unable to get output\n")
         else:
-            logger.write(self.err + "\n")
+            self.logger.write(self.out + "\n")
+        self.logger.write(src.printcolors.printcInfo("err : \n"))
+        self.logger.write(self.err + "\n")
         
     def get_status(self):
+        """Get the status of the job (used by the Gui for xml display)
+        
+        :return: The current status of the job
+        :rtype: String
+        """
         if not self.machine.successfully_connected(self.logger):
             return "SSH connection KO"
         if not self.has_begun():
@@ -510,13 +634,11 @@ class Jobs(object):
     def __init__(self,
                  runner,
                  logger,
-                 job_file,
                  job_file_path,
                  config_jobs,
                  lenght_columns = 20):
         # The jobs configuration
         self.cfg_jobs = config_jobs
-        self.job_file = job_file
         self.job_file_path = job_file_path
         # The machine that will be used today
         self.lmachines = []
@@ -530,8 +652,6 @@ class Jobs(object):
         self.ljobs_not_today = []
         self.runner = runner
         self.logger = logger
-        # The correlation dictionary between jobs and machines
-        self.dic_job_machine = {} 
         self.len_columns = lenght_columns
         
         # the list of jobs that have not been run yet
@@ -554,31 +674,34 @@ class Jobs(object):
         '''
         name = job_def.name
         cmmnds = job_def.commands
-        timeout = job_def.timeout
+        if not "timeout" in job_def:
+            timeout = 4*60*60 # default timeout = 4h
+        else:
+            timeout = job_def.timeout
         after = None
         if 'after' in job_def:
             after = job_def.after
         application = None
         if 'application' in job_def:
             application = job_def.application
-        distribution = None
-        if 'distribution' in job_def:
-            distribution = job_def.distribution
-        table = None
-        if 'table' in job_def:
-            table = job_def.table
+        board = None
+        if 'board' in job_def:
+            board = job_def.board
+        prefix = None
+        if "prefix" in job_def:
+            prefix = job_def.prefix
             
         return Job(name,
                    machine,
                    application,
-                   distribution,
-                   table,
+                   board,
                    cmmnds,
                    timeout,
                    self.runner.cfg,
+                   self.job_file_path,
                    self.logger,
-                   self.job_file,
-                   after = after)
+                   after = after,
+                   prefix = prefix)
     
     def determine_jobs_and_machines(self):
         '''Function that reads the pyconf jobs definition and instantiates all
@@ -650,17 +773,19 @@ class Jobs(object):
                     msg = _("WARNING: The job \"%(job_name)s\" requires the "
                             "machine \"%(machine_name)s\" but this machine "
                             "is not defined in the configuration file.\n"
-                            "The job will not be launched")
-                    self.logger.write(src.printcolors.printcWarning(msg))
+                            "The job will not be launched\n")
+                    self.logger.write(src.printcolors.printcWarning(
+                                        msg % {"job_name" : job_def.name,
+                                               "machine_name" : name_machine}))
+                    continue
                                   
             a_job = self.define_job(job_def, a_machine)
-            self.dic_job_machine[a_job] = a_machine
                 
             if today in job_def.when:    
                 self.ljobs.append(a_job)
             else: # today in job_def.when
                 self.ljobs_not_today.append(a_job)
-                                     
+               
         self.lhosts = host_list
         
     def ssh_connection_all_machines(self, pad=50):
@@ -688,12 +813,39 @@ class Jobs(object):
             
             # Copy salomeTools to the remote machine
             if machine.successfully_connected(self.logger):
+                step = _("Remove SAT")
+                self.logger.write('\r%s%s%s' % (begin_line, endline, 20 * " "),3)
+                self.logger.write('\r%s%s%s' % (begin_line, endline, step), 3)
+                (__, out_dist, __) = machine.exec_command(
+                                                "rm -rf %s" % machine.sat_path,
+                                                self.logger)
+                out_dist.read()
+                
+                self.logger.flush()
                 step = _("Copy SAT")
                 self.logger.write('\r%s%s%s' % (begin_line, endline, 20 * " "),3)
                 self.logger.write('\r%s%s%s' % (begin_line, endline, step), 3)
                 self.logger.flush()
                 res_copy = machine.copy_sat(self.runner.cfg.VARS.salometoolsway,
                                             self.job_file_path)
+
+                # set the local settings of sat on the remote machine using
+                # the init command
+                (__, out_dist, __) = machine.exec_command(
+                                os.path.join(machine.sat_path,
+                                    "sat init --base default --workdir"
+                                    " default --log_dir default"),
+                                self.logger)
+                out_dist.read()    
+                
+                # get the remote machine distribution using a sat command
+                (__, out_dist, __) = machine.exec_command(
+                                os.path.join(machine.sat_path,
+                                    "sat config --value VARS.dist --no_label"),
+                                self.logger)
+                machine.distribution = out_dist.read().decode().replace("\n",
+                                                                        "")
+                
                 # Print the status of the copy
                 if res_copy == 0:
                     self.logger.write('\r%s' % 
@@ -708,8 +860,8 @@ class Jobs(object):
                     self.logger.write('\r%s%s%s %s' % 
                         (begin_line,
                          endline,
-                         src.printcolors.printc(src.OK_STATUS),
-                         _("Copy of SAT failed")), 3)
+                         src.printcolors.printc(src.KO_STATUS),
+                         _("Copy of SAT failed: %s" % res_copy)), 3)
             else:
                 self.logger.write('\r%s' % 
                                   ((len(begin_line)+len(endline)+20) * " "), 3)
@@ -734,7 +886,7 @@ class Jobs(object):
         '''
         host = hostname[0]
         port = hostname[1]
-        for jb in self.dic_job_machine:
+        for jb in self.ljobs:
             if jb.machine.host == host and jb.machine.port == port:
                 if jb.is_running():
                     return jb
@@ -749,7 +901,7 @@ class Jobs(object):
         '''
         jobs_finished_list = []
         jobs_running_list = []
-        for jb in self.dic_job_machine:
+        for jb in self.ljobs:
             if jb.is_running():
                 jobs_running_list.append(jb)
                 jb.check_time()
@@ -775,7 +927,7 @@ class Jobs(object):
             if job.after is None:
                 continue
             father_job = self.find_job_that_has_name(job.after)
-            if father_job.has_failed():
+            if father_job is not None and father_job.has_failed():
                 job.cancel()
     
     def find_job_that_has_name(self, name):
@@ -788,10 +940,8 @@ class Jobs(object):
         for jb in self.ljobs:
             if jb.name == name:
                 return jb
-
         # the following is executed only if the job was not found
-        msg = _('The job "%s" seems to be nonexistent') % name
-        raise src.SatException(msg)
+        return None
     
     def str_of_length(self, text, length):
         '''Takes a string text of any length and returns 
@@ -806,8 +956,8 @@ class Jobs(object):
             text_out = text[:length-3] + '...'
         else:
             diff = length - len(text)
-            before = " " * (diff/2)
-            after = " " * (diff/2 + diff%2)
+            before = " " * (diff//2)
+            after = " " * (diff//2 + diff%2)
             text_out = before + text + after
             
         return text_out
@@ -868,8 +1018,8 @@ class Jobs(object):
         self.logger.flush()
         
         # The infinite loop that runs the jobs
-        l_jobs_not_started = self.dic_job_machine.keys()
-        while len(self._l_jobs_finished) != len(self.dic_job_machine.keys()):
+        l_jobs_not_started = src.deepcopy_list(self.ljobs)
+        while len(self._l_jobs_finished) != len(self.ljobs):
             new_job_start = False
             for host_port in self.lhosts:
                 
@@ -880,14 +1030,21 @@ class Jobs(object):
                     if (jb.machine.host, jb.machine.port) != host_port:
                         continue 
                     if jb.after == None:
-                        jb.run(self.logger)
+                        jb.run()
                         l_jobs_not_started.remove(jb)
                         new_job_start = True
                         break
                     else:
-                        jb_before = self.find_job_that_has_name(jb.after) 
+                        jb_before = self.find_job_that_has_name(jb.after)
+                        if jb_before is None:
+                            jb.cancel()
+                            msg = _("This job was not launched because its "
+                                    "father is not in the jobs list.")
+                            jb.out = msg
+                            jb.err = msg
+                            break
                         if jb_before.has_finished():
-                            jb.run(self.logger)
+                            jb.run()
                             l_jobs_not_started.remove(jb)
                             new_job_start = True
                             break
@@ -895,7 +1052,8 @@ class Jobs(object):
             new_job_finished = self.update_jobs_states_list()
             
             if new_job_start or new_job_finished:
-                self.gui.update_xml_files(self.ljobs)            
+                if self.gui:
+                    self.gui.update_xml_files(self.ljobs)            
                 # Display the current status     
                 self.display_status(self.len_columns)
             
@@ -906,8 +1064,9 @@ class Jobs(object):
         self.logger.write(tiret_line)                   
         self.logger.write("\n\n")
         
-        self.gui.update_xml_files(self.ljobs)
-        self.gui.last_update()
+        if self.gui:
+            self.gui.update_xml_files(self.ljobs)
+            self.gui.last_update()
 
     def write_all_results(self):
         '''Display all the jobs outputs.
@@ -916,10 +1075,10 @@ class Jobs(object):
         :rtype: N\A
         '''
         
-        for jb in self.dic_job_machine.keys():
+        for jb in self.ljobs:
             self.logger.write(src.printcolors.printcLabel(
                         "#------- Results for job %s -------#\n" % jb.name))
-            jb.write_results(self.logger)
+            jb.write_results()
             self.logger.write("\n\n")
 
 class Gui(object):
@@ -927,55 +1086,101 @@ class Gui(object):
        see the jobs states
     '''
    
-    def __init__(self, xml_dir_path, l_jobs, l_jobs_not_today):
+    def __init__(self,
+                 xml_dir_path,
+                 l_jobs,
+                 l_jobs_not_today,
+                 prefix,
+                 logger,
+                 file_boards=""):
         '''Initialization
         
         :param xml_dir_path str: The path to the directory where to put 
                                  the xml resulting files
         :param l_jobs List: the list of jobs that run today
         :param l_jobs_not_today List: the list of jobs that do not run today
+        :param file_boards str: the file path from which to read the
+                                   expected boards
         '''
+        # The logging instance
+        self.logger = logger
+        
+        # The prefix to add to the xml files : date_hour
+        self.prefix = prefix
+        
+        # The path of the csv files to read to fill the expected boards
+        self.file_boards = file_boards
+        
+        if file_boards != "":
+            today = datetime.date.weekday(datetime.date.today())
+            self.parse_csv_boards(today)
+        else:
+            self.d_input_boards = {}
+        
         # The path of the global xml file
         self.xml_dir_path = xml_dir_path
         # Initialize the xml files
-        xml_global_path = os.path.join(self.xml_dir_path, "global_report.xml")
+        self.global_name = "global_report"
+        xml_global_path = os.path.join(self.xml_dir_path,
+                                       self.global_name + ".xml")
         self.xml_global_file = src.xmlManager.XmlLogFile(xml_global_path,
                                                          "JobsReport")
-        # The xml files that corresponds to the tables.
-        # {name_table : xml_object}}
-        self.d_xml_table_files = {}
+
+        # Find history for each job
+        self.history = {}
+        self.find_history(l_jobs, l_jobs_not_today)
+
+        # The xml files that corresponds to the boards.
+        # {name_board : xml_object}}
+        self.d_xml_board_files = {}
+
         # Create the lines and columns
-        self.initialize_arrays(l_jobs, l_jobs_not_today)
+        self.initialize_boards(l_jobs, l_jobs_not_today)
+
         # Write the xml file
         self.update_xml_files(l_jobs)
     
-    def initialize_arrays(self, l_jobs, l_jobs_not_today):
+    def add_xml_board(self, name):
+        '''Add a board to the board list   
+        :param name str: the board name
+        '''
+        xml_board_path = os.path.join(self.xml_dir_path, name + ".xml")
+        self.d_xml_board_files[name] =  src.xmlManager.XmlLogFile(
+                                                    xml_board_path,
+                                                    "JobsReport")
+        self.d_xml_board_files[name].add_simple_node("distributions")
+        self.d_xml_board_files[name].add_simple_node("applications")
+        self.d_xml_board_files[name].add_simple_node("board", text=name)
+           
+    def initialize_boards(self, l_jobs, l_jobs_not_today):
         '''Get all the first information needed for each file and write the 
            first version of the files   
         :param l_jobs List: the list of jobs that run today
         :param l_jobs_not_today List: the list of jobs that do not run today
         '''
-        # Get the tables to fill and put it in a dictionary
-        # {table_name : xml instance corresponding to the table}
+        # Get the boards to fill and put it in a dictionary
+        # {board_name : xml instance corresponding to the board}
         for job in l_jobs + l_jobs_not_today:
-            table = job.table
-            if (table is not None and 
-                    table not in self.d_xml_table_files.keys()):
-                xml_table_path = os.path.join(self.xml_dir_path, table + ".xml")
-                self.d_xml_table_files[table] =  src.xmlManager.XmlLogFile(
-                                                            xml_table_path,
-                                                            "JobsReport")
-                self.d_xml_table_files[table].add_simple_node("distributions")
-                self.d_xml_table_files[table].add_simple_node("applications")
-                self.d_xml_table_files[table].add_simple_node("table", text=table)
+            board = job.board
+            if (board is not None and 
+                                board not in self.d_xml_board_files.keys()):
+                self.add_xml_board(board)
+        
+        # Verify that the boards given as input are done
+        for board in list(self.d_input_boards.keys()):
+            if board not in self.d_xml_board_files:
+                self.add_xml_board(board)
+            root_node = self.d_xml_board_files[board].xmlroot
+            src.xmlManager.append_node_attrib(root_node, 
+                                              {"input_file" : self.file_boards})
         
         # Loop over all jobs in order to get the lines and columns for each 
         # xml file
         d_dist = {}
         d_application = {}
-        for table in self.d_xml_table_files:
-            d_dist[table] = []
-            d_application[table] = []
+        for board in self.d_xml_board_files:
+            d_dist[board] = []
+            d_application[board] = []
             
         l_hosts_ports = []
             
@@ -984,30 +1189,59 @@ class Gui(object):
             if (job.machine.host, job.machine.port) not in l_hosts_ports:
                 l_hosts_ports.append((job.machine.host, job.machine.port))
                 
-            distrib = job.distribution
+            distrib = job.machine.distribution
             application = job.application
             
-            table_job = job.table
-            if table is None:
+            board_job = job.board
+            if board is None:
                 continue
-            for table in self.d_xml_table_files:
-                if table_job == table:
-                    if distrib is not None and distrib not in d_dist[table]:
-                        d_dist[table].append(distrib)
+            for board in self.d_xml_board_files:
+                if board_job == board:
+                    if (distrib not in [None, ''] and 
+                                            distrib not in d_dist[board]):
+                        d_dist[board].append(distrib)
                         src.xmlManager.add_simple_node(
-                            self.d_xml_table_files[table].xmlroot.find('distributions'),
+                            self.d_xml_board_files[board].xmlroot.find(
+                                                            'distributions'),
                                                    "dist",
                                                    attrib={"name" : distrib})
                     
-                if table_job == table:
-                    if application is not None and application not in d_application[table]:
-                        d_application[table].append(application)
-                        src.xmlManager.add_simple_node(self.d_xml_table_files[table].xmlroot.find('applications'),
+                if board_job == board:
+                    if (application not in [None, ''] and 
+                                    application not in d_application[board]):
+                        d_application[board].append(application)
+                        src.xmlManager.add_simple_node(
+                            self.d_xml_board_files[board].xmlroot.find(
+                                                                'applications'),
                                                    "application",
-                                                   attrib={"name" : application})
-
+                                                   attrib={
+                                                        "name" : application})
+        
+        # Verify that there are no missing application or distribution in the
+        # xml board files (regarding the input boards)
+        for board in self.d_xml_board_files:
+            l_dist = d_dist[board]
+            if board not in self.d_input_boards.keys():
+                continue
+            for dist in self.d_input_boards[board]["rows"]:
+                if dist not in l_dist:
+                    src.xmlManager.add_simple_node(
+                            self.d_xml_board_files[board].xmlroot.find(
+                                                            'distributions'),
+                                                   "dist",
+                                                   attrib={"name" : dist})
+            l_appli = d_application[board]
+            for appli in self.d_input_boards[board]["columns"]:
+                if appli not in l_appli:
+                    src.xmlManager.add_simple_node(
+                            self.d_xml_board_files[board].xmlroot.find(
+                                                                'applications'),
+                                                   "application",
+                                                   attrib={"name" : appli})
+                
         # Initialize the hosts_ports node for the global file
-        self.xmlhosts_ports = self.xml_global_file.add_simple_node("hosts_ports")
+        self.xmlhosts_ports = self.xml_global_file.add_simple_node(
+                                                                "hosts_ports")
         for host, port in l_hosts_ports:
             host_port = "%s:%i" % (host, port)
             src.xmlManager.add_simple_node(self.xmlhosts_ports,
@@ -1015,15 +1249,116 @@ class Gui(object):
                                            attrib={"name" : host_port})
         
         # Initialize the jobs node in all files
-        for xml_file in [self.xml_global_file] + self.d_xml_table_files.values():
+        for xml_file in [self.xml_global_file] + list(
+                                            self.d_xml_board_files.values()):
             xml_jobs = xml_file.add_simple_node("jobs")      
-            # Get the jobs present in the config file but that will not be launched
-            # today
+            # Get the jobs present in the config file but 
+            # that will not be launched today
             self.put_jobs_not_today(l_jobs_not_today, xml_jobs)
             
-            xml_file.add_simple_node("infos", attrib={"name" : "last update", "JobsCommandStatus" : "running"})
+            # add also the infos node
+            xml_file.add_simple_node("infos",
+                                     attrib={"name" : "last update",
+                                             "JobsCommandStatus" : "running"})
+            
+            # and put the history node
+            history_node = xml_file.add_simple_node("history")
+            name_board = os.path.basename(xml_file.logFile)[:-len(".xml")]
+            # search for board files
+            expression = "^[0-9]{8}_+[0-9]{6}_" + name_board + ".xml$"
+            oExpr = re.compile(expression)
+            # Get the list of xml board files that are in the log directory
+            for file_name in os.listdir(self.xml_dir_path):
+                if oExpr.search(file_name):
+                    date = os.path.basename(file_name).split("_")[0]
+                    file_path = os.path.join(self.xml_dir_path, file_name)
+                    src.xmlManager.add_simple_node(history_node,
+                                                   "link",
+                                                   text=file_path,
+                                                   attrib={"date" : date})      
+            
+                
+        # Find in each board the squares that needs to be filled regarding the
+        # input csv files but that are not covered by a today job
+        for board in self.d_input_boards.keys():
+            xml_root_board = self.d_xml_board_files[board].xmlroot
+            # Find the missing jobs for today
+            xml_missing = src.xmlManager.add_simple_node(xml_root_board,
+                                                 "missing_jobs")
+            for row, column in self.d_input_boards[board]["jobs"]:
+                found = False
+                for job in l_jobs:
+                    if (job.application == column and 
+                        job.machine.distribution == row):
+                        found = True
+                        break
+                if not found:
+                    src.xmlManager.add_simple_node(xml_missing,
+                                            "job",
+                                            attrib={"distribution" : row,
+                                                    "application" : column })
+            # Find the missing jobs not today
+            xml_missing_not_today = src.xmlManager.add_simple_node(
+                                                 xml_root_board,
+                                                 "missing_jobs_not_today")
+            for row, column in self.d_input_boards[board]["jobs_not_today"]:
+                found = False
+                for job in l_jobs_not_today:
+                    if (job.application == column and 
+                        job.machine.distribution == row):
+                        found = True
+                        break
+                if not found:
+                    src.xmlManager.add_simple_node(xml_missing_not_today,
+                                            "job",
+                                            attrib={"distribution" : row,
+                                                    "application" : column })
 
-    
+    def find_history(self, l_jobs, l_jobs_not_today):
+        """find, for each job, in the existing xml boards the results for the 
+           job. Store the results in the dictionary self.history = {name_job : 
+           list of (date, status, list links)}
+        
+        :param l_jobs List: the list of jobs to run today   
+        :param l_jobs_not_today List: the list of jobs that do not run today
+        """
+        # load all the history
+        expression = "^[0-9]{8}_+[0-9]{6}_" + self.global_name + ".xml$"
+        oExpr = re.compile(expression)
+        # Get the list of global xml that are in the log directory
+        l_globalxml = []
+        for file_name in os.listdir(self.xml_dir_path):
+            if oExpr.search(file_name):
+                file_path = os.path.join(self.xml_dir_path, file_name)
+                try:
+                    global_xml = src.xmlManager.ReadXmlFile(file_path)
+                    l_globalxml.append(global_xml)
+                except Exception as e:
+                    msg = _("\nWARNING: the file %s can not be read, it will be "
+                            "ignored\n%s" % (file_path, e))
+                    self.logger.write("%s\n" % src.printcolors.printcWarning(
+                                                                        msg), 5)
+                    
+        # Construct the dictionary self.history 
+        for job in l_jobs + l_jobs_not_today:
+            l_links = []
+            for global_xml in l_globalxml:
+                date = os.path.basename(global_xml.filePath).split("_")[0]
+                global_root_node = global_xml.xmlroot.find("jobs")
+                job_node = src.xmlManager.find_node_by_attrib(
+                                                              global_root_node,
+                                                              "job",
+                                                              "name",
+                                                              job.name)
+                if job_node:
+                    if job_node.find("remote_log_file_path") is not None:
+                        link = job_node.find("remote_log_file_path").text
+                        res_job = job_node.find("res").text
+                        if link != "nothing":
+                            l_links.append((date, res_job, link))
+            l_links = sorted(l_links, reverse=True)
+            self.history[job.name] = l_links
+  
     def put_jobs_not_today(self, l_jobs_not_today, xml_node_jobs):
         '''Get all the first information needed for each file and write the 
            first version of the files   
@@ -1038,8 +1373,8 @@ class Gui(object):
             src.xmlManager.add_simple_node(xmlj, "application", job.application)
             src.xmlManager.add_simple_node(xmlj,
                                            "distribution",
-                                           job.distribution)
-            src.xmlManager.add_simple_node(xmlj, "table", job.table)
+                                           job.machine.distribution)
+            src.xmlManager.add_simple_node(xmlj, "board", job.board)
             src.xmlManager.add_simple_node(xmlj,
                                        "commands", " ; ".join(job.commands))
             src.xmlManager.add_simple_node(xmlj, "state", "Not today")
@@ -1049,13 +1384,82 @@ class Gui(object):
             src.xmlManager.add_simple_node(xmlj, "user", job.machine.user)
             src.xmlManager.add_simple_node(xmlj, "sat_path",
                                                         job.machine.sat_path)
-    
+            xml_history = src.xmlManager.add_simple_node(xmlj, "history")
+            for i, (date, res_job, link) in enumerate(self.history[job.name]):
+                if i==0:
+                    # tag the first one (the last one)
+                    src.xmlManager.add_simple_node(xml_history,
+                                                   "link",
+                                                   text=link,
+                                                   attrib={"date" : date,
+                                                           "res" : res_job,
+                                                           "last" : "yes"})
+                else:
+                    src.xmlManager.add_simple_node(xml_history,
+                                                   "link",
+                                                   text=link,
+                                                   attrib={"date" : date,
+                                                           "res" : res_job,
+                                                           "last" : "no"})
+
+    def parse_csv_boards(self, today):
+        """ Parse the csv file that describes the boards to produce and fill 
+            the dict d_input_boards that contains the csv file content
+        
+        :param today int: the current day of the week 
+        """
+        # open the csv file and read its content
+        l_read = []
+        with open(self.file_boards, 'r') as f:
+            reader = csv.reader(f,delimiter=CSV_DELIMITER)
+            for row in reader:
+                l_read.append(row)
+        # get the delimiter for the boards (empty line)
+        boards_delimiter = [''] * len(l_read[0])
+        # Make the list of boards, by splitting with the delimiter
+        l_boards = [list(y) for x, y in itertools.groupby(l_read,
+                                    lambda z: z == boards_delimiter) if not x]
+           
+        # loop over the csv lists of lines and get the rows, columns and jobs
+        d_boards = {}
+        for input_board in l_boards:
+            # get board name
+            board_name = input_board[0][0]
+            
+            # Get columns list
+            columns = input_board[0][1:]
+            
+            rows = []
+            jobs = []
+            jobs_not_today = []
+            for line in input_board[1:]:
+                row = line[0]
+                rows.append(row)
+                for i, square in enumerate(line[1:]):
+                    if square=='':
+                        continue
+                    days = square.split(DAYS_SEPARATOR)
+                    days = [int(day) for day in days]
+                    job = (row, columns[i])
+                    if today in days:                           
+                        jobs.append(job)
+                    else:
+                        jobs_not_today.append(job)
+
+            d_boards[board_name] = {"rows" : rows,
+                                    "columns" : columns,
+                                    "jobs" : jobs,
+                                    "jobs_not_today" : jobs_not_today}
+        
+        self.d_input_boards = d_boards
+
     def update_xml_files(self, l_jobs):
         '''Write all the xml files with updated information about the jobs   
 
         :param l_jobs List: the list of jobs that run today
         '''
-        for xml_file in [self.xml_global_file] + self.d_xml_table_files.values():
+        for xml_file in [self.xml_global_file] + list(
+                                            self.d_xml_board_files.values()):
             self.update_xml_file(l_jobs, xml_file)
             
         # Write the file
@@ -1094,12 +1498,20 @@ class Gui(object):
             src.xmlManager.add_simple_node(xmlj, "host", job.machine.host)
             src.xmlManager.add_simple_node(xmlj, "port", str(job.machine.port))
             src.xmlManager.add_simple_node(xmlj, "user", job.machine.user)
+            xml_history = src.xmlManager.add_simple_node(xmlj, "history")
+            for date, res_job, link in self.history[job.name]:
+                src.xmlManager.add_simple_node(xml_history,
+                                               "link",
+                                               text=link,
+                                               attrib={"date" : date,
+                                                       "res" : res_job})
+
             src.xmlManager.add_simple_node(xmlj, "sat_path",
                                            job.machine.sat_path)
             src.xmlManager.add_simple_node(xmlj, "application", job.application)
             src.xmlManager.add_simple_node(xmlj, "distribution",
-                                           job.distribution)
-            src.xmlManager.add_simple_node(xmlj, "table", job.table)
+                                           job.machine.distribution)
+            src.xmlManager.add_simple_node(xmlj, "board", job.board)
             src.xmlManager.add_simple_node(xmlj, "timeout", str(job.timeout))
             src.xmlManager.add_simple_node(xmlj, "commands",
                                            " ; ".join(job.commands))
@@ -1119,6 +1531,16 @@ class Gui(object):
                 src.xmlManager.add_simple_node(xmlj,
                                                "remote_log_file_path",
                                                "nothing")           
+            # Search for the test log if there is any
+            l_test_log_files = self.find_test_log(job.remote_log_files)
+            xml_test = src.xmlManager.add_simple_node(xmlj,
+                                                      "test_log_file_path")
+            for test_log_path, res_test, nb_fails in l_test_log_files:
+                test_path_node = src.xmlManager.add_simple_node(xml_test,
+                                               "path",
+                                               test_log_path)
+                test_path_node.attrib["res"] = res_test
+                test_path_node.attrib["nb_fails"] = nb_fails
             
             xmlafter = src.xmlManager.add_simple_node(xmlj, "after", job.after)
             # get the job father
@@ -1127,18 +1549,31 @@ class Gui(object):
                 for jb in l_jobs:
                     if jb.name == job.after:
                         job_father = jb
-                if job_father is None:
-                    msg = _("The job %(father_name)s that is parent of "
-                            "%(son_name)s is not in the job list." %
-                            {"father_name" : job.after , "son_name" : job.name})
-                    raise src.SatException(msg)
                 
-                if len(job_father.remote_log_files) > 0:
+                if (job_father is not None and 
+                        len(job_father.remote_log_files) > 0):
                     link = job_father.remote_log_files[0]
                 else:
                     link = "nothing"
                 src.xmlManager.append_node_attrib(xmlafter, {"link" : link})
             
+            # Verify that the job is to be done today regarding the input csv
+            # files
+            if job.board and job.board in self.d_input_boards.keys():
+                found = False
+                for dist, appli in self.d_input_boards[job.board]["jobs"]:
+                    if (job.machine.distribution == dist 
+                        and job.application == appli):
+                        found = True
+                        src.xmlManager.add_simple_node(xmlj,
+                                               "extra_job",
+                                               "no")
+                        break
+                if not found:
+                    src.xmlManager.add_simple_node(xmlj,
+                                               "extra_job",
+                                               "yes")
+            
         
         # Update the date
         xml_node_infos = xml_file.xmlroot.find('infos')
@@ -1147,6 +1582,33 @@ class Gui(object):
                     datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")})
                
 
+    def find_test_log(self, l_remote_log_files):
+        '''Find if there is a test log (board) in the remote log files and 
+           the path to it. There can be several test command, so the result is
+           a list.
+
+        :param l_remote_log_files List: the list of all remote log files
+        :return: the list of (test log files path, res of the command, number of fails)
+        :rtype: List
+        '''
+        res = []
+        for file_path in l_remote_log_files:
+            dirname = os.path.basename(os.path.dirname(file_path))
+            file_name = os.path.basename(file_path)
+            regex = src.logger.log_all_command_file_expression
+            oExpr = re.compile(regex)
+            if dirname == "TEST" and oExpr.search(file_name):
+                # find the res of the command
+                prod_node = etree.parse(file_path).getroot().find("product")
+                res_test = prod_node.attrib["global_res"]
+                # find the number of fails
+                testbase_node = prod_node.find("tests").find("testbase")
+                nb_fails = int(testbase_node.attrib["failed"])
+                # put the file path, the res of the test command and the number 
+                # of fails in the output
+                res.append((file_path, res_test, nb_fails))
+                
+        return res
     
     def last_update(self, finish_status = "finished"):
         '''update information about the jobs for the file xml_file   
@@ -1154,25 +1616,88 @@ class Gui(object):
         :param l_jobs List: the list of jobs that run today
         :param xml_file xmlManager.XmlLogFile: the xml instance to update
         '''
-        for xml_file in [self.xml_global_file] + self.d_xml_table_files.values():
+        for xml_file in [self.xml_global_file] + list(self.d_xml_board_files.values()):
             xml_node_infos = xml_file.xmlroot.find('infos')
             src.xmlManager.append_node_attrib(xml_node_infos,
                         attrib={"JobsCommandStatus" : finish_status})
         # Write the file
         self.write_xml_files()
-    
+
+    def write_xml_file(self, xml_file, stylesheet):
+        ''' Write one xml file and the same file with prefix
+        '''
+        xml_file.write_tree(stylesheet)
+        file_path = xml_file.logFile
+        file_dir = os.path.dirname(file_path)
+        file_name = os.path.basename(file_path)
+        file_name_with_prefix = self.prefix + "_" + file_name
+        xml_file.write_tree(stylesheet, os.path.join(file_dir,
+                                                     file_name_with_prefix))
+        
     def write_xml_files(self):
         ''' Write the xml files   
         '''
-        self.xml_global_file.write_tree(STYLESHEET_GLOBAL)
-        for xml_file in self.d_xml_table_files.values():
-            xml_file.write_tree(STYLESHEET_TABLE)
-        
+        self.write_xml_file(self.xml_global_file, STYLESHEET_GLOBAL)
+        for xml_file in self.d_xml_board_files.values():
+            self.write_xml_file(xml_file, STYLESHEET_BOARD)
+
+def get_config_file_path(job_config_name, l_cfg_dir):
+    found = False
+    file_jobs_cfg = None
+    if os.path.exists(job_config_name) and job_config_name.endswith(".pyconf"):
+        found = True
+        file_jobs_cfg = job_config_name
+    else:
+        for cfg_dir in l_cfg_dir:
+            file_jobs_cfg = os.path.join(cfg_dir, job_config_name)
+            if not file_jobs_cfg.endswith('.pyconf'):
+                file_jobs_cfg += '.pyconf'
+            
+            if not os.path.exists(file_jobs_cfg):
+                continue
+            else:
+                found = True
+                break
+    return found, file_jobs_cfg
+
+def develop_factorized_jobs(config_jobs):
+    '''develop the factorized jobs (jobs whose machine field is a list) into
+       one job per machine
+    
+    :param config_jobs Config: the config corresponding to the jobs description
+    '''
+    developed_jobs_list = []
+    for jb in config_jobs.jobs:
+        # case where the jobs are not developed
+        if type(jb.machine) == type(""):
+            developed_jobs_list.append(jb)
+            continue
+        # Case where the jobs must be developed
+        # Example:
+        # machine : ["CO7.2 physique", ["CO6.4 physique", $MONDAY, $TUESDAY ], "FD22"]
+        name_job = jb.name
+        for machine in jb.machine:
+            new_job = src.pyconf.deepCopyMapping(jb)
+            # case where there is a job on the machine corresponding to all
+            # days in the when variable. 
+            if type(machine) == type(""):
+                new_job.machine = machine
+                new_job.name = name_job + " / " + machine
+            else:
+                # case where the days are redefined
+                new_job.machine = machine[0]
+                new_job.name = name_job + " / " + machine[0]
+                new_job.when = machine[1:]
+            developed_jobs_list.append(new_job)
+    
+    config_jobs.jobs = developed_jobs_list
+            
+
 ##
 # Describes the command
 def description():
     return _("The jobs command launches maintenances that are described"
-             " in the dedicated jobs configuration file.")
+             " in the dedicated jobs configuration file.\n\nexample:\nsat "
+             "jobs --name my_jobs --publish")
 
 ##
 # Runs the command.
@@ -1180,13 +1705,7 @@ def run(args, runner, logger):
        
     (options, args) = parser.parse_args(args)
        
-    jobs_cfg_files_dir = runner.cfg.SITE.jobs.config_path
-    
-    l_cfg_dir = [jobs_cfg_files_dir,
-                 os.path.join(runner.cfg.VARS.datadir, "jobs")]
-    
-    # Make sure the path to the jobs config files directory exists 
-    src.ensure_path_exists(jobs_cfg_files_dir)   
+    l_cfg_dir = runner.cfg.PATHS.JOBPATH
     
     # list option : display all the available config files
     if options.list:
@@ -1194,7 +1713,8 @@ def run(args, runner, logger):
             if not options.no_label:
                 logger.write("------ %s\n" % 
                                  src.printcolors.printcHeader(cfg_dir))
-    
+            if not os.path.exists(cfg_dir):
+                continue
             for f in sorted(os.listdir(cfg_dir)):
                 if not f.endswith('.pyconf'):
                     continue
@@ -1205,49 +1725,66 @@ def run(args, runner, logger):
     # Make sure the jobs_config option has been called
     if not options.jobs_cfg:
         message = _("The option --jobs_config is required\n")      
-        raise src.SatException( message )
-    
-    # Find the file in the directories
-    found = False
-    for cfg_dir in l_cfg_dir:
-        file_jobs_cfg = os.path.join(cfg_dir, options.jobs_cfg)
-        if not file_jobs_cfg.endswith('.pyconf'):
-            file_jobs_cfg += '.pyconf'
-        
-        if not os.path.exists(file_jobs_cfg):
-            continue
-        else:
-            found = True
-            break
-    
-    if not found:
-        msg = _("The file configuration %(name_file)s was not found."
-                "\nUse the --list option to get the possible files.")
-        src.printcolors.printcError(msg)
+        src.printcolors.printcError(message)
         return 1
     
+    # Find the file in the directories, unless it is a full path
+    # merge all in a config
+    merger = src.pyconf.ConfigMerger()
+    config_jobs = src.pyconf.Config()
+    l_conf_files_path = []
+    for config_file in options.jobs_cfg:
+        found, file_jobs_cfg = get_config_file_path(config_file, l_cfg_dir)
+        if not found:
+            msg = _("The file configuration %s was not found."
+                    "\nUse the --list option to get the "
+                    "possible files." % config_file)
+            logger.write("%s\n" % src.printcolors.printcError(msg), 1)
+            return 1
+        l_conf_files_path.append(file_jobs_cfg)
+        # Read the config that is in the file
+        one_config_jobs = src.read_config_from_a_file(file_jobs_cfg)
+        merger.merge(config_jobs, one_config_jobs)
+    
     info = [
         (_("Platform"), runner.cfg.VARS.dist),
-        (_("File containing the jobs configuration"), file_jobs_cfg)
+        (_("Files containing the jobs configuration"), l_conf_files_path)
     ]    
     src.print_info(logger, info)
-    
-    # Read the config that is in the file
-    config_jobs = src.read_config_from_a_file(file_jobs_cfg)
+
     if options.only_jobs:
         l_jb = src.pyconf.Sequence()
         for jb in config_jobs.jobs:
             if jb.name in options.only_jobs:
                 l_jb.append(jb,
-                "Adding a job that was given in only_jobs option parameters")
+                "Job that was given in only_jobs option parameters\n")
         config_jobs.jobs = l_jb
-              
+    
+    # Parse the config jobs in order to develop all the factorized jobs
+    develop_factorized_jobs(config_jobs)
+    
+    # Make a unique file that contains all the jobs in order to use it 
+    # on every machine
+    name_pyconf = "_".join([os.path.basename(path)[:-len('.pyconf')] 
+                            for path in l_conf_files_path]) + ".pyconf"
+    path_pyconf = src.get_tmp_filename(runner.cfg, name_pyconf)
+    # Save the config
+    f = file( path_pyconf , 'w')
+    config_jobs.__save__(f)
+    
+    # log the paramiko problems
+    log_dir = src.get_log_path(runner.cfg)
+    paramiko_log_dir_path = os.path.join(log_dir, "JOBS")
+    src.ensure_path_exists(paramiko_log_dir_path)
+    paramiko.util.log_to_file(os.path.join(paramiko_log_dir_path,
+                                           logger.txtFileName))
+    
     # Initialization
     today_jobs = Jobs(runner,
                       logger,
-                      options.jobs_cfg,
-                      file_jobs_cfg,
+                      path_pyconf,
                       config_jobs)
+    
     # SSH connection to all machines
     today_jobs.ssh_connection_all_machines()
     if options.test_connection:
@@ -1255,10 +1792,48 @@ def run(args, runner, logger):
     
     gui = None
     if options.publish:
-        gui = Gui("/export/home/serioja/LOGS",
+        logger.write(src.printcolors.printcInfo(
+                                        _("Initialize the xml boards : ")), 5)
+        logger.flush()
+        
+        # Copy the stylesheets in the log directory 
+        log_dir = log_dir
+        xsl_dir = os.path.join(runner.cfg.VARS.srcDir, 'xsl')
+        files_to_copy = []
+        files_to_copy.append(os.path.join(xsl_dir, STYLESHEET_GLOBAL))
+        files_to_copy.append(os.path.join(xsl_dir, STYLESHEET_BOARD))
+        files_to_copy.append(os.path.join(xsl_dir, "command.xsl"))
+        files_to_copy.append(os.path.join(xsl_dir, "running.gif"))
+        for file_path in files_to_copy:
+            # OP We use copy instead of copy2 to update the creation date
+            #    So we can clean the LOGS directories easily
+            shutil.copy(file_path, log_dir)
+        
+        # Instantiate the Gui in order to produce the xml files that contain
+        # all the boards
+        gui = Gui(log_dir,
                   today_jobs.ljobs,
-                  today_jobs.ljobs_not_today,)
-    
+                  today_jobs.ljobs_not_today,
+                  runner.cfg.VARS.datehour,
+                  logger,
+                  file_boards = options.input_boards)
+        
+        logger.write(src.printcolors.printcSuccess("OK"), 5)
+        logger.write("\n\n", 5)
+        logger.flush()
+        
+        # Display the list of the xml files
+        logger.write(src.printcolors.printcInfo(("Here is the list of published"
+                                                 " files :\n")), 4)
+        logger.write("%s\n" % gui.xml_global_file.logFile, 4)
+        for board in gui.d_xml_board_files.keys():
+            file_path = gui.d_xml_board_files[board].logFile
+            file_name = os.path.basename(file_path)
+            logger.write("%s\n" % file_path, 4)
+            logger.add_link(file_name, "board", 0, board)
+              
+        logger.write("\n", 4)
+        
     today_jobs.gui = gui
     
     interruped = False
@@ -1269,15 +1844,46 @@ def run(args, runner, logger):
         interruped = True
         logger.write("\n\n%s\n\n" % 
                 (src.printcolors.printcWarning(_("Forced interruption"))), 1)
+    except Exception as e:
+        msg = _("CRITICAL ERROR: The jobs loop has been interrupted\n")
+        logger.write("\n\n%s\n" % src.printcolors.printcError(msg) )
+        logger.write("%s\n" % str(e))
+        # get stack
+        __, __, exc_traceback = sys.exc_info()
+        fp = tempfile.TemporaryFile()
+        traceback.print_tb(exc_traceback, file=fp)
+        fp.seek(0)
+        stack = fp.read()
+        logger.write("\nTRACEBACK: %s\n" % stack.replace('"',"'"), 1)
         
     finally:
+        res = 0
+        if interruped:
+            res = 1
+            msg = _("Killing the running jobs and trying"
+                    " to get the corresponding logs\n")
+            logger.write(src.printcolors.printcWarning(msg))
+            
         # find the potential not finished jobs and kill them
         for jb in today_jobs.ljobs:
             if not jb.has_finished():
-                jb.kill_remote_process()
+                res = 1
+                try:
+                    jb.kill_remote_process()
+                except Exception as e:
+                    msg = _("Failed to kill job %s: %s\n" % (jb.name, e))
+                    logger.write(src.printcolors.printcWarning(msg))
+            if jb.res_job != "0":
+                res = 1
         if interruped:
-            today_jobs.gui.last_update(_("Forced interruption"))
+            if today_jobs.gui:
+                today_jobs.gui.last_update(_("Forced interruption"))
         else:
-            today_jobs.gui.last_update()
+            if today_jobs.gui:
+                today_jobs.gui.last_update()
         # Output the results
         today_jobs.write_all_results()
+        # Remove the temporary pyconf file
+        if os.path.exists(path_pyconf):
+            os.remove(path_pyconf)
+        return res