import shutil
import itertools
import re
-import paramiko
+
+# paramiko may be missing on some machines: try the import and fall back
+# to a marker string instead of failing at module load time
+try:
+ import paramiko
+except:
+ paramiko = "import paramiko impossible"
+ pass
import src
+
+
import src.ElementTree as etree
STYLESHEET_GLOBAL = "jobs_global_report.xsl"
" job --jobs_config " +
os.path.join(self.machine.sat_path,
self.name_remote_jobs_pyconf) +
- " --name " +
- self.name)
+ " --name " + self.name)
if prefix:
self.command = prefix + ' "' + self.command +'"'
self._has_finished = True
self._has_timouted = True
self._Tf = time.time()
- (out_kill, _) = self.kill_remote_process()
+ (out_kill, __) = self.kill_remote_process()
self.out += "TIMEOUT \n" + out_kill
self.err += "TIMEOUT : %s seconds elapsed\n" % str(self.timeout)
try:
self.get_log_files()
except Exception as e:
- # The 2 following lines must be suppressed after the bug is fixed
- print("The error type: ")
- print(type(e))
- print("The error: ")
- print(e)
- self.err += _("Unable to get remote log files: %s" % str(e))
+ self.err += _("Unable to get remote log files!\n%s\n" % str(e))
def total_duration(self):
"""Give the total duration of the job
msg = _("WARNING: The job \"%(job_name)s\" requires the "
"machine \"%(machine_name)s\" but this machine "
"is not defined in the configuration file.\n"
- "The job will not be launched")
- self.logger.write(src.printcolors.printcWarning(msg))
+ "The job will not be launched\n")
+ self.logger.write(src.printcolors.printcWarning(
+ msg % {"job_name" : job_def.name,
+ "machine_name" : name_machine}))
+ continue
a_job = self.define_job(job_def, a_machine)
self.logger.flush()
res_copy = machine.copy_sat(self.runner.cfg.VARS.salometoolsway,
self.job_file_path)
+
+ # set the local settings of sat on the remote machine using
+ # the init command
+ (__, out_dist, __) = machine.exec_command(
+ os.path.join(machine.sat_path,
+ "sat init --base default --workdir"
+ " default --log_dir default"),
+ self.logger)
+ out_dist.read()
+
# get the remote machine distribution using a sat command
(__, out_dist, __) = machine.exec_command(
os.path.join(machine.sat_path,
self.logger)
machine.distribution = out_dist.read().decode().replace("\n",
"")
+
# Print the status of the copy
if res_copy == 0:
self.logger.write('\r%s' %
continue
for board in self.d_xml_board_files:
if board_job == board:
- if distrib is not None and distrib not in d_dist[board]:
+ if (distrib not in [None, ''] and
+ distrib not in d_dist[board]):
d_dist[board].append(distrib)
src.xmlManager.add_simple_node(
self.d_xml_board_files[board].xmlroot.find(
attrib={"name" : distrib})
if board_job == board:
- if (application is not None and
+ if (application not in [None, ''] and
application not in d_application[board]):
d_application[board].append(application)
src.xmlManager.add_simple_node(
break
return found, file_jobs_cfg
+def develop_factorized_jobs(config_jobs):
+ '''update information about the jobs for the file xml_file
+
+ :param config_jobs Config: the config corresponding to the jos description
+ '''
+ developed_jobs_list = []
+ for jb in config_jobs.jobs:
+ # case where the jobs are not developed
+ if type(jb.machine) == type(""):
+ developed_jobs_list.append(jb)
+ continue
+ # Case where the jobs must be developed
+ # Example:
+ # machine : ["CO7.2 physique", ["CO6.4 physique", $MONDAY, $TUESDAY ], "FD22"]
+ name_job = jb.name
+ for machine in jb.machine:
+ new_job = src.pyconf.deepCopyMapping(jb)
+ # case where there is a jobs on the machine corresponding to all
+ # days in when variable.
+ if type(machine) == type(""):
+ new_job.machine = machine
+ new_job.name = name_job + " / " + machine
+ else:
+ # case the days are re defined
+ new_job.machine = machine[0]
+ new_job.name = name_job + " / " + machine[0]
+ new_job.when = machine[1:]
+ developed_jobs_list.append(new_job)
+
+ config_jobs.jobs = developed_jobs_list
+
+
##
# Describes the command
def description():
"Job that was given in only_jobs option parameters\n")
config_jobs.jobs = l_jb
+ # Parse the config jobs in order to develop all the factorized jobs
+ develop_factorized_jobs(config_jobs)
+
# Make a unique file that contain all the jobs in order to use it
# on every machine
name_pyconf = "_".join([os.path.basename(path)[:-len('.pyconf')]
f = file( path_pyconf , 'w')
config_jobs.__save__(f)
+ # log the paramiko problems
+ log_dir = src.get_log_path(runner.cfg)
+ paramiko_log_dir_path = os.path.join(log_dir, "JOBS")
+ src.ensure_path_exists(paramiko_log_dir_path)
+ paramiko.util.log_to_file(os.path.join(paramiko_log_dir_path,
+ logger.txtFileName))
+
# Initialization
today_jobs = Jobs(runner,
logger,
path_pyconf,
config_jobs)
+
# SSH connection to all machines
today_jobs.ssh_connection_all_machines()
if options.test_connection:
logger.flush()
# Copy the stylesheets in the log directory
- log_dir = runner.cfg.USER.log_dir
+ log_dir = log_dir
xsl_dir = os.path.join(runner.cfg.VARS.srcDir, 'xsl')
files_to_copy = []
files_to_copy.append(os.path.join(xsl_dir, STYLESHEET_GLOBAL))
files_to_copy.append(os.path.join(xsl_dir, STYLESHEET_BOARD))
+ files_to_copy.append(os.path.join(xsl_dir, "command.xsl"))
files_to_copy.append(os.path.join(xsl_dir, "running.gif"))
for file_path in files_to_copy:
- shutil.copy2(file_path, log_dir)
+ # OP We use copy instead of copy2 to update the creation date
+ # So we can clean the LOGS directories easily
+ shutil.copy(file_path, log_dir)
# Instanciate the Gui in order to produce the xml files that contain all
# the boards
- gui = Gui(runner.cfg.USER.log_dir,
+ gui = Gui(log_dir,
today_jobs.ljobs,
today_jobs.ljobs_not_today,
runner.cfg.VARS.datehour,