From: Christian Van Wambeke
Date: Fri, 27 Apr 2018 14:40:23 +0000 (+0200)
Subject: begin fix logger.write
X-Git-Url: http://git.salome-platform.org/gitweb/?a=commitdiff_plain;h=1369cc628cc636348fae228435859b250a0f02bf;p=tools%2Fsat.git

begin fix logger.write
---

diff --git a/commands/application.py b/commands/application.py
index d9017b0..a145061 100644
--- a/commands/application.py
+++ b/commands/application.py
@@ -365,11 +365,7 @@ def create_application(config, appli_dir, catalog, logger, display=True):
         logger.warning(msg)

     # generate the launch file
-    retcode = generate_launch_file(config,
-                                   appli_dir,
-                                   catalog,
-                                   logger,
-                                   SALOME_modules)
+    retcode = generate_launch_file(config, appli_dir, catalog, logger, SALOME_modules)

     if retcode == 0:
         cmd = UTS.label("%s/salome" % appli_dir)

diff --git a/commands/config.py b/commands/config.py
index 2f063e8..287e7e7 100644
--- a/commands/config.py
+++ b/commands/config.py
@@ -229,4 +229,4 @@ If a name is given the new config file takes the given name."""))
         for product_name in config.APPLICATION.products.keys():
             logger.info("%s\n" % product_name)

-    return RCO.ReturnCode("OK")
+    return RCO.ReturnCode("OK", "config command done")

diff --git a/commands/generate.py b/commands/generate.py
index 99edea1..ced2b8d 100644
--- a/commands/generate.py
+++ b/commands/generate.py
@@ -71,26 +71,25 @@ class Command(_BaseCommand):
     # Check that the command has been called with an application
     src.check_config_has_application(config)

-    logger.write(_('Generation of SALOME modules for application %s\n') % \
-                 UTS.label(config.VARS.application), 1)
+    logger.info( _('Generation of SALOME modules for application %s\n') % \
+                 UTS.label(config.VARS.application) )

     status = src.KO_STATUS

     # verify that YACSGEN is available
-    yacsgen_dir = check_yacsgen(config, options.yacsgen, logger)
-
-    if isinstance(yacsgen_dir, tuple):
-        # The check failed
-        __, error = yacsgen_dir
-        msg = _("check yacsgen: %s\n") % error
-        logger.error(msg)
-        return 1
+    returnCode = check_yacsgen(config, options.yacsgen, logger)
+    if not returnCode.isOk():
+        logger.error(returnCode.getWhy())
+        return returnCode
+    else:
+        yacsgen_dir = returnCode.getValue()

     # Make the generator module visible by python
     sys.path.insert(0, yacsgen_dir)

-    logger.info(" insert directory PATH %s = %s\n" % \
-                ("YACSGEN", UTS.blue(yacsgen_dir)
+    logger.info(" insert directory PATH %s = %s\n" % \
+                ("YACSGEN", UTS.blue(yacsgen_dir)) )

     products = config.APPLICATION.products
     if options.products:
@@ -100,19 +99,19 @@ class Command(_BaseCommand):
     nbgen = 0

     context = build_context(config, logger)
     for product in products:
+        lprod = UTS.label(product)
-        header = _("Generating %s") % UTS.label(product)
+        header = _("Generating %s") % lprod
         header += " %s " % ("." * (20 - len(product)))
-        logger.write(header, 3)
-        logger.flush()
+        logger.info(header)

         if product not in config.PRODUCTS:
-            logger.write(_("Unknown product\n"), 3, False)
+            logger.error(_("Unknown product %s") % lprod)
             continue

         pi = src.product.get_product_config(config, product)
         if not src.product.product_is_generated(pi):
-            logger.write(_("not a generated product\n"), 3, False)
+            logger.info(_("not a generated product %s") % lprod)
             continue

         nbgen += 1
@@ -125,19 +124,15 @@ class Command(_BaseCommand):
             result = _("ERROR: %s") % result
         details.append([product, result])

-    if len(details) == 0:
-        status = src.OK_STATUS
-    else: #if config.USER.output_level != 3:
-        logger.write("\n", 2, False)
-        logger.write(_("The following modules were not generated correctly:\n"), 2)
+    if len(details) != 0:
+        msg = _("The following modules were not generated correctly:\n")
         for d in details:
-            logger.write("  %s: %s\n" % (d[0], d[1]), 2, False)
-        logger.write("\n", 2, False)
+            msg += "  %s: %s\n" % (d[0], d[1])
+        logger.error(msg)
+        return RCO.ReturnCode("KO", msg)
+    else:
+        return RCO.ReturnCode("OK", "generate command done")

-    if status == src.OK_STATUS:
-        return 0
-    return len(details)
-
 def generate_component_list(config, product_info, context, logger):
     res = "?"
@@ -201,7 +196,7 @@ def generate_component(config, compo, product_info, context, header, logger):

     # delete previous generated directory if it already exists
     if os.path.exists(compo_info.source_dir):
-        logger.write("  delete %s\n" % compo_info.source_dir, 4)
+        logger.debug("  delete %s" % compo_info.source_dir)
         shutil.rmtree(compo_info.source_dir)

     # generate generates in the current directory => change for generate dir
@@ -309,11 +304,9 @@ def build_context(config, logger):
         val = os.getenv(prod_env)
         if os.getenv(prod_env) is None:
             if p not in config.APPLICATION.products:
-                warn = _("product %(product)s is not defined. Include it in the"
-                         " application or define $%(env)s.") % \
-                       { "product": p, "env": prod_env}
-                logger.write(UTS.red(warn), 1)
-                logger.write("\n", 3, False)
+                msg = _("product %s is not defined. Include it in the application or define $%s.") % \
+                      (p, prod_env)
+                logger.error(UTS.red(msg))
                 val = ""
             val = ctxenv.environ.environ[prod_env]
         dicdir[p] = val
@@ -363,8 +356,7 @@ def check_yacsgen(config, directory, logger):

     :param config Config: The global configuration.
     :param directory str: The directory given by option --yacsgen
     :param logger Logger: The logger instance
-    :return: The path to yacsgen directory
-    :rtype: str
+    :return: RCO.ReturnCode, with the path to the yacsgen directory as value if OK
     """
     # first check for YACSGEN (command option, then product, then environment)
     yacsgen_dir = None
@@ -381,31 +373,28 @@ def check_yacsgen(config, directory, logger):
         yacs_src = _("Using YACSGEN from environment")

     if yacsgen_dir is None:
-        return (False, _("The generate command requires YACSGEN."))
+        return RCO.ReturnCode("KO", _("The generate command requires YACSGEN."))

-    logger.write("  %s\n" % yacs_src, 2, True)
-    logger.write("  %s\n" % yacsgen_dir, 5, True)
+    logger.info("  %s in %s" % (yacs_src, yacsgen_dir))

     if not os.path.exists(yacsgen_dir):
-        message = _("YACSGEN directory not found: '%s'") % yacsgen_dir
-        return (False, _(message))
+        msg = _("YACSGEN directory not found: '%s'") % yacsgen_dir
+        return RCO.ReturnCode("KO", msg)

     # load module_generator
     c = check_module_generator(yacsgen_dir)
     if c is not None:
-        return c
+        return RCO.ReturnCode("OK", "check_module_generator on %s" % yacsgen_dir, c)

     pv = os.getenv("PYTHON_VERSION")
     if pv is None:
         python_info = src.product.get_product_config(config, "Python")
         pv = '.'.join(python_info.version.split('.')[:2])
     assert pv is not None, "$PYTHON_VERSION not defined"
-    yacsgen_dir = os.path.join(yacsgen_dir, "lib", "python%s" % pv,
-                               "site-packages")
+    yacsgen_dir = os.path.join(yacsgen_dir, "lib", "python%s" % pv, "site-packages")
     c = check_module_generator(yacsgen_dir)
     if c is not None:
-        return c
+        return RCO.ReturnCode("OK", "check_module_generator on %s" % yacsgen_dir, c)

-    return (False,
-            _("The python module module_generator was not found in YACSGEN"))
+    return RCO.ReturnCode("KO", _("The python module module_generator was not found in YACSGEN"))
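Throughout this commit, integer and tuple return values are replaced by RCO.ReturnCode
objects. A minimal sketch of the API the new code assumes -- the method names isOk,
getWhy and getValue and the "OK"/"KO" statuses are taken from the hunks above, but the
implementation itself is an assumption, not the real src/returnCode.py:

class ReturnCode(object):
    """Minimal sketch of the return-code object used by the new command code."""
    def __init__(self, status, why="", value=None):
        self.status = status   # "OK" or "KO"
        self.why = why         # human-readable reason, shown by logger.error(...)
        self.value = value     # optional payload, e.g. the yacsgen directory path

    def isOk(self):
        return self.status == "OK"

    def getWhy(self):
        return self.why

    def getValue(self):
        return self.value

    def __add__(self, other):
        # composition used in the jobs.py 'finally' block below: "KO" dominates
        status = "OK" if (self.isOk() and other.isOk()) else "KO"
        return ReturnCode(status, "%s\n%s" % (self.why, other.why))

# usage as in check_yacsgen above:
#   rc = check_yacsgen(config, options.yacsgen, logger)
#   if not rc.isOk():
#       return rc
#   yacsgen_dir = rc.getValue()
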
diff --git a/commands/init.py b/commands/init.py
index ddd713a..3a04ccc 100644
--- a/commands/init.py
+++ b/commands/init.py
@@ -76,8 +76,7 @@ class Command(_BaseCommand):
     options = self.getOptions()

     # Print some informations
-    logger.write(_('Local Settings of SAT %s\n\n') % \
-                 UTS.label(config.VARS.salometoolsway), 1)
+    logger.info(_('Local Settings of SAT %s') % UTS.label(config.VARS.salometoolsway))

     res = 0

@@ -128,10 +127,10 @@ def set_local_value(config, key, value, logger):
     except Exception as e:
         err = str(e)
         msg = _("Unable to update the local.pyconf file: %s\n") % err
-        logger.write(msg, 1)
-        return 1
+        logger.error(msg)
+        return RCO.ReturnCode("KO", msg)

-    return 0
+    return RCO.ReturnCode("OK")

 def display_local_values(config, logger):
     """ Display the base path

diff --git a/commands/job.py b/commands/job.py
index ac03f09..a767eba 100644
--- a/commands/job.py
+++ b/commands/job.py
@@ -116,8 +116,7 @@ Use the --list option to get the possible files.""") % UTS.blue(fPyconf)
             found = True
             break
     if not found:
-        msg = _("Impossible to find the job %s in %s\n" % \
-              (options.job, file_jobs_cfg)
+        msg = _("Impossible to find the job %s in %s\n") % (options.job, file_jobs_cfg)
         logger.error(msg)
         return 1

@@ -158,7 +157,6 @@ Use the --list option to get the possible files.""") % UTS.blue(fPyconf)
                   "." * (len_max_command - len(command)) + " ")
         error = ""
-        stack = ""

         # Execute the command
         code = sat_command(end_cmd,
                            options = options,
@@ -173,18 +171,14 @@ Use the --list option to get the possible files.""") % UTS.blue(fPyconf)
         else:
             if sat_command_name != "test":
                 res = 1
-            logger.write(': %s\n' % error)
-
-        if len(stack) > 0:
-            logger.write('stack: %s\n' % stack, 3)
+            logger.info(': %s\n' % error)

     # Print the final state
     if res == 0:
-        final_status = "<OK>"
+        final_status = "OK"
     else:
-        final_status = "<KO>"
-
-    logger.info(_("\nCommands: %s (%d/%d)\n") % \
-                (final_status, nb_pass, len(commands)))
-
-    return res
+        final_status = "KO"
+
+    msg = "Commands: <%s> (%d/%d)" % (final_status, nb_pass, len(commands))
+    logger.info(msg)
+    return RCO.ReturnCode(final_status, msg)
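The other recurring edit replaces old-style logger.write(msg, level) calls with
leveled methods (debug/info/warning/error/critical). A rough sketch of how the old
sat verbosity levels could map onto standard logging -- the mapping below is my
reading of the hunks, not code from the sat logger itself:

import logging

class WriteCompatLogger(object):
    """Hypothetical shim: forwards old-style write(msg, level) calls to a
    standard logging.Logger, as the hunks in this commit do by hand."""
    # sat verbosity levels (1..5) roughly correspond to logging levels
    _LEVELS = {1: logging.INFO, 2: logging.INFO, 3: logging.INFO,
               4: logging.DEBUG, 5: logging.DEBUG}

    def __init__(self, logger):
        self._logger = logger

    def write(self, msg, level=1, decorate=True):
        self._logger.log(self._LEVELS.get(level, logging.INFO), msg.rstrip("\n"))

    def flush(self):
        pass  # logging handlers flush themselves, so the explicit flush() calls go away
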
diff --git a/commands/jobs.py b/commands/jobs.py
index d9554f3..ba97fcf 100644
--- a/commands/jobs.py
+++ b/commands/jobs.py
@@ -34,8 +34,9 @@ import src.ElementTree as etree
 import src.debug as DBG
 import src.returnCode as RCO
 import src.utilsSat as UTS
-from src.salomeTools import _BaseCommand
+import src.xmlManager as XMLMGR
 import src.pyconf as PYCONF
+from src.salomeTools import _BaseCommand

 STYLESHEET_GLOBAL = "jobs_global_report.xsl"
 STYLESHEET_BOARD = "jobs_board_report.xsl"
@@ -137,14 +138,14 @@ class Command(_BaseCommand):
             if not f.endswith('.pyconf'):
                 continue
             cfilename = f[:-7]
-            logger.write("%s\n" % cfilename)
-        return 0
+            logger.info("%s\n" % cfilename)
+        return RCO.ReturnCode("OK", "jobs command done")

     # Make sure the jobs_config option has been called
     if not options.jobs_cfg:
-        message = _("The option --jobs_config is required\n")
-        logger.error(message)
-        return 1
+        msg = _("The option --jobs_config is required\n")
+        logger.error(msg)
+        return RCO.ReturnCode("KO", msg)

     # Find the file in the directories, unless it is a full path
     # merge all in a config
@@ -158,7 +159,7 @@ class Command(_BaseCommand):
             The file configuration %s was not found.
Use the --list option to get the possible files.\n""") % config_file
             logger.error(msg)
-            return 1
+            return RCO.ReturnCode("KO", msg)
         l_conf_files_path.append(file_jobs_cfg)
         # Read the config that is in the file
         one_config_jobs = src.read_config_from_a_file(file_jobs_cfg)
@@ -197,22 +198,17 @@ Use the --list option to get the possible files.\n""") % config_file
                               logger.txtFileName))

     # Initialization
-    today_jobs = Jobs(runner,
-                      logger,
-                      path_pyconf,
-                      config_jobs)
+    today_jobs = Jobs(runner, logger, path_pyconf, config_jobs)

     # SSH connection to all machines
     today_jobs.ssh_connection_all_machines()
     if options.test_connection:
-        return 0
+        return RCO.ReturnCode("OK", "jobs ssh_connection done")

     gui = None
     if options.publish:
-        logger.write(UTS.info(
-                     _("Initialize the xml boards : ")), 5)
-        logger.flush()
-
+        logger.debug(_("Initialize the xml boards : "))
+
         # Copy the stylesheets in the log directory
         log_dir = log_dir
         xsl_dir = os.path.join(config.VARS.srcDir, 'xsl')
@@ -238,16 +234,15 @@ Use the --list option to get the possible files.\n""") % config_file
         logger.debug("\n\n")

         # Display the list of the xml files
-        logger.write(UTS.info(("Here is the list of published"
-                               " files :\n")), 4)
-        logger.write("%s\n" % gui.xml_global_file.logFile, 4)
+        logger.info(("List of published files:\n%s\n") % gui.xml_global_file.logFile)
+        msg = ""
         for board in gui.d_xml_board_files.keys():
             file_path = gui.d_xml_board_files[board].logFile
             file_name = os.path.basename(file_path)
-            logger.write("%s\n" % file_path, 4)
+            msg += "%s\n" % file_path
             logger.add_link(file_name, "board", 0, board)
-        logger.write("\n", 4)
+        logger.info(msg)

         today_jobs.gui = gui

@@ -257,31 +252,31 @@ Use the --list option to get the possible files.\n""") % config_file
         today_jobs.run_jobs()
     except KeyboardInterrupt:
         interruped = True
-        logger.critical(UTS.red(_("KeyboardInterrupt forced interruption\n"))
+        logger.critical(UTS.red(_("KeyboardInterrupt forced interruption")))
     except Exception as e:
         # verbose debug message with traceback
-        msg = _("Exception raised, the jobs loop has been interrupted:\n\n%s\n")
-        import traceback
-        logger.critical( msg % UTS.yellow(traceback.format_exc()))
-
+        msg = _("Exception raised, the jobs loop has been interrupted:\n\n%s")
+        import traceback
+        logger.critical(msg % UTS.yellow(traceback.format_exc()))
     finally:
-        res = 0
+        # make clear kill subprocess
+        res = RCO.ReturnCode("OK", "jobs command finally done")
         if interruped:
-            res = 1
             msg = _("Killing the running jobs and trying to get the corresponding logs\n")
-            logger.write(UTS.red(msg))
+            logger.warning(UTS.red(msg))
+            res = RCO.ReturnCode("KO", msg)

         # find the potential not finished jobs and kill them
         for jb in today_jobs.ljobs:
             if not jb.has_finished():
-                res = 1
+                res += RCO.ReturnCode("KO", "job %s has not finished" % jb.name)
                 try:
                     jb.kill_remote_process()
                 except Exception as e:
-                    msg = _("Failed to kill job %(1)s: %(2)s\n") % {"1": jb.name, "2": e}
-                    logger.write(UTS.red(msg))
+                    msg = _("Failed to kill job %s: %s\n") % (jb.name, e)
+                    logger.warning(UTS.red(msg))
+                    res += RCO.ReturnCode("KO", msg)
             if jb.res_job != "0":
-                res = 1
+                res += RCO.ReturnCode("KO", "job %s fail" % jb.name)
         if interruped:
             if today_jobs.gui:
                 today_jobs.gui.last_update(_("Forced interruption"))
@@ -349,24 +344,24 @@ class Machine(object):
         return message

     def successfully_connected(self, logger):
-        '''Verify if the connection to the remote machine has succeed
+        """\
+        Verify if the connection to the remote machine has succeed

         :param logger src.logger.Logger: The logger instance
         :return: True if the connection has succeed, False if not
         :rtype: bool
-        '''
+        """
         if self._connection_successful == None:
             message = _("""\
-WARNING : trying to ask if the connection to
-  (name: %(1)s host: %(2)s, port: %(3)s, user: %(4)s) is OK
-  whereas there were no connection request""" %
-                        {"1": self.name, "2": self.host, "3": self.port, "4": self.user} )
-            logger.write( UTS.red(message))
+Ask if the connection
+(name: %(1)s host: %(2)s, port: %(3)s, user: %(4)s) is OK
+whereas there were no connection request""" % \
+                        {"1": self.name, "2": self.host, "3": self.port, "4": self.user} )
+            logger.critical(UTS.red(message))
         return self._connection_successful

     def copy_sat(self, sat_local_path, job_file):
-        '''Copy salomeTools to the remote machine in self.sat_path
-        '''
+        """Copy salomeTools to the remote machine in self.sat_path"""
         res = 0
         try:
             # open a sftp connection
@@ -378,8 +373,7 @@ WARNING : trying to ask if the connection to
             # put the job configuration file in order to make it reachable
             # on the remote machine
             remote_job_file_name = ".%s" % os.path.basename(job_file)
-            self.sftp.put(job_file, os.path.join(self.sat_path,
-                                                 remote_job_file_name))
+            self.sftp.put(job_file, os.path.join(self.sat_path, remote_job_file_name))
         except Exception as e:
             res = str(e)
             self._connection_successful = False
@@ -460,26 +454,28 @@ WARNING : trying to ask if the connection to
         self.ssh.close()

     def write_info(self, logger):
-        '''Prints the informations relative to the machine in the logger
-           (terminal traces and log file)
+        """\
+        Prints the informations relative to the machine in the logger
+        (terminal traces and log file)

         :param logger src.logger.Logger: The logger instance
         :return: Nothing
         :rtype: N\A
-        '''
-        logger.write("host : " + self.host + "\n")
-        logger.write("port : " + str(self.port) + "\n")
-        logger.write("user : " + str(self.user) + "\n")
+        """
         if self.successfully_connected(logger):
-            status = src.OK_STATUS
+            msg = "<OK>"
         else:
-            status = src.KO_STATUS
-        logger.write("Connection : " + status + "\n\n")
+            msg = "<KO>"
+        msg += " host: %s, " % self.host
+        msg += "port: %s, " % str(self.port)
+        msg += "user: %s" % str(self.user)
+        logger.info("Connection %s" % msg )

 class Job(object):
-    '''Class to manage one job
-    '''
+    """\
+    Class to manage one job
+    """
     def __init__(self,
                  name,
                  machine,
@@ -608,22 +604,21 @@ class Job(object):
         return self._has_finished

     def get_log_files(self):
-        """Get the log files produced by the command launched
-           on the remote machine, and put it in the log directory of the user,
-           so they can be accessible from
+        """\
+        Get the log files produced by the command launched
+        on the remote machine, and put it in the log directory of the user,
+        so they can be accessible from
         """
         # Do not get the files if the command is not finished
         if not self.has_finished():
             msg = _("Trying to get log files whereas the job is not finished.")
-            self.logger.write(UTS.red(msg))
+            self.logger.warning(UTS.red(msg))
             return

         # First get the file that contains the list of log files to get
         tmp_file_path = src.get_tmp_filename(self.config, "list_log_files.txt")
         remote_path = os.path.join(self.machine.sat_path, "list_log_files.txt")
-        self.machine.sftp.get(
-                    remote_path,
-                    tmp_file_path)
+        self.machine.sftp.get(remote_path, tmp_file_path)

         # Read the file and get the result of the command and all the log files
         # to get
@@ -774,12 +769,10 @@ class Job(object):

         # Prevent multiple run
         if self.has_begun():
-            msg = _("WARNING: A job can only be launched one time")
-            msg2 = _("Trying to launch the job \"%s\" whereas it has "
-                     "already been launched.") % self.name
-            self.logger.write(
-                UTS.red("%s\n%s\n" % (msg,msg2)) )
-            return
+            msg = _("A job can only be launched one time")
+            msg2 = _("Trying to launch the job '%s' whereas it has already been launched.") % self.name
+            self.logger.warning( UTS.red("%s\n%s\n" % (msg,msg2)) )
+            return RCO.ReturnCode("KO", msg2)

         # Do not execute the command if the machine could not be reached
         if not self.machine.successfully_connected(self.logger):
@@ -812,33 +805,31 @@ class Job(object):
         """\
         Display on the terminal all the job's information
         """
-        self.logger.write("name : " + self.name + "\n")
-        if self.after:
-            self.logger.write("after : %s\n" % self.after)
-        self.logger.write("Time elapsed : %4imin %2is \n" %
-                          (self.total_duration()//60 , self.total_duration()%60))
+        msg = "name : %s\n" % self.name
+        if self.after:
+            msg += "after : %s\n" % self.after
+        msg += "Time elapsed : %4imin %2is \n" % (self.total_duration()//60 , self.total_duration()%60)
         if self._T0 != -1:
-            self.logger.write("Begin time : %s\n" %
-                              time.strftime('%Y-%m-%d %H:%M:%S',
-                                            time.localtime(self._T0)) )
+            msg += "Begin time : %s\n" % \
+                   time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(self._T0))
         if self._Tf != -1:
-            self.logger.write("End time : %s\n\n" %
-                              time.strftime('%Y-%m-%d %H:%M:%S',
-                                            time.localtime(self._Tf)) )
+            msg += "End time : %s\n\n" % \
+                   time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(self._Tf))
+
+        self.logger.info(msg)

         machine_head = "Informations about connection :\n"
         underline = (len(machine_head) - 2) * "-"
-        self.logger.write(UTS.info(
-                                machine_head+underline+"\n"))
+        self.logger.info(machine_head+underline)
         self.machine.write_info(self.logger)

-        self.logger.write(UTS.info("out : \n"))
+        msg = "out : \n"
         if self.out == "":
-            self.logger.write("Unable to get output\n")
+            msg += "Unable to get output\n"
         else:
-            self.logger.write(self.out + "\n")
-        self.logger.write(UTS.info("err : \n"))
-        self.logger.write(self.err + "\n")
+            msg += self.out + "\n"
+        msg += "err :\n%s\n" % self.err
+        self.logger.info(msg)

     def get_status(self):
         """\
@@ -854,14 +845,11 @@ class Job(object):
         if self.cancelled:
             return "Cancelled"
         if self.is_running():
-            return "running since " + time.strftime('%Y-%m-%d %H:%M:%S',
-                                                    time.localtime(self._T0))
+            return "running since " + time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(self._T0))
         if self.has_finished():
             if self.is_timeout():
-                return "Timeout since " + time.strftime('%Y-%m-%d %H:%M:%S',
-                                                        time.localtime(self._Tf))
-            return "Finished since " + time.strftime('%Y-%m-%d %H:%M:%S',
-                                                     time.localtime(self._Tf))
+                return "Timeout since " + time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(self._Tf))
+            return "Finished since " + time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(self._Tf))

 class Jobs(object):
     """\
@@ -954,9 +942,10 @@ class Jobs(object):

         if not "machine" in job_def:
             msg = _("""\
-WARNING: The job '%s' do not have the key 'machine'.
-         This job is ignored.\n""") % job_def.name
-            self.logger.write(UTS.red(msg))
+The job '%s' do not have the key 'machine'.
+This job is ignored.
+""") % job_def.name
+            self.logger.warning(msg)
             continue
         name_machine = job_def.machine

@@ -1009,11 +998,11 @@ class Jobs(object):
         if a_machine == None:
             msg = _("""\
-WARNING: The job '%(job)s' requires the machine '%(machine)s'.
-         This machine is not defined in the configuration file.
-         The job will not be launched.
+The job '%(job)s' requires the machine '%(machine)s'.
+This machine is not defined in the configuration file.
+The job will not be launched.
 """) % {"job" : job_def.name, "machine" : name_machine}
-            self.logger.write(UTS.red(msg))
+            self.logger.warning(msg)
             continue

         a_job = self.define_job(job_def, a_machine)
@@ -1026,14 +1015,13 @@ class Jobs(object):
         self.lhosts = host_list

     def ssh_connection_all_machines(self, pad=50):
-        '''Function that do the ssh connection to every machine
-           to be used today.
+        """\
+        Do the ssh connection to every machine to be used today.

         :return: Nothing
         :rtype: N\A
-        '''
-        self.logger.write(UTS.info((
-                        "Establishing connection with all the machines :\n")))
+        """
+        self.logger.info( "Establishing connection with all the machines :\n")
         for machine in self.lmachines:
             # little algorithm in order to display traces
             begin_line = (_("Connection to %s: ") % machine.name)
@@ -1043,26 +1031,23 @@ class Jobs(object):
             endline = (pad - len(begin_line)) * "." + " "

             step = "SSH connection"
-            self.logger.write( begin_line + endline + step)
-            self.logger.flush()
+            self.logger.info( begin_line + endline + step)

             # the call to the method that initiate the ssh connection
             msg = machine.connect(self.logger)

             # Copy salomeTools to the remote machine
             if machine.successfully_connected(self.logger):
                 step = _("Remove SAT")
-                self.logger.write('\r%s%s%s' % (begin_line, endline, 20 * " "),3)
-                self.logger.write('\r%s%s%s' % (begin_line, endline, step), 3)
+                self.logger.info('\r%s%s%s' % (begin_line, endline, 20 * " "))
+                self.logger.info('\r%s%s%s' % (begin_line, endline, step))
                 (__, out_dist, __) = machine.exec_command(
-                                        "rm -rf %s" % machine.sat_path,
-                                        self.logger)
+                                        "rm -rf %s" % machine.sat_path, self.logger)
                 out_dist.read()
-                self.logger.flush()

                 step = _("Copy SAT")
-                self.logger.write('\r%s%s%s' % (begin_line, endline, 20 * " "),3)
-                self.logger.write('\r%s%s%s' % (begin_line, endline, step), 3)
-                self.logger.flush()
+                self.logger.info('\r%s%s%s' % (begin_line, endline, 20 * " "))
+                self.logger.info('\r%s%s%s' % (begin_line, endline, step))
+
                 res_copy = machine.copy_sat(self.runner.cfg.VARS.salometoolsway,
                                             self.job_file_path)
@@ -1085,19 +1070,19 @@ class Jobs(object):

                 # Print the status of the copy
                 if res_copy == 0:
-                    self.logger.write('\r%s' % \
-                        ((len(begin_line)+len(endline)+20) * " "), 3)
+                    self.logger.info('\r%s' % \
+                        ((len(begin_line)+len(endline)+20) * " "))
                     self.logger.info('\r%s%s%s' % (begin_line, endline, "<OK>"))
                 else:
-                    self.logger.write('\r%s' % \
-                        ((len(begin_line)+len(endline)+20) * " "), 3)
+                    self.logger.info('\r%s' % \
+                        ((len(begin_line)+len(endline)+20) * " "))
                     self.logger.info('\r%s%s%s %s' % \
                         (begin_line, endline, "<KO>",
                          _("Copy of SAT failed: %s") % res_copy))
             else:
-                self.logger.write('\r%s' %
-                    ((len(begin_line)+len(endline)+20) * " "), 3)
-                self.logger.write('\r%s%s%s %s' % (begin_line, endline, "<KO>", msg))
+                self.logger.info('\r%s' %
+                    ((len(begin_line)+len(endline)+20) * " "))
+                self.logger.info('\r%s%s%s %s' % (begin_line, endline, "<KO>", msg))
         self.logger.info("\n")
         self.logger.info("\n")
@@ -1191,16 +1176,15 @@ class Jobs(object):
         return text_out

     def display_status(self, len_col):
-        '''Takes a lenght and construct the display of the current status
-           of the jobs in an array that has a column for each host.
-           It displays the job that is currently running on the host
-           of the column.
+        """\
+        Takes a length and construct the display of the current status
+        of the jobs in an array that has a column for each host.
+        It displays the job that is currently running on the host of the column.

         :param len_col int: the size of the column
         :return: Nothing
         :rtype: N\A
-        '''
-
+        """
         display_line = ""
         for host_port in self.lhosts:
             jb = self.is_occupied(host_port)
@@ -1211,24 +1195,22 @@ class Jobs(object):
                 display_line += "|" + UTS.info(
                                     self.str_of_length(jb.name, len_col))

-        self.logger.write("\r" + display_line + "|")
-        self.logger.flush()
+        self.logger.info("\r" + display_line + "|")

     def run_jobs(self):
-        '''The main method. Runs all the jobs on every host.
-           For each host, at a given time, only one job can be running.
-           The jobs that have the field after (that contain the job that has
-           to be run before it) are run after the previous job.
-           This method stops when all the jobs are finished.
+        """\
+        The main method. Runs all the jobs on every host.
+        For each host, at a given time, only one job can be running.
+        The jobs that have the field after (that contain the job that has
+        to be run before it) are run after the previous job.
+        This method stops when all the jobs are finished.

         :return: Nothing
         :rtype: N\A
-        '''
-
+        """
         # Print header
-        self.logger.write(
-            UTS.info(_('Executing the jobs :\n')) )
+        self.logger.info(_('Executing the jobs :\n'))
         text_line = ""
         for host_port in self.lhosts:
             host = host_port[0]
@@ -1240,10 +1222,7 @@ class Jobs(object):
                             "("+host+", "+str(port)+")", self.len_columns)

         tiret_line = " " + "-"*(len(text_line)-1) + "\n"
-        self.logger.write(tiret_line)
-        self.logger.write(text_line + "|\n")
-        self.logger.write(tiret_line)
-        self.logger.flush()
+        self.logger.info(tiret_line + text_line + "|\n" + tiret_line)

         # The infinite loop that runs the jobs
         l_jobs_not_started = src.deepcopy_list(self.ljobs)
@@ -1288,32 +1267,29 @@ class Jobs(object):
             # Make sure that the proc is not entirely busy
             time.sleep(0.001)

-        self.logger.write("\n")
-        self.logger.write(tiret_line)
-        self.logger.write("\n\n")
+        self.logger.info("\n" + tiret_line + "\n\n")

         if self.gui:
             self.gui.update_xml_files(self.ljobs)
             self.gui.last_update()

     def write_all_results(self):
-        '''Display all the jobs outputs.
+        """\
+        Display all the jobs outputs.

         :return: Nothing
         :rtype: N\A
-        '''
-
+        """
         for jb in self.ljobs:
-            self.logger.write(UTS.label(
-                "#------- Results for job %s -------#\n" % jb.name))
+            self.logger.info("#------- Results for job %s -------#\n" % jb.name)
             jb.write_results()
-            self.logger.write("\n\n")
+            self.logger.info("\n\n")

 class Gui(object):
-    '''Class to manage the the xml data that can be displayed in a browser to
-       see the jobs states
-    '''
-
+    """\
+    Class to manage the xml data that can be displayed in a browser
+    to see the jobs states
+    """
     def __init__(self,
                  xml_dir_path,
                  l_jobs,
@@ -1321,15 +1297,14 @@ class Gui(object):
                  prefix,
                  logger,
                  file_boards=""):
-        '''Initialization
+        """\
+        Initialization

-        :param xml_dir_path str: The path to the directory where to put
-                                 the xml resulting files
+        :param xml_dir_path str: The path to the directory where to put the xml resulting files
         :param l_jobs List: the list of jobs that run today
         :param l_jobs_not_today List: the list of jobs that do not run today
-        :param file_boards str: the file path from which to read the
-                                expected boards
-        '''
+        :param file_boards str: the file path from which to read the expected boards
+        """
         # The logging instance
         self.logger = logger

@@ -1351,8 +1326,7 @@ class Gui(object):
         self.global_name = "global_report"
         xml_global_path = os.path.join(self.xml_dir_path,
                                        self.global_name + ".xml")
-        self.xml_global_file = src.xmlManager.XmlLogFile(xml_global_path,
-                                                         "JobsReport")
+        self.xml_global_file = XMLMGR.XmlLogFile(xml_global_path, "JobsReport")

         # Find history for each job
         self.history = {}
@@ -1373,19 +1347,19 @@ class Gui(object):
         :param name str: the board name
         '''
         xml_board_path = os.path.join(self.xml_dir_path, name + ".xml")
-        self.d_xml_board_files[name] = src.xmlManager.XmlLogFile(
-                                                    xml_board_path,
-                                                    "JobsReport")
+        self.d_xml_board_files[name] = XMLMGR.XmlLogFile(xml_board_path,"JobsReport")
         self.d_xml_board_files[name].add_simple_node("distributions")
         self.d_xml_board_files[name].add_simple_node("applications")
         self.d_xml_board_files[name].add_simple_node("board", text=name)

     def initialize_boards(self, l_jobs, l_jobs_not_today):
-        '''Get all the first information needed for each file and write the
-           first version of the files
+        """\
+        Get all the first information needed for each file and write the
+        first version of the files
+
         :param l_jobs List: the list of jobs that run today
         :param l_jobs_not_today List: the list of jobs that do not run today
-        '''
+        """
         # Get the boards to fill and put it in a dictionary
         # {board_name : xml instance corresponding to the board}
         for job in l_jobs + l_jobs_not_today:
@@ -1399,8 +1373,7 @@ class Gui(object):
             if board not in self.d_xml_board_files:
                 self.add_xml_board(board)
             root_node = self.d_xml_board_files[board].xmlroot
-            src.xmlManager.append_node_attrib(root_node,
-                                              {"input_file" : self.file_boards})
+            XMLMGR.append_node_attrib(root_node, {"input_file" : self.file_boards})

         # Loop over all jobs in order to get the lines and columns for each
         # xml file
@@ -1411,6 +1384,8 @@ class Gui(object):
             d_application[board] = []

         l_hosts_ports = []
+
+        ASNODE = XMLMGR.add_simple_node # shortcut

         for job in l_jobs + l_jobs_not_today:

@@ -1428,22 +1403,15 @@ class Gui(object):
                     if (distrib not in [None, ''] and
                                             distrib not in d_dist[board]):
                         d_dist[board].append(distrib)
-                        src.xmlManager.add_simple_node(
-                            self.d_xml_board_files[board].xmlroot.find(
-                                                            'distributions'),
-                            "dist",
-                            attrib={"name" : distrib})
+                        ASNODE( self.d_xml_board_files[board].xmlroot.find('distributions'),
+                                "dist", attrib={"name" : distrib} )

                 if board_job == board:
                     if (application not in [None, ''] and
                                     application not in d_application[board]):
                         d_application[board].append(application)
-                        src.xmlManager.add_simple_node(
-                            self.d_xml_board_files[board].xmlroot.find(
-                                                            'applications'),
-                            "application",
-                            attrib={
-                                "name" : application})
+                        ASNODE( self.d_xml_board_files[board].xmlroot.find('applications'),
+                                "application", attrib={"name" : application} )

         # Verify that there are no missing application or distribution in the
         # xml board files (regarding the input boards)
@@ -1453,41 +1421,30 @@ class Gui(object):
                 continue
             for dist in self.d_input_boards[board]["rows"]:
                 if dist not in l_dist:
-                    src.xmlManager.add_simple_node(
-                        self.d_xml_board_files[board].xmlroot.find(
-                                                            'distributions'),
-                        "dist",
-                        attrib={"name" : dist})
+                    ASNODE( self.d_xml_board_files[board].xmlroot.find('distributions'),
+                            "dist", attrib={"name" : dist} )
             l_appli = d_application[board]
             for appli in self.d_input_boards[board]["columns"]:
                 if appli not in l_appli:
-                    src.xmlManager.add_simple_node(
-                        self.d_xml_board_files[board].xmlroot.find(
-                                                            'applications'),
-                        "application",
-                        attrib={"name" : appli})
+                    ASNODE( self.d_xml_board_files[board].xmlroot.find('applications'),
+                            "application", attrib={"name" : appli} )

         # Initialize the hosts_ports node for the global file
-        self.xmlhosts_ports = self.xml_global_file.add_simple_node(
-                                                                "hosts_ports")
+        self.xmlhosts_ports = self.xml_global_file.add_simple_node( "hosts_ports")
         for host, port in l_hosts_ports:
             host_port = "%s:%i" % (host, port)
-            src.xmlManager.add_simple_node(self.xmlhosts_ports,
-                                           "host_port",
-                                           attrib={"name" : host_port})
+            ASNODE(self.xmlhosts_ports, "host_port", attrib={"name" : host_port})

         # Initialize the jobs node in all files
-        for xml_file in [self.xml_global_file] + list(
-                                            self.d_xml_board_files.values()):
+        for xml_file in [self.xml_global_file] + list(self.d_xml_board_files.values()):
             xml_jobs = xml_file.add_simple_node("jobs")
             # Get the jobs present in the config file but
             # that will not be launched today
             self.put_jobs_not_today(l_jobs_not_today, xml_jobs)

             # add also the infos node
-            xml_file.add_simple_node("infos",
-                                     attrib={"name" : "last update",
-                                             "JobsCommandStatus" : "running"})
+            xml_file.add_simple_node(
+                "infos", attrib={"name" : "last update", "JobsCommandStatus" : "running"} )

             # and put the history node
             history_node = xml_file.add_simple_node("history")
@@ -1500,10 +1457,7 @@ class Gui(object):
                 if oExpr.search(file_name):
                     date = os.path.basename(file_name).split("_")[0]
                     file_path = os.path.join(self.xml_dir_path, file_name)
-                    src.xmlManager.add_simple_node(history_node,
-                                                   "link",
-                                                   text=file_path,
-                                                   attrib={"date" : date})
+                    ASNODE(history_node, "link", text=file_path, attrib={"date" : date})

         # Find in each board the squares that needs to be filled regarding the
         for board in self.d_input_boards.keys():
             xml_root_board = self.d_xml_board_files[board].xmlroot
             # Find the missing jobs for today
-            xml_missing = src.xmlManager.add_simple_node(xml_root_board,
-                                                         "missing_jobs")
+            xml_missing = ASNODE(xml_root_board, "missing_jobs")
             for row, column in self.d_input_boards[board]["jobs"]:
                 found = False
                 for job in l_jobs:
@@ -1521,14 +1474,9 @@ class Gui(object):
                         found = True
                         break
                 if not found:
-                    src.xmlManager.add_simple_node(xml_missing,
-                                                   "job",
-                                                   attrib={"distribution" : row,
-                                                           "application" : column })
+                    ASNODE(xml_missing, "job", attrib={"distribution" : row, "application" : column })
             # Find the missing jobs not today
-            xml_missing_not_today = src.xmlManager.add_simple_node(
-                                                xml_root_board,
-                                                "missing_jobs_not_today")
+            xml_missing_not_today = ASNODE( xml_root_board, "missing_jobs_not_today")
             for row, column in self.d_input_boards[board]["jobs_not_today"]:
                 found = False
                 for job in l_jobs_not_today:
@@ -1537,10 +1485,8 @@ class Gui(object):
                         found = True
                         break
                 if not found:
-                    src.xmlManager.add_simple_node(xml_missing_not_today,
-                                                   "job",
-                                                   attrib={"distribution" : row,
-                                                           "application" : column })
+                    ASNODE( xml_missing_not_today, "job",
+                            attrib={"distribution" : row, "application" : column } )

     def find_history(self, l_jobs, l_jobs_not_today):
         """find, for each job, in the existent xml boards the results for the
@@ -1559,13 +1505,12 @@ class Gui(object):
             if oExpr.search(file_name):
                 file_path = os.path.join(self.xml_dir_path, file_name)
                 try:
-                    global_xml = src.xmlManager.ReadXmlFile(file_path)
+                    global_xml = XMLMGR.ReadXmlFile(file_path)
                     l_globalxml.append(global_xml)
                 except Exception as e:
-                    msg = _("WARNING: the file '%(1)s' can not be read, it will be "
-                            "ignored\n%(2)s") % {"1": file_path, "2": e}
-                    self.logger.write("%s\n" % UTS.red(
-                                                msg), 5)
+                    msg = _("The file '%s' can not be read, it will be ignored\n%s") % \
+                          (file_path, e)
+                    self.logger.warning("%s\n" % msg)

         # Construct the dictionnary self.history
         for job in l_jobs + l_jobs_not_today:
@@ -1573,11 +1518,8 @@ class Gui(object):
             for global_xml in l_globalxml:
                 date = os.path.basename(global_xml.filePath).split("_")[0]
                 global_root_node = global_xml.xmlroot.find("jobs")
-                job_node = src.xmlManager.find_node_by_attrib(
-                                                          global_root_node,
-                                                          "job",
-                                                          "name",
-                                                          job.name)
+                job_node = XMLMGR.find_node_by_attrib(
+                               global_root_node, "job", "name", job.name )
                 if job_node:
                     if job_node.find("remote_log_file_path") is not None:
                         link = job_node.find("remote_log_file_path").text
@@ -1588,47 +1530,37 @@ class Gui(object):
             self.history[job.name] = l_links

     def put_jobs_not_today(self, l_jobs_not_today, xml_node_jobs):
-        '''Get all the first information needed for each file and write the
-           first version of the files
+        """\
+        Get all the first information needed for each file and write the
+        first version of the files

         :param xml_node_jobs etree.Element: the node corresponding to a job
         :param l_jobs_not_today List: the list of jobs that do not run today
-        '''
+        """
+
+        ASNODE = XMLMGR.add_simple_node # shortcut
+
         for job in l_jobs_not_today:
-            xmlj = src.xmlManager.add_simple_node(xml_node_jobs,
-                                                  "job",
-                                                  attrib={"name" : job.name})
-            src.xmlManager.add_simple_node(xmlj, "application", job.application)
-            src.xmlManager.add_simple_node(xmlj,
-                                           "distribution",
-                                           job.machine.distribution)
-            src.xmlManager.add_simple_node(xmlj, "board", job.board)
-            src.xmlManager.add_simple_node(xmlj,
-                                           "commands", " ; ".join(job.commands))
-            src.xmlManager.add_simple_node(xmlj, "state", "Not today")
-            src.xmlManager.add_simple_node(xmlj, "machine", job.machine.name)
-            src.xmlManager.add_simple_node(xmlj, "host", job.machine.host)
-            src.xmlManager.add_simple_node(xmlj, "port", str(job.machine.port))
-            src.xmlManager.add_simple_node(xmlj, "user", job.machine.user)
-            src.xmlManager.add_simple_node(xmlj, "sat_path",
-                                           job.machine.sat_path)
-            xml_history = src.xmlManager.add_simple_node(xmlj, "history")
+            xmlj = ASNODE(xml_node_jobs, "job", attrib={"name" : job.name})
+            ASNODE(xmlj, "application", job.application)
+            ASNODE(xmlj, "distribution", job.machine.distribution)
+            ASNODE(xmlj, "board", job.board)
+            ASNODE(xmlj, "commands", " ; ".join(job.commands))
+            ASNODE(xmlj, "state", "Not today")
+            ASNODE(xmlj, "machine", job.machine.name)
+            ASNODE(xmlj, "host", job.machine.host)
+            ASNODE(xmlj, "port", str(job.machine.port))
+            ASNODE(xmlj, "user", job.machine.user)
+            ASNODE(xmlj, "sat_path", job.machine.sat_path)
+            xml_history = ASNODE(xmlj, "history")
             for i, (date, res_job, link) in enumerate(self.history[job.name]):
                 if i==0:
                     # tag the first one (the last one)
-                    src.xmlManager.add_simple_node(xml_history,
-                                                   "link",
-                                                   text=link,
-                                                   attrib={"date" : date,
-                                                           "res" : res_job,
-                                                           "last" : "yes"})
+                    ASNODE( xml_history, "link", text=link,
+                            attrib={"date" : date, "res" : res_job, "last" : "yes"} )
                 else:
-                    src.xmlManager.add_simple_node(xml_history,
-                                                   "link",
-                                                   text=link,
-                                                   attrib={"date" : date,
-                                                           "res" : res_job,
-                                                           "last" : "no"})
+                    ASNODE( xml_history, "link", text=link,
+                            attrib={"date" : date, "res" : res_job, "last" : "no"} )

     def parse_csv_boards(self, today):
         """ Parse the csv file that describes the boards to produce and fill
@@ -1719,56 +1651,41 @@ class Gui(object):
                                   time.localtime(job._Tf))

             # recreate the job node
-            xmlj = src.xmlManager.add_simple_node(xml_node_jobs,
-                                                  "job",
-                                                  attrib={"name" : job.name})
-            src.xmlManager.add_simple_node(xmlj, "machine", job.machine.name)
-            src.xmlManager.add_simple_node(xmlj, "host", job.machine.host)
-            src.xmlManager.add_simple_node(xmlj, "port", str(job.machine.port))
-            src.xmlManager.add_simple_node(xmlj, "user", job.machine.user)
-            xml_history = src.xmlManager.add_simple_node(xmlj, "history")
+            xmlj = ASNODE(xml_node_jobs, "job", attrib={"name" : job.name})
+            ASNODE(xmlj, "machine", job.machine.name)
+            ASNODE(xmlj, "host", job.machine.host)
+            ASNODE(xmlj, "port", str(job.machine.port))
+            ASNODE(xmlj, "user", job.machine.user)
+            xml_history = ASNODE(xmlj, "history")
             for date, res_job, link in self.history[job.name]:
-                src.xmlManager.add_simple_node(xml_history,
-                                               "link",
-                                               text=link,
-                                               attrib={"date" : date,
-                                                       "res" : res_job})
+                ASNODE( xml_history, "link", text=link,
+                        attrib={"date" : date, "res" : res_job} )

-            src.xmlManager.add_simple_node(xmlj, "sat_path",
-                                           job.machine.sat_path)
-            src.xmlManager.add_simple_node(xmlj, "application", job.application)
-            src.xmlManager.add_simple_node(xmlj, "distribution",
-                                           job.machine.distribution)
-            src.xmlManager.add_simple_node(xmlj, "board", job.board)
-            src.xmlManager.add_simple_node(xmlj, "timeout", str(job.timeout))
-            src.xmlManager.add_simple_node(xmlj, "commands",
-                                           " ; ".join(job.commands))
-            src.xmlManager.add_simple_node(xmlj, "state", job.get_status())
-            src.xmlManager.add_simple_node(xmlj, "begin", T0)
-            src.xmlManager.add_simple_node(xmlj, "end", Tf)
-            src.xmlManager.add_simple_node(xmlj, "out", UTS.cleancolor(job.out))
-            src.xmlManager.add_simple_node(xmlj, "err", UTS.cleancolor(job.err))
-            src.xmlManager.add_simple_node(xmlj, "res", str(job.res_job))
+            ASNODE(xmlj, "sat_path", job.machine.sat_path)
+            ASNODE(xmlj, "application", job.application)
+            ASNODE(xmlj, "distribution", job.machine.distribution)
+            ASNODE(xmlj, "board", job.board)
+            ASNODE(xmlj, "timeout", str(job.timeout))
+            ASNODE(xmlj, "commands", " ; ".join(job.commands))
+            ASNODE(xmlj, "state", job.get_status())
+            ASNODE(xmlj, "begin", T0)
+            ASNODE(xmlj, "end", Tf)
+            ASNODE(xmlj, "out", UTS.cleancolor(job.out))
+            ASNODE(xmlj, "err", UTS.cleancolor(job.err))
+            ASNODE(xmlj, "res", str(job.res_job))
             if len(job.remote_log_files) > 0:
-                src.xmlManager.add_simple_node(xmlj,
-                                               "remote_log_file_path",
-                                               job.remote_log_files[0])
+                ASNODE(xmlj, "remote_log_file_path", job.remote_log_files[0])
             else:
-                src.xmlManager.add_simple_node(xmlj,
-                                               "remote_log_file_path",
-                                               "nothing")
+                ASNODE(xmlj, "remote_log_file_path", "nothing")

             # Search for the test log if there is any
             l_test_log_files = self.find_test_log(job.remote_log_files)
-            xml_test = src.xmlManager.add_simple_node(xmlj,
-                                                      "test_log_file_path")
+            xml_test = ASNODE(xmlj, "test_log_file_path")
             for test_log_path, res_test, nb_fails in l_test_log_files:
-                test_path_node = src.xmlManager.add_simple_node(xml_test,
-                                                                "path",
-                                                                test_log_path)
+                test_path_node = ASNODE(xml_test, "path", test_log_path)
                 test_path_node.attrib["res"] = res_test
                 test_path_node.attrib["nb_fails"] = nb_fails

-            xmlafter = src.xmlManager.add_simple_node(xmlj, "after", job.after)
+            xmlafter = ASNODE(xmlj, "after", job.after)
             # get the job father
             if job.after is not None:
                 job_father = None
@@ -1781,7 +1698,7 @@ class Gui(object):
                     link = job_father.remote_log_files[0]
                 else:
                     link = "nothing"
-                src.xmlManager.append_node_attrib(xmlafter, {"link" : link})
+                XMLMGR.append_node_attrib(xmlafter, {"link" : link})

             # Verify that the job is to be done today regarding the input csv
             # files
@@ -1791,21 +1708,16 @@ class Gui(object):
                     if (job.machine.distribution == dist and
                         job.application == appli):
                         found = True
-                        src.xmlManager.add_simple_node(xmlj,
-                                                       "extra_job",
-                                                       "no")
+                        ASNODE(xmlj, "extra_job", "no")
                         break
                 if not found:
-                    src.xmlManager.add_simple_node(xmlj,
-                                                   "extra_job",
-                                                   "yes")
+                    ASNODE(xmlj, "extra_job", "yes")

         # Update the date
         xml_node_infos = xml_file.xmlroot.find('infos')
-        src.xmlManager.append_node_attrib(xml_node_infos,
-                    attrib={"value" :
-                    datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")})
+        XMLMGR.append_node_attrib( xml_node_infos,
+            attrib={"value" : datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")} )

     def find_test_log(self, l_remote_log_files):
@@ -1821,7 +1733,7 @@ class Gui(object):
         for file_path in l_remote_log_files:
             dirname = os.path.basename(os.path.dirname(file_path))
             file_name = os.path.basename(file_path)
-            regex = src.logger.log_all_command_file_expression
+            regex = UTS._log_all_command_file_expression
             oExpr = re.compile(regex)
             if dirname == "TEST" and oExpr.search(file_name):
                 # find the res of the command
@@ -1844,7 +1756,7 @@ class Gui(object):
         '''
         for xml_file in [self.xml_global_file] + list(self.d_xml_board_files.values()):
             xml_node_infos = xml_file.xmlroot.find('infos')
-            src.xmlManager.append_node_attrib(xml_node_infos,
+            XMLMGR.append_node_attrib(xml_node_infos,
                                               attrib={"JobsCommandStatus" : finish_status})
         # Write the file
         self.write_xml_files()
diff --git a/commands/launcher.py b/commands/launcher.py
index 032af1e..898207e 100644
--- a/commands/launcher.py
+++ b/commands/launcher.py
@@ -161,9 +161,9 @@ def generate_launch_file(config,
     # Display some information
     if display:
         # Write the launcher file
-        logger.write(_("Generating launcher for %s :\n") %
-                     UTS.label(config.VARS.application), 1)
-        logger.write("  %s\n" % UTS.label(filepath), 1)
+        msg = _("Generating launcher for %s :\n  %s\n") % \
+              (UTS.label(config.VARS.application), UTS.label(filepath))
+        logger.info(msg)

     # open the file and write into it
     launch_file = open(filepath, "w")

diff --git a/commands/log.py b/commands/log.py
index 2ba5ea0..0c5ccdb 100644
--- a/commands/log.py
+++ b/commands/log.py
@@ -28,8 +28,9 @@ import stat
 import src.debug as DBG
 import src.returnCode as RCO
 import src.utilsSat as UTS
-from src.salomeTools import _BaseCommand
+import src.xmlManager as XMLMGR
 import src.system as SYSS
+from src.salomeTools import _BaseCommand

 # Compatibility python 2/3 for input function
 # input stays input for python 3 and input = raw_input for python 2
@@ -110,7 +111,7 @@ class Command(_BaseCommand):
     if options.clean:
         nbClean = options.clean
         # get the list of files to remove
-        lLogs = UTS.list_log_file(logDir, UTS.log_all_command_file_expression)
+        lLogs = UTS.list_log_file(logDir, UTS._log_all_command_file_expression)
         nbLogFiles = len(lLogs)
         # Delete all if the invoked number is bigger than the number of log files
         if nbClean > nbLogFiles:
@@ -204,8 +205,7 @@ class Command(_BaseCommand):
     # loop on all files and print it with date, time and command name
     for __, date, hour, cmd, cmdAppli in lLogsFiltered:
         num = UTS.label("%2d" % (nb_logs - index))
-        logger.write("%s: %13s %s %s %s\n" %
-                     (num, cmd, date, hour, cmdAppli), 1, False)
+        logger.info("%s: %13s %s %s %s\n" % (num, cmd, date, hour, cmdAppli))
         index += 1

     # ask the user what for what command he wants to be displayed
@@ -273,8 +273,7 @@ def remove_log_file(filePath, logger):
     :param logger Logger: the logger instance to use for the print
     '''
     if os.path.exists(filePath):
-        logger.write(UTS.red("Removing ")
-                     + filePath + "\n", 5)
+        logger.debug(UTS.red("Removing %s\n" % filePath))
         os.remove(filePath)

 def print_log_command_in_terminal(filePath, logger):
@@ -286,21 +285,21 @@ def print_log_command_in_terminal(filePath, logger):
     '''
     logger.debug(_("Reading %s\n") % filePath)
     # Instantiate the ReadXmlFile class that reads xml files
-    xmlRead = src.xmlManager.ReadXmlFile(filePath)
+    xmlRead = XMLMGR.ReadXmlFile(filePath)
     # Get the attributes containing the context (user, OS, time, etc..)
     dAttrText = xmlRead.get_attrib('Site')
     # format dAttrText and print the context
     lAttrText = []
     for attrib in dAttrText:
         lAttrText.append((attrib, dAttrText[attrib]))
-    logger.write("\n", 1)
+
     UTS.logger_info_tuples(logger, lAttrText)
     # Get the traces
     command_traces = xmlRead.get_node_text('Log')
     # Print it if there is any
     if command_traces:
-        logger.info(UTS.header(_("Here are the command traces :\n")))
-        logger.info(command_traces + "\n" )
+        msg = _("Here are the command traces :\n%s\n") % command_traces
+        logger.info(msg)

 def getMaxFormat(aListOfStr, offset=1):
     """returns format for columns width as '%-30s"' for example"""
@@ -314,7 +313,7 @@ def show_last_logs(logger, config, log_dirs):
     # list the logs
     nb = len(log_dirs)
     fmt1, maxLen = getMaxFormat(log_dirs, offset=1)
-    fmt2 = "%s: " + fmt1        # "%s: %-30s" for example
+    fmt2 = "%s: " + fmt1 + "\n" # "%s: %-30s\n" for example
     nb_cols = 5 # line ~ no more 100 chars
     if maxLen > 20: nb_cols = 4
@@ -323,14 +322,15 @@ def show_last_logs(logger, config, log_dirs):
     if maxLen > 50: nb_cols = 1
     col_size = (nb / nb_cols) + 1
     for index in range(0, col_size):
+        msg = ""
         for i in range(0, nb_cols):
             k = index + i * col_size
             if k < nb:
                 l = log_dirs[k]
                 str_indice = UTS.label("%2d" % (k+1))
                 log_name = l
-                logger.write(fmt2 % (str_indice, log_name), 1, False)
-        logger.write("\n", 1, False)
+                msg += fmt2 % (str_indice, log_name)
+        logger.info(msg + "\n")

     # loop till exit
     x = -1
@@ -357,8 +357,8 @@ def show_product_last_logs(logger, config, product_log_dir):
         opt.append(str(datetime.datetime.fromtimestamp(my_stat[stat.ST_MTIME])))
         opt.append("(%8.2f)" % (my_stat[stat.ST_SIZE] / 1024.0))

-        logger.write(" %-35s" % " ".join(opt), 1, False)
-        logger.write("%s: %-30s\n" % (str_indice, file_name), 1, False)
+        logger.info(" %-35s" % " ".join(opt))
+        logger.info("%s: %-30s\n" % (str_indice, file_name))

     # loop till exit
     x = -1
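show_last_logs above lays the log names out in up to five columns, with getMaxFormat
choosing the column width from the longest entry. A small standalone illustration of
the same formatting idea -- the helper below is a guess at getMaxFormat's behaviour
from its docstring, not the real implementation:

def get_max_format(strings, offset=1):
    """Return ('%-<width>s', width): a left-aligned column format sized to the longest string."""
    width = max(len(s) for s in strings) + offset
    return "%%-%ds" % width, width

logs = ["20180427_144023_config", "20180427_144100_prepare", "20180427_150000_make"]
fmt1, max_len = get_max_format(logs, offset=1)
fmt2 = "%s: " + fmt1 + "\n"   # e.g. "%s: %-24s\n", as in the comment above
for k, name in enumerate(logs):
    print(fmt2 % ("%2d" % (k + 1), name), end="")
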
diff --git a/commands/make.py b/commands/make.py
index 02762f7..664c937 100644
--- a/commands/make.py
+++ b/commands/make.py
@@ -22,6 +22,7 @@ import re

 import src.debug as DBG
 import src.returnCode as RCO
+import src.utilsSat as UTS
 from src.salomeTools import _BaseCommand

 ########################################################################
@@ -75,12 +76,11 @@ class Command(_BaseCommand):
     products_infos = get_products_list(options, config, logger)

     # Print some informations
-    logger.write(
-        _('Executing the make command in the build directories of the application %s\n') %
-        UTS.label(config.VARS.application), 1)
+    logger.info(
+        _('Executing the make command in the build directories of the application %s\n') % \
+        UTS.label(config.VARS.application))

-    info = [(_("BUILD directory"),
-             os.path.join(config.APPLICATION.workdir, 'BUILD'))]
+    info = [(_("BUILD directory"), os.path.join(config.APPLICATION.workdir, 'BUILD'))]
     UTS.logger_info_tuples(logger, info)

     # Call the function that will loop over all the products and execute
@@ -92,14 +92,14 @@ class Command(_BaseCommand):
     # Print the final state
     nb_products = len(products_infos)
     if res == 0:
-        final_status = "<OK>"
+        final_status = "OK"
     else:
-        final_status = "<KO>"
+        final_status = "KO"

-    logger.info(_("\nMake: %s (%d/%d)\n") % \
-                (final_status, nb_products - res, nb_products))
+    msg = _("\nMake: <%s> (%d/%d)\n") % (final_status, nb_products - res, nb_products)
+    logger.info(msg)

-    return res
+    return RCO.ReturnCode(final_status, msg)

 def get_products_list(options, cfg, logger):
@@ -139,10 +139,10 @@ def get_products_list(options, cfg, logger):
     return products_infos

 def log_step(logger, header, step):
-    logger.write("\r%s%s" % (header, " " * 20), 3)
-    logger.write("\r%s%s" % (header, step), 3)
-    logger.write("\n==== %s \n" % UTS.info(step), 4)
-    logger.flush()
+    msg = "\r%s%s" % (header, " " * 20)
+    msg += "\r%s%s" % (header, step)
+    logger.info(msg)
+    logger.debug("\n==== %s \n" % UTS.info(step))

 def log_res_step(logger, res):
     if res == 0:
@@ -186,19 +186,15 @@ def make_product(p_name_info, make_option, config, logger):
     p_name, p_info = p_name_info

     # Logging
-    logger.write("\n", 4, False)
-    logger.write("################ ", 4)
     header = _("Make of %s") % UTS.label(p_name)
     header += " %s " % ("." * (20 - len(p_name)))
-    logger.write(header, 3)
-    logger.write("\n", 4, False)
-    logger.flush()
+    logger.info(header)

     # Do nothing if he product is not compilable
-    if ("properties" in p_info and "compilation" in p_info.properties and
-        p_info.properties.compilation == "no"):
+    if ("properties" in p_info and \
+        "compilation" in p_info.properties and \
+        p_info.properties.compilation == "no"):
         log_step(logger, header, "ignored")
-        logger.write("\n", 3, False)
         return 0

     # Instantiate the class that manages all the construction commands
@@ -224,15 +220,14 @@ def make_product(p_name_info, make_option, config, logger):

     # Log the result
     if res > 0:
-        logger.write("\r%s%s" % (header, " " * len_end_line), 3)
-        logger.write("\r" + header + "<KO>")
+        logger.info("\r%s%s" % (header, " " * len_end_line))
+        logger.info("\r" + header + "<KO>")
         logger.debug("==== <KO> in make of %s\n" % p_name)
     else:
-        logger.write("\r%s%s" % (header, " " * len_end_line), 3)
-        logger.write("\r" + header + "<OK>")
+        logger.info("\r%s%s" % (header, " " * len_end_line))
+        logger.info("\r" + header + "<OK>")
         logger.debug("==== <OK> in make of %s\n" % p_name)
-    logger.write("\n")
-
+    logger.info("\n")
     return res

 def get_nb_proc(product_info, config, make_option):
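make and makeinstall now both end with the same summary pattern: count failures,
print "Make: <OK|KO> (passed/total)", and return a ReturnCode. The pattern in
isolation, assuming the ReturnCode sketched after generate.py above; the function
name summarize is illustrative, not from the sat sources:

def summarize(command_name, results, logger):
    """results: per-product integer codes, 0 meaning success (as make_product returns)."""
    nb_fail = sum(1 for r in results if r != 0)
    status = "OK" if nb_fail == 0 else "KO"
    msg = "\n%s: <%s> (%d/%d)\n" % (command_name, status, len(results) - nb_fail, len(results))
    logger.info(msg)
    return ReturnCode(status, msg)  # ReturnCode as sketched earlier
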
diff --git a/commands/makeinstall.py b/commands/makeinstall.py
index ce7cd15..2d0f79b 100644
--- a/commands/makeinstall.py
+++ b/commands/makeinstall.py
@@ -73,8 +73,8 @@ class Command(_BaseCommand):
     products_infos = get_products_list(options, config, logger)

     # Print some informations
-    logger.write(_('Executing the make install command in the build directories of the application %s\n') %
-                 UTS.label(config.VARS.application), 1)
+    logger.info(_('Executing the make install command in the build directories of the application %s\n') % \
+                UTS.label(config.VARS.application))

     info = [(_("BUILD directory"),
              os.path.join(config.APPLICATION.workdir, 'BUILD'))]
@@ -87,14 +87,14 @@ class Command(_BaseCommand):
     # Print the final state
     nb_products = len(products_infos)
     if res == 0:
-        final_status = "<OK>"
+        final_status = "OK"
     else:
-        final_status = "<KO>"
+        final_status = "KO"

-    logger.info(_("\nMake install: %s (%d/%d)\n") % \
-                (final_status, nb_products - res, nb_products))
+    msg = _("\nMake install: <%s> (%d/%d)\n") % (final_status, nb_products - res, nb_products)
+    logger.info(msg)

-    return res
+    return RCO.ReturnCode(final_status, msg)

 def get_products_list(options, cfg, logger):
@@ -131,10 +131,9 @@ def get_products_list(options, cfg, logger):
     return products_infos

 def log_step(logger, header, step):
-    logger.write("\r%s%s" % (header, " " * 20), 3)
-    logger.write("\r%s%s" % (header, step), 3)
-    logger.write("\n==== %s \n" % UTS.info(step), 4)
-    logger.flush()
+    logger.info("\r%s%s" % (header, " " * 20))
+    logger.info("\r%s%s" % (header, step))
+    logger.debug("\n==== %s \n" % UTS.info(step))

 def log_res_step(logger, res):
     if res == 0:
@@ -175,20 +174,16 @@ def makeinstall_product(p_name_info, config, logger):
     p_name, p_info = p_name_info

     # Logging
-    logger.write("\n", 4, False)
-    logger.write("################ ", 4)
     header = _("Make install of %s") % UTS.label(p_name)
     header += " %s " % ("." * (20 - len(p_name)))
-    logger.write(header, 3)
-    logger.write("\n", 4, False)
-    logger.flush()
+    logger.info(header)

     # Do nothing if he product is not compilable
-    if ("properties" in p_info and "compilation" in p_info.properties and
-        p_info.properties.compilation == "no"):
+    if ("properties" in p_info and \
+        "compilation" in p_info.properties and \
+        p_info.properties.compilation == "no"):
         log_step(logger, header, "ignored")
-        logger.write("\n", 3, False)
-        return 0
+        return RCO.ReturnCode("OK", "product %s is not compilable" % p_name)

     # Instantiate the class that manages all the construction commands
     # like cmake, make, make install, make test, environment management, etc...
@@ -210,13 +205,13 @@ def makeinstall_product(p_name_info, config, logger):

     # Log the result
     if res > 0:
-        logger.write("\r%s%s" % (header, " " * 20), 3)
-        logger.write("\r" + header + "<KO>")
-        logger.error("==== <KO> in make install of %s\n" % p_name)
+        logger.info("\r%s%s" % (header, " " * 20))
+        logger.info("\r" + header + "<KO>")
+        logger.debug("==== <KO> in make install of %s\n" % p_name)
     else:
-        logger.write("\r%s%s" % (header, " " * 20), 3)
-        logger.write("\r" + header + "<OK>")
-        logger.write("==== <OK> in make install of %s\n" % p_name)
-        logger.write("\n", 3, False)
+        logger.info("\r%s%s" % (header, " " * 20))
+        logger.info("\r" + header + "<OK>")
+        logger.debug("==== <OK> in make install of %s\n" % p_name)
+    logger.info("\n")

     return res

diff --git a/commands/package.py b/commands/package.py
index f730403..b424b89 100644
--- a/commands/package.py
+++ b/commands/package.py
@@ -182,7 +182,7 @@ Use one of the following options:
     src.check_config_has_application(config)

     # Display information
-    logger.write(_("Packaging application %s\n") % \
-                 UTS.label(config.VARS.application), 1)
+    logger.info(_("Packaging application %s\n") % \
+                UTS.label(config.VARS.application))

     # Get the default directory where to put the packages
@@ -257,17 +257,12 @@ check if at least one of the following options was selected:

     # Create a working directory for all files that are produced during the
     # package creation and that will be removed at the end of the command
-    tmp_working_dir = os.path.join(config.VARS.tmp_root,
-                                   config.VARS.datehour)
+    tmp_working_dir = os.path.join(config.VARS.tmp_root, config.VARS.datehour)
     UTS.ensure_path_exists(tmp_working_dir)
-    logger.write("\n", 5)
-    logger.write(_("The temporary working directory: %s\n") % tmp_working_dir, 5)
+    logger.debug(_("The temporary working directory: %s\n") % tmp_working_dir)

-    logger.write("\n", 3)
     msg = _("Preparation of files to add to the archive")
-    logger.write(UTS.label(msg), 2)
-    logger.write("\n", 2)
+    logger.info(UTS.label(msg))

     d_files_to_add={}  # content of the archive
@@ -275,10 +270,7 @@ check if at least one of the following options was selected:
     d_paths_to_substitute={}

     if options.binaries:
-        d_bin_files_to_add = binary_package(config,
-                                            logger,
-                                            options,
-                                            tmp_working_dir)
+        d_bin_files_to_add = binary_package(config, logger, options, tmp_working_dir)
         # for all binaries dir, store the substitution that will be required
         # for extra compilations
         for key in d_bin_files_to_add:
@@ -308,9 +300,8 @@ check if at least one of the following options was selected:
                                                      d_paths_to_substitute,
                                                      "install_bin.sh")
             d_files_to_add.update({"install_bin" : (file_install_bin, "install_bin.sh")})
-            logger.write("substitutions that need to be done later : \n", 5)
-            logger.write(str(d_paths_to_substitute), 5)
-            logger.write("\n", 5)
+            logger.debug("substitutions to be done later:\n%s\n" % str(d_paths_to_substitute))
+
     else:
         # --salomeTool option is not considered when --sources is selected, as this option
         # already brings salomeTool!
@@ -322,7 +313,7 @@ check if at least one of the following options was selected:
         d_files_to_add.update(project_package(options.project, tmp_working_dir))

     if not(d_files_to_add):
-        msg = _("Empty dictionnary to build the archive.\n")
+        msg = _("Empty dictionary to build the archive.\n")
         logger.error(msg)
         return 1

@@ -334,15 +325,13 @@ check if at least one of the following options was selected:
     if options.add_files:
         for file_path in options.add_files:
             if not os.path.exists(file_path):
-                msg = _("WARNING: the file %s is not accessible.\n") % file_path
+                msg = _("The file %s is not accessible.\n") % file_path
+                logger.warning(msg)
                 continue
             file_name = os.path.basename(file_path)
             d_files_to_add[file_name] = (file_path, file_name)

-    logger.write("\n", 2)
-
-    logger.write(UTS.label(_("Actually do the package")), 2)
-    logger.write("\n", 2)
+    msg = UTS.label(_("Actually do the package"))
+    logger.info("\n%s\n" % msg)

     try:
         # Creating the object tarfile
@@ -373,8 +362,9 @@ check if at least one of the following options was selected:

 def add_files(tar, name_archive, d_content, logger, f_exclude=None):
-    '''Create an archive containing all directories and files that are given in
-       the d_content argument.
+    """\
+    Create an archive containing all directories and files that are given
+    in the d_content argument.

     :param tar tarfile: The tarfile instance used to make the archive.
     :param name_archive str: The name of the archive to make.
@@ -386,7 +376,7 @@ def add_files(tar, name_archive, d_content, logger, f_exclude=None):
     :param f_exclude Function: the function that filters
     :return: 0 if success, 1 if not.
     :rtype: int
-    '''
+    """
     # get the max length of the messages in order to make the display
     max_len = len(max(d_content.keys(), key=len))

@@ -395,7 +385,7 @@ def add_files(tar, name_archive, d_content, logger, f_exclude=None):
     for name in d_content.keys():
         # display information
         len_points = max_len - len(name)
-        logger.write(name + " " + len_points * "." + " ", 3)
+        logger.info(name + " " + len_points * "." + " ")
         # Get the local path and the path in archive
         # of the directory or file to add
         local_path, archive_path = d_content[name]
@@ -410,13 +400,14 @@ def add_files(tar, name_archive, d_content, logger, f_exclude=None):
     return success

 def exclude_VCS_and_extensions(filename):
-    ''' The function that is used to exclude from package the link to the
-        VCS repositories (like .git)
+    """\
+    The function that is used to exclude from package the link to the
+    VCS repositories (like .git)

     :param filename Str: The filname to exclude (or not).
     :return: True if the file has to be exclude
     :rtype: Boolean
-    '''
+    """
     for dir_name in IGNORED_DIRS:
         if dir_name in filename:
             return True
@@ -853,27 +844,25 @@ def source_package(sat, config, logger, options, tmp_working_dir):
     '''

     # Get all the products that are prepared using an archive
-    logger.write("Find archive products ... ")
+    logger.info("Find archive products ... ")
     d_archives, l_pinfo_vcs = get_archives(config, logger)
-    logger.write("Done\n")
+    logger.info("Done\n")

     d_archives_vcs = {}
     if not options.with_vcs and len(l_pinfo_vcs) > 0:
         # Make archives with the products that are not prepared using an archive
         # (git, cvs, svn, etc)
-        logger.write("Construct archives for vcs products ... ")
+        logger.info("Construct archives for vcs products ... ")
         d_archives_vcs = get_archives_vcs(l_pinfo_vcs,
                                           sat,
                                           config,
                                           logger,
                                           tmp_working_dir)
-        logger.write("Done\n")
+        logger.info("Done\n")

     # Create a project
-    logger.write("Create the project ... ")
-    d_project = create_project_for_src_package(config,
-                                               tmp_working_dir,
-                                               options.with_vcs)
-    logger.write("Done\n")
+    logger.info("Create the project ... ")
+    d_project = create_project_for_src_package(config, tmp_working_dir, options.with_vcs)
+    logger.info("Done\n")

     # Add salomeTools
     tmp_sat = add_salomeTools(config, tmp_working_dir)
@@ -991,13 +980,13 @@ def get_archives_vcs(l_pinfo_vcs, sat, config, logger, tmp_working_dir):
     # command and thus construct an archive that will not contain the patches
     l_prod_names = [pn for pn, __ in l_pinfo_vcs]
     # clean
-    logger.write(_("clean sources\n"))
+    logger.info(_("clean sources\n"))
     args_clean = config.VARS.application
     args_clean += " --sources --products "
     args_clean += ",".join(l_prod_names)
     sat.clean(args_clean, batch=True, verbose=0, logger_add_link = logger)
     # source
-    logger.write(_("get sources"))
+    logger.info(_("get sources"))
     args_source = config.VARS.application
     args_source += " --products "
     args_source += ",".join(l_prod_names)
Do not apply any patch") % product_info.name + logger.info(msg + "\n") + return RCO.ReturnCode("OK", msg) if not "patches" in product_info or len(product_info.patches) == 0: # display and log - logger.write('%s: ' % UTS.label(product_info.name), 4) - logger.write(' ' * (max_product_name_len - len(product_info.name)), 4, False) - logger.write("\n", 4, False) + logger.info('%s: ' % UTS.label(product_info.name)) + logger.info(' ' * (max_product_name_len - len(product_info.name))) + logger.info("\n") msg = _("No patch for the %s product") % product_info.name - logger.write(msg, 4) - logger.write("\n", 4) - return True, "" + logger.info(msg + "\n") + return RCO.ReturnCode("OK", msg) else: # display and log - logger.write('%s: ' % UTS.label(product_info.name), 3) - logger.write(' ' * (max_product_name_len - len(product_info.name)), 3, False) - logger.write("\n", 4, False) + logger.info('%s: ' % UTS.label(product_info.name)) + logger.info(' ' * (max_product_name_len - len(product_info.name))) + logger.info("\n") if not os.path.exists(product_info.source_dir): - msg = _("No sources found for the %s product\n") % product_info.name - logger.write(UTS.red(msg), 1) - return False, "" + msg = _("No sources found for the %s product") % product_info.name + logger.error(UTS.red(msg)) + return RCO.ReturnCode("KO", msg) # At this point, there one or more patches and the source directory exists retcode = [] @@ -167,7 +166,7 @@ def apply_patch(config, product_info, max_product_name_len, logger): patch_cmd = "patch -p1 < %s" % patch # Write the command in the terminal if verbose level is at 5 - logger.write((" >%s\n" % patch_cmd),5) + logger.info(" >%s\n" % patch_cmd) # Write the command in the log file (can be seen using 'sat log') logger.logTxtFile.write("\n >%s\n" % patch_cmd) @@ -201,6 +200,9 @@ def apply_patch(config, product_info, max_product_name_len, logger): if len(details) > 0: retcode.extend(details) - res = not (False in res) + if False in res: + rc = "KO" + else: + rc = "OK" - return res, "\n".join(retcode) + "\n" + return RCO.ReturnCode(rc, "\n".join(retcode)) diff --git a/commands/prepare.py b/commands/prepare.py index 7aa4e3e..859516e 100644 --- a/commands/prepare.py +++ b/commands/prepare.py @@ -94,26 +94,28 @@ class Command(_BaseCommand): if not options.force and len(ldev_products) > 0: l_products_not_getted = find_products_already_getted(ldev_products) if len(l_products_not_getted) > 0: - msg = _("Do not get the source of the following products in development mode\n" - " Use the --force option to overwrite it.\n") - logger.write(UTS.red(msg), 1) + msg = _("""\ +Do not get the source of the following products in development mode. +Use the --force option to overwrite it. +""") + logger.error(UTS.red(msg)) args_product_opt_clean = remove_products(args_product_opt_clean, l_products_not_getted, logger) - logger.write("\n", 1) args_product_opt_patch = args_product_opt if not options.force_patch and len(ldev_products) > 0: l_products_with_patchs = find_products_with_patchs(ldev_products) if len(l_products_with_patchs) > 0: - msg = _("do not patch the following products in development mode\n" - " Use the --force_patch option to overwrite it.\n") - logger.write(UTS.red(msg), 1) + msg = _(""" +Do not patch the following products in development mode. +Use the --force_patch option to overwrite it. 
+""") + logger.error(UTS.red(msg)) args_product_opt_patch = remove_products(args_product_opt_patch, l_products_with_patchs, logger) - logger.write("\n", 1) # Construct the final commands arguments args_clean = args_appli + args_product_opt_clean + " --sources" @@ -138,25 +140,25 @@ class Command(_BaseCommand): # Call the commands using the API if do_clean: msg = _("Clean the source directories ...") - logger.write(msg, 3) - logger.flush() + logger.info(msg) DBG.tofix("args_clean and TODO remove returns", args_clean, True) res_clean = runner.getCommand("clean").run(args_clean) return res_clean + res_source + res_patch if do_source: msg = _("Get the sources of the products ...") - logger.write(msg, 5) + logger.debug(msg) res_source = runner.getCommand("source").run(args_source) if do_patch: msg = _("Patch the product sources (if any) ...") - logger.write(msg, 5) + logger.debug(msg) res_patch = runner.getCommand("patch").run(args_patch) return res_clean + res_source + res_patch def remove_products(arguments, l_products_info, logger): - '''function that removes the products in l_products_info from arguments list. + """ + function that removes the products in l_products_info from arguments list. :param arguments str: The arguments from which to remove products :param l_products_info list: List of @@ -164,14 +166,14 @@ def remove_products(arguments, l_products_info, logger): :param logger Logger: The logger instance to use for the display and logging :return: The updated arguments. :rtype: str - ''' + """ args = arguments for i, (product_name, __) in enumerate(l_products_info): args = args.replace(',' + product_name, '') end_text = ', ' if i+1 == len(l_products_info): end_text = '\n' - logger.write(product_name + end_text, 1) + logger.info(product_name + end_text) return args def find_products_already_getted(l_products): diff --git a/commands/profile.py b/commands/profile.py index 2043e06..e22b4af 100644 --- a/commands/profile.py +++ b/commands/profile.py @@ -91,8 +91,8 @@ class Command(_BaseCommand): if options.prefix is None: msg = _("The --%s argument is required\n") % "prefix" - logger.write(UTS.red(msg), 1) - return 1 + logger.error(msg) + return RCO.ReturnCode("KO", msg) retcode = generate_profile_sources(config, options, logger) @@ -148,9 +148,10 @@ def get_profile_name ( options, config ): res = config.APPLICATION.name + "_PROFILE" return res -## -# Generates the sources of the profile def generate_profile_sources( config, options, logger ): + """ + Generates the sources of the profile + """ #Check script app-quickstart.py exists kernel_cfg = src.product.get_product_config(config, "KERNEL") kernel_root_dir = kernel_cfg.install_dir @@ -158,8 +159,7 @@ def generate_profile_sources( config, options, logger ): raise Exception(_("KERNEL is not installed")) script = os.path.join(kernel_root_dir,"bin","salome","app-quickstart.py") if not os.path.exists( script ): - raise Exception( - _("KERNEL's install has not the script app-quickstart.py") ) + raise Exception( _("KERNEL's install has not the script app-quickstart.py") ) # Check that GUI is installed gui_cfg = src.product.get_product_config(config, "GUI") @@ -188,7 +188,7 @@ def generate_profile_sources( config, options, logger ): command += " --force" if options.slogan : command += " --slogan=%s" % options.slogan - logger.write("\n>" + command + "\n", 5, False) + logger.debug("\n>" + command + "\n") #Run command os.environ["KERNEL_ROOT_DIR"] = kernel_root_dir @@ -202,13 +202,14 @@ def generate_profile_sources( config, options, logger ): if 
diff --git a/commands/profile.py b/commands/profile.py
index 2043e06..e22b4af 100644
--- a/commands/profile.py
+++ b/commands/profile.py
@@ -91,8 +91,8 @@ class Command(_BaseCommand):
 
         if options.prefix is None:
             msg = _("The --%s argument is required\n") % "prefix"
-            logger.write(UTS.red(msg), 1)
-            return 1
+            logger.error(msg)
+            return RCO.ReturnCode("KO", msg)
 
         retcode = generate_profile_sources(config, options, logger)
 
@@ -148,9 +148,10 @@ def get_profile_name ( options, config ):
         res = config.APPLICATION.name + "_PROFILE"
     return res
 
-##
-# Generates the sources of the profile
 def generate_profile_sources( config, options, logger ):
+    """
+    Generates the sources of the profile
+    """
     #Check script app-quickstart.py exists
     kernel_cfg = src.product.get_product_config(config, "KERNEL")
     kernel_root_dir = kernel_cfg.install_dir
@@ -158,8 +159,7 @@ def generate_profile_sources( config, options, logger ):
         raise Exception(_("KERNEL is not installed"))
     script = os.path.join(kernel_root_dir,"bin","salome","app-quickstart.py")
     if not os.path.exists( script ):
-        raise Exception(
-            _("KERNEL's install has not the script app-quickstart.py") )
+        raise Exception( _("KERNEL's install has not the script app-quickstart.py") )
 
     # Check that GUI is installed
     gui_cfg = src.product.get_product_config(config, "GUI")
@@ -188,7 +188,7 @@ def generate_profile_sources( config, options, logger ):
         command += " --force"
     if options.slogan :
         command += " --slogan=%s" % options.slogan
-    logger.write("\n>" + command + "\n", 5, False)
+    logger.debug("\n>" + command + "\n")
 
     #Run command
     os.environ["KERNEL_ROOT_DIR"] = kernel_root_dir
@@ -202,13 +202,14 @@ def generate_profile_sources( config, options, logger ):
     if res != 0:
         raise Exception(_("Cannot create application, code = %d\n") % res)
     else:
-        logger.write(
-            _("Profile sources were generated in directory %s.\n" % prefix), 3 )
+        logger.info( _("Profile sources were generated in directory %s.\n" % prefix) )
 
     return res
 
-##
-# Updates the pyconf
+
 def update_pyconf( config, options, logger ):
+    """
+    Updates the pyconf
+    """
 
     #Save previous version
     pyconf = config.VARS.product + '.pyconf'
 
diff --git a/commands/run.py b/commands/run.py
index c859b45..55087d9 100644
--- a/commands/run.py
+++ b/commands/run.py
@@ -98,11 +98,10 @@ Did you run the command 'sat launcher' ?\n""") % launcher_path
                               stdout=logger.logTxtFile,
                               stderr=subprocess.STDOUT)
 
-        # Display information : how to get the logs
-        messageFirstPart = _("\nEnd of execution. To see the traces, "
-                             "please tap the following command :\n")
-        messageSecondPart = UTS.label( config.VARS.salometoolsway + os.sep +
-                                       "sat log " + config.VARS.application + "\n")
-        logger.write("  %s\n" %(messageFirstPart + messageSecondPart), 2)
+        # Display information: how to get the logs
+        msg1 = _("End of 'sat run'. To see traces, type:")
+        msg2 = UTS.label("sat log " + config.VARS.application)
+        msg = "%s\n%s\n" % (msg1, msg2)
+        logger.info(msg)
 
-        return 0
+        return RCO.ReturnCode("OK", msg)
 
diff --git a/commands/script.py b/commands/script.py
index 83b34b2..f3eec3b 100644
--- a/commands/script.py
+++ b/commands/script.py
@@ -80,7 +80,7 @@ class Command(_BaseCommand):
         # Print some informations
         msg = ('Executing the script in the build directories of the application %s\n') % \
               UTS.label(config.VARS.application)
-        logger.write(msg, 1)
+        logger.info(msg)
 
         info = [(_("BUILD directory"), os.path.join(config.APPLICATION.workdir, 'BUILD'))]
         UTS.logger_info_tuples(logger, info)
@@ -144,10 +144,9 @@ def get_products_list(options, cfg, logger):
     return products_infos
 
 def log_step(logger, header, step):
-    logger.write("\r%s%s" % (header, " " * 20), 3)
-    logger.write("\r%s%s" % (header, step), 3)
-    logger.write("\n==== %s \n" % UTS.info(step), 4)
-    logger.flush()
+    logger.info("\r%s%s" % (header, " " * 20))
+    logger.info("\r%s%s" % (header, step))
+    logger.debug("\n==== %s \n" % UTS.info(step))
 
 def log_res_step(logger, res):
     if res == 0:
@@ -192,21 +191,17 @@ def run_script_of_product(p_name_info, nb_proc, config, logger):
     p_name, p_info = p_name_info
 
     # Logging
-    logger.write("\n", 4, False)
-    logger.write("################ ", 4)
     header = _("Running script of %s") % UTS.label(p_name)
     header += " %s " % ("." * (20 - len(p_name)))
-    logger.write(header, 3)
-    logger.write("\n", 4, False)
-    logger.flush()
+    logger.info("\n" + header)
 
     # Do nothing if the product is not compilable or has no compilation script
-    if ( ("properties" in p_info and
-          "compilation" in p_info.properties and
-          p_info.properties.compilation == "no") or
-         (not src.product.product_has_script(p_info)) ):
+    test1 = "properties" in p_info and \
+            "compilation" in p_info.properties and \
+            p_info.properties.compilation == "no"
+    if ( test1 or (not src.product.product_has_script(p_info)) ):
         log_step(logger, header, "ignored")
-        logger.write("\n", 3, False)
+        logger.info("\n")
         return 0
 
     # Instantiate the class that manages all the construction commands
@@ -228,13 +223,13 @@ def run_script_of_product(p_name_info, nb_proc, config, logger):
 
     # Log the result
     if res > 0:
-        logger.write("\r%s%s" % (header, " " * len_end_line), 3)
-        logger.write("\r" + header + "<KO>")
+        logger.info("\r%s%s" % (header, " " * len_end_line))
+        logger.info("\r" + header + "<KO>")
         logger.debug("==== in script execution of %s\n" % p_name)
     else:
-        logger.write("\r%s%s" % (header, " " * len_end_line), 3)
-        logger.write("\r" + header + "<OK>")
+        logger.info("\r%s%s" % (header, " " * len_end_line))
+        logger.info("\r" + header + "<OK>")
         logger.debug("==== in script execution of %s\n" % p_name)
-        logger.write("\n")
+        logger.info("\n")
 
     return res
 
diff --git a/commands/shell.py b/commands/shell.py
index 0584c89..666b65b 100644
--- a/commands/shell.py
+++ b/commands/shell.py
@@ -73,7 +73,7 @@ class Command(_BaseCommand):
 
         # Print the input command
         msg = _("Command to execute:\n%s\nExecution ... ") % options.command
-        logger.write(msg, 3)
+        logger.info(msg)
 
         # Call the input command
         res = subprocess.call(options.command,
@@ -83,9 +83,9 @@ class Command(_BaseCommand):
 
         # Format the result to be 0 (success) or 1 (fail)
         if res != 0:
-            res = 1
-            logger.info("\n")
+            status = "KO"
         else:
-            logger.info("\n")
-
-        return res
+            status = "OK"
+
+        logger.info("<%s>\n" % status)
+        return RCO.ReturnCode(status, "shell command done")
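The shell hunk is the simplest instance of the new pattern: map a process exit code onto the "OK"/"KO" convention and return a ReturnCode. Reduced to a standalone sketch, with print standing in for the sat logger and ReturnCode as sketched above:

    import subprocess

    def run_shell(command):
        # 0 means success for a shell command; anything else is a failure
        res = subprocess.call(command, shell=True)
        status = "OK" if res == 0 else "KO"
        print("<%s>" % status)
        return ReturnCode(status, "shell command done")

    run_shell("exit 0")   # -> <OK>
    run_shell("exit 1")   # -> <KO>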
diff --git a/commands/source.py b/commands/source.py
index dcbd7f3..6a802f0 100644
--- a/commands/source.py
+++ b/commands/source.py
@@ -72,7 +72,7 @@ class Command(_BaseCommand):
         src.check_config_has_application( config )
 
         # Print some informations
-        logger.write(_('Getting sources of the application %s\n') % \
-                     UTS.label(config.VARS.application), 1)
+        logger.info(_('Getting sources of the application %s\n') % \
+                    UTS.label(config.VARS.application))
 
         logger.info("  workdir = %s\n" % config.APPLICATION.workdir)
@@ -103,7 +103,8 @@ class Command(_BaseCommand):
 
 def get_source_for_dev(config, product_info, source_dir, logger, pad):
-    '''The method called if the product is in development mode
+    """\
+    Called if the product is in development mode
 
     :param config Config: The global configuration
     :param product_info Config: The configuration specific to
@@ -114,7 +115,7 @@ def get_source_for_dev(config, product_info, source_dir, logger, pad):
     :param pad int: The gap to apply for the terminal display
     :return: True if it succeed, else False
     :rtype: boolean
-    '''
+    """
 
     # Call the function corresponding to get the sources with True checkout
     retcode = get_product_sources(config,
@@ -124,13 +125,9 @@ def get_source_for_dev(config, product_info, source_dir, logger, pad):
                                   logger,
                                   pad,
                                   checkout=True)
-    logger.write("\n", 3, False)
     # +2 because product name is followed by ': '
-    logger.write(" " * (pad+2), 3, False)
-
-    logger.write('dev: %s ... ' % \
-                 UTS.info(product_info.source_dir), 3, False)
-    logger.flush()
+    logger.info("\n" + " " * (pad+2))
+    logger.info('dev: %s ... ' % UTS.info(product_info.source_dir))
 
     return retcode
 
@@ -140,7 +137,8 @@ def get_source_from_git(product_info,
                         pad,
                         is_dev=False,
                         environ = None):
-    '''The method called if the product is to be get in git mode
+    """\
+    Called if the product is to be get in git mode
 
     :param product_info Config: The configuration specific to
                                 the product to be prepared
@@ -153,7 +151,7 @@ def get_source_from_git(product_info,
                             extracting.
     :return: True if it succeed, else False
     :rtype: boolean
-    '''
+    """
     # The str to display
     coflag = 'git'
 
@@ -170,7 +168,7 @@ def get_source_from_git(product_info,
     msg += " " * (pad + 50 - len(repo_git))
     msg += " tag:%s" % product_info.git_info.tag
     msg += "%s. " % "." * (10 - len(product_info.git_info.tag))
-    logger.write("\n" + msg)
+    logger.info("\n" + msg)
 
     # Call the system function that do the extraction in git mode
     retcode = SYSS.git_extract(repo_git,
@@ -194,10 +192,7 @@ def get_source_from_archive(product_info, source_dir, logger):
         raise Exception(_("Archive not found: '%s'") % \
                         product_info.archive_info.archive_name)
 
-    logger.write('arc:%s ... ' % \
-                 UTS.info(product_info.archive_info.archive_name),
-                 3, False)
-    logger.flush()
+    logger.info('arc:%s ... ' % UTS.info(product_info.archive_info.archive_name))
     # Call the system function that do the extraction in archive mode
     retcode, NameExtractedDirectory = SYSS.archive_extract(
                                     product_info.archive_info.archive_name,
@@ -234,11 +229,8 @@ def get_source_from_dir(product_info, source_dir, logger):
         logger.error(msg)
         return False
 
-    logger.write('DIR: %s ... ' % UTS.info(
-                                  product_info.dir_info.dir), 3)
-
-    retcode = src.Path(product_info.dir_info.dir).copy(source_dir)
-
+    logger.info('DIR: %s ... ' % UTS.info(product_info.dir_info.dir))
+    retcode = src.Path(product_info.dir_info.dir).copy(source_dir)
     return retcode
 
 def get_source_from_cvs(user,
@@ -290,7 +282,7 @@ def get_source_from_cvs(user,
     # at least one '.' is visible
     msg += " %s. " % ("." * (10 - len(product_info.cvs_info.tag)))
 
-    logger.write(msg)
+    logger.info(msg)
 
     # Call the system function that do the extraction in cvs mode
     retcode = SYSS.cvs_extract(protocol, user,
@@ -324,16 +316,16 @@ def get_source_from_svn(user,
     coflag = 'svn'
     if checkout:
         coflag = coflag.upper()
 
-    logger.write('%s:%s ... ' % (coflag, product_info.svn_info.repo)
+    logger.info('%s:%s ... ' % (coflag, product_info.svn_info.repo))
 
     # Call the system function that do the extraction in svn mode
     retcode = SYSS.svn_extract(user,
-                              product_info.svn_info.repo,
-                              product_info.svn_info.tag,
-                              source_dir,
-                              logger,
-                              checkout,
-                              environ)
+                               product_info.svn_info.repo,
+                               product_info.svn_info.tag,
+                               source_dir,
+                               logger,
+                               checkout,
+                               environ)
     return retcode
 
 def get_product_sources(config,
@@ -402,19 +394,19 @@ def get_product_sources(config,
     if product_info.get_source == "native":
         # skip
         msg = "" + _("\ndo nothing because the product is of type 'native'.\n")
-        logger.write(msg)
+        logger.info(msg)
         return True
 
     if product_info.get_source == "fixed":
         # skip
         msg = "" + _("\ndo nothing because the product is of type 'fixed'.\n")
-        logger.write(msg)
+        logger.info(msg)
         return True
 
     # if the get_source is not in [git, archive, cvs, svn, fixed, native]
     msg = _("Unknown get source method '%s' for product %s") % \
          ( product_info.get_source, product_info.name)
-    logger.write("%s ... " % msg)
+    logger.info("%s ... " % msg)
" % msg) return False def get_all_product_sources(config, products, logger): @@ -447,9 +439,8 @@ def get_all_product_sources(config, products, logger): source_dir = src.Path('') # display and log - logger.write('%s: ' % UTS.label(product_name), 3) - logger.write(' ' * (max_product_name_len - len(product_name)), 3, False) - logger.write("\n", 4, False) + logger.info('%s: ' % UTS.label(product_name)) + logger.info(' ' * (max_product_name_len - len(product_name))) # Remove the existing source directory if # the product is not in development mode diff --git a/commands/template.py b/commands/template.py index 3d113b1..6844c2a 100644 --- a/commands/template.py +++ b/commands/template.py @@ -120,16 +120,6 @@ Component name must contains only alphanumeric characters and no spaces\n""") logger.error(msg) return 1 - # CNC inutile - # Ask user confirmation if a module of the same name already exists - #if options.name in config.PRODUCTS and not runner.options.batch: - # logger.write(UTS.red( - # _("A module named '%s' already exists." % options.name)), 1) - # logger.write("\n", 1) - # rep = input(_("Are you sure you want to continue? [Yes/No] ")) - # if rep.upper() != _("YES"): - # return 1 - if options.target is None: logger.error(msg_miss % "target") return 1 @@ -140,17 +130,6 @@ Component name must contains only alphanumeric characters and no spaces\n""") logger.error(msg) return 1 - # CNC inutile - #if options.template == "Application": - # if "_APPLI" not in options.name and not runner.options.batch: - # msg = _("An Application module named '..._APPLI' " - # "is usually recommended.") - # logger.write(UTS.red(msg), 1) - # logger.write("\n", 1) - # rep = input(_("Are you sure you want to continue? [Yes/No] ")) - # if rep.upper() != _("YES"): - # return 1 - msg = "" msg += _('Create sources from template\n') msg += ' destination = %s\n' % target_dir @@ -348,29 +327,26 @@ def search_template(config, template): raise Exception(_("Template not found: %s") % template) return template_src_dir -## -# Prepares a module from a template. 
diff --git a/commands/template.py b/commands/template.py
index 3d113b1..6844c2a 100644
--- a/commands/template.py
+++ b/commands/template.py
@@ -120,16 +120,6 @@ Component name must contains only alphanumeric characters and no spaces\n""")
             logger.error(msg)
             return 1
 
-        # CNC inutile
-        # Ask user confirmation if a module of the same name already exists
-        #if options.name in config.PRODUCTS and not runner.options.batch:
-        #    logger.write(UTS.red(
-        #        _("A module named '%s' already exists." % options.name)), 1)
-        #    logger.write("\n", 1)
-        #    rep = input(_("Are you sure you want to continue? [Yes/No] "))
-        #    if rep.upper() != _("YES"):
-        #        return 1
-
         if options.target is None:
             logger.error(msg_miss % "target")
             return 1
@@ -140,17 +130,6 @@ Component name must contains only alphanumeric characters and no spaces\n""")
             logger.error(msg)
             return 1
 
-        # CNC inutile
-        #if options.template == "Application":
-        #    if "_APPLI" not in options.name and not runner.options.batch:
-        #        msg = _("An Application module named '..._APPLI' "
-        #               "is usually recommended.")
-        #        logger.write(UTS.red(msg), 1)
-        #        logger.write("\n", 1)
-        #        rep = input(_("Are you sure you want to continue? [Yes/No] "))
-        #        if rep.upper() != _("YES"):
-        #            return 1
-
         msg = ""
         msg += _('Create sources from template\n')
         msg += '  destination = %s\n' % target_dir
@@ -348,29 +327,26 @@ def search_template(config, template):
         raise Exception(_("Template not found: %s") % template)
     return template_src_dir
 
-##
-# Prepares a module from a template.
+
+
 def prepare_from_template(config,
                           name,
                           template,
                           target_dir,
                           conf_values,
                           logger):
+    """Prepares a module from a template."""
     template_src_dir = search_template(config, template)
     res = 0
 
     # copy the template
     if os.path.isfile(template_src_dir):
-        logger.write("  " + _(
-            "Extract template %s\n") % UTS.info(
-                template), 4)
+        logger.info(_("Extract template %s\n") % UTS.info(template))
         SYSS.archive_extract(template_src_dir, target_dir)
     else:
-        logger.write("  " + _(
-            "Copy template %s\n") % UTS.info(
-                template), 4)
+        logger.info(_("Copy template %s\n") % UTS.info(template))
         shutil.copytree(template_src_dir, target_dir)
-    logger.write("\n", 5)
+
 
     compo_name = name
     if name.endswith("CPP"):
@@ -383,7 +359,7 @@ def prepare_from_template(config,
     tsettings = TemplateSettings(compo_name, settings_file, target_dir)
 
     # first rename the files
-    logger.write("  " + UTS.label(_("Rename files\n")), 4)
+    logger.debug(UTS.label(_("Rename files\n")))
     for root, dirs, files in os.walk(target_dir):
         for fic in files:
             ff = fic.replace(tsettings.file_subst, compo_name)
@@ -392,13 +368,11 @@ def prepare_from_template(config,
                 raise Exception(
                     _("Destination file already exists: %s") % \
                     os.path.join(root, ff) )
-            logger.write("    %s -> %s\n" % (fic, ff), 5)
+            logger.debug("    %s -> %s\n" % (fic, ff))
             os.rename(os.path.join(root, fic), os.path.join(root, ff))
 
     # rename the directories
-    logger.write("\n", 5)
-    logger.write("  " + UTS.label(_("Rename directories\n")),
-                 4)
+    logger.debug(UTS.label(_("Rename directories\n")))
     for root, dirs, files in os.walk(target_dir, topdown=False):
         for rep in dirs:
             dd = rep.replace(tsettings.file_subst, compo_name)
@@ -407,32 +381,26 @@ def prepare_from_template(config,
                 raise Exception(
                     _("Destination directory already exists: %s") % \
                     os.path.join(root, dd) )
-            logger.write("    %s -> %s\n" % (rep, dd), 5)
+            logger.debug("    %s -> %s\n" % (rep, dd))
             os.rename(os.path.join(root, rep), os.path.join(root, dd))
 
     # ask for missing parameters
-    logger.write("\n", 5)
-    logger.write("  " + UTS.label(
-        _("Make substitution in files\n")), 4)
-    logger.write("  " + _("Delimiter =") + " %s\n" % tsettings.delimiter_char,
-                 5)
-    logger.write("  " + _("Ignore Filters =") + " %s\n" % ', '.join(
-        tsettings.ignore_filters), 5)
+    logger.debug(UTS.label(_("Make substitution in files\n")))
+    logger.debug(_("Delimiter =") + " %s\n" % tsettings.delimiter_char)
+    logger.debug(_("Ignore Filters =") + " %s\n" % ', '.join(tsettings.ignore_filters))
     dico = tsettings.get_parameters(conf_values)
-    logger.write("\n", 3)
 
     # override standard string.Template class to use the desired delimiter
     class CompoTemplate(string.Template):
         delimiter = tsettings.delimiter_char
 
     # do substitution
-    logger.write("\n", 5, True)
     pathlen = len(target_dir) + 1
     for root, dirs, files in os.walk(target_dir):
         for fic in files:
             fpath = os.path.join(root, fic)
             if not tsettings.check_file_for_substitution(fpath[pathlen:]):
-                logger.write("  - %s\n" % fpath[pathlen:], 5)
+                logger.debug("  - %s\n" % fpath[pathlen:])
                 continue
             # read the file
             m = file(fpath, 'r').read()
@@ -444,26 +412,21 @@ def prepare_from_template(config,
             if d != m:
                 changed = "*"
                 file(fpath, 'w').write(d)
-            logger.write("  %s %s\n" % (changed, fpath[pathlen:]), 5)
+            logger.debug("  %s %s\n" % (changed, fpath[pathlen:]))
 
     if not tsettings.has_pyconf:
-        logger.write(UTS.red(_(
-            "Definition for sat not found in settings file.")) + "\n", 2)
+        logger.error(_("Definition for sat not found in settings file."))
     else:
         definition = tsettings.pyconf % dico
         pyconf_file = os.path.join(target_dir, name + '.pyconf')
         f = open(pyconf_file, 'w')
         f.write(definition)
         f.close
-        logger.write(_(
-            "Create configuration file: ") + UTS.info(
-                pyconf_file) + "\n", 2)
+        logger.info(_("Create configuration file: ") + pyconf_file)
 
     if len(tsettings.post_command) > 0:
         cmd = tsettings.post_command % dico
-        logger.write("\n", 5, True)
-        logger.write(_(
-            "Run post command: ") + UTS.info(cmd) + "\n", 3)
+        logger.info(_("Run post command: ") + cmd)
 
         p = subprocess.Popen(cmd, shell=True, cwd=target_dir)
         p.wait()
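The CompoTemplate trick in the hunks above, subclassing string.Template only to change its delimiter, is easy to miss. A standalone illustration follows; the real delimiter comes from the template's settings file, so ":sat:" here is just an assumed example:

    import string

    class CompoTemplate(string.Template):
        delimiter = ":sat:"   # instead of the default "$"

    tpl = CompoTemplate("module :sat:{name}, version :sat:{version}")
    print(tpl.safe_substitute({"name": "MYCOMPO", "version": "1.0"}))
    # -> module MYCOMPO, version 1.0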
diff --git a/commands/test.py b/commands/test.py
index 1d57e61..3d9f262 100644
--- a/commands/test.py
+++ b/commands/test.py
@@ -29,7 +29,7 @@ import src.returnCode as RCO
 import src.utilsSat as UTS
 from src.salomeTools import _BaseCommand
 import src.ElementTree as etree
-from src.xmlManager import add_simple_node
+import src.xmlManager as XMLMGR
 
 try:
     from hashlib import sha1
@@ -116,9 +116,8 @@ Optional: set the display where to launch SALOME.
         # the test base is specified either by the application, or by the --base option
         with_application = False
         if config.VARS.application != 'None':
-            logger.write(
-                _('Running tests on application %s\n') %
-                UTS.label(config.VARS.application), 1)
+            logger.info(_('Running tests on application %s\n') %
+                        UTS.label(config.VARS.application))
             with_application = True
         elif not options.base:
             raise Exception(
@@ -128,16 +127,15 @@ Optional: set the display where to launch SALOME.
         if with_application:
             # check if environment is loaded
             if 'KERNEL_ROOT_DIR' in os.environ:
-                logger.write( UTS.red(
-                    _("WARNING: SALOME environment already sourced")) + "\n", 1 )
-
-
+                logger.warning(_("SALOME environment already sourced"))
+
         elif options.launcher:
-            logger.write(UTS.red(_("Running SALOME application.")) + "\n\n", 1)
+            logger.info(_("Running SALOME application."))
         else:
             msg = _("""\
 Impossible to find any launcher.
-Please specify an application or a launcher\n""")
+Please specify an application or a launcher
+""")
             logger.error(msg)
             return 1
@@ -251,10 +249,9 @@ Optional: set the display where to launch SALOME.
         retcode = test_runner.run_all_tests()
         logger.allowPrintLevel = True
 
-        logger.write(_("Tests finished"), 1)
-        logger.write("\n", 2, False)
+        logger.info(_("Tests finished\n"))
 
-        logger.write(_("\nGenerate the specific test log\n"), 5)
+        logger.debug(_("Generate the specific test log\n"))
         log_dir = UTS.get_log_path(config)
         out_dir = os.path.join(log_dir, "TEST")
         UTS.ensure_path_exists(out_dir)
@@ -283,9 +280,7 @@ Optional: set the display where to launch SALOME.
         # Add the historic files into the log files list of the command
         logger.l_logFiles.append(historic_xml_path)
 
-        logger.write(
-            _("Removing the temporary directory: %s\n" %
-              test_runner.tmp_working_dir), 5 )
+        logger.debug(_("Removing the temporary directory: %s") % test_runner.tmp_working_dir)
         if os.path.exists(test_runner.tmp_working_dir):
             shutil.rmtree(test_runner.tmp_working_dir)
 
@@ -335,7 +330,7 @@ def move_test_results(in_dir, what, out_dir, logger):
     while not pathIsOk:
         try:
             # create test results directory if necessary
-            #logger.write("FINAL = %s\n" % finalPath, 5)
+            #logger.debug("FINAL = %s\n" % finalPath)
             if not os.access(finalPath, os.F_OK):
                 #shutil.rmtree(finalPath)
                 os.makedirs(finalPath)
@@ -351,9 +346,7 @@ def move_test_results(in_dir, what, out_dir, logger):
         if not os.access(os.path.join(finalPath, '.objects'), os.F_OK):
             os.makedirs(os.path.join(finalPath, '.objects'))
 
-    logger.write(_('copy tests results to %s ... ') % finalPath, 3)
-    logger.flush()
-    #logger.write("\n", 5)
+    logger.info(_('copy tests results to %s ... ') % finalPath)
 
     # copy env_info.py
     shutil.copy2(os.path.join(in_dir, what, 'env_info.py'),
@@ -369,7 +362,7 @@ def move_test_results(in_dir, what, out_dir, logger):
             continue
         os.makedirs(outtestbase)
-        #logger.write("  copy testbase %s\n" % testbase, 5)
+        #logger.debug("copy testbase %s\n" % testbase)
 
         for grid_ in [m for m in os.listdir(intestbase) \
                       if os.path.isdir(os.path.join(intestbase, m))]:
@@ -380,7 +373,7 @@ def move_test_results(in_dir, what, out_dir, logger):
             outgrid = os.path.join(outtestbase, grid_)
             ingrid = os.path.join(intestbase, grid_)
             os.makedirs(outgrid)
-            #logger.write("    copy grid %s\n" % grid_, 5)
+            #logger.debug("copy grid %s" % grid_)
 
             if grid_ == 'RESSOURCES':
                 for file_name in os.listdir(ingrid):
@@ -464,23 +457,24 @@ def create_test_report(config,
     # de remontee de log des tests
     print "TRACES OP - test.py/create_test_report() : xml_history_path = '#%s#'" %xml_history_path
 
+    ASNODE = XMLMGR.add_simple_node # shortcut
+
     if withappli:
         if not first_time:
             for node in (prod_node.findall("version_to_download") +
                          prod_node.findall("out_dir")):
                 prod_node.remove(node)
 
-        add_simple_node(prod_node, "version_to_download",
-                        config.APPLICATION.name)
+        ASNODE(prod_node, "version_to_download", config.APPLICATION.name)
 
-        add_simple_node(prod_node, "out_dir", config.APPLICATION.workdir)
+        ASNODE(prod_node, "out_dir", config.APPLICATION.workdir)
 
     # add environment
     if not first_time:
         for node in prod_node.findall("exec"):
             prod_node.remove(node)
-    exec_node = add_simple_node(prod_node, "exec")
+    exec_node = ASNODE(prod_node, "exec")
     exec_node.append(etree.Element("env", name="Host", value=config.VARS.node))
     exec_node.append(etree.Element("env", name="Architecture", value=config.VARS.dist))
@@ -495,10 +489,10 @@ def create_test_report(config,
 
     if 'TESTS' in config:
         if first_time:
-            tests = add_simple_node(prod_node, "tests")
-            known_errors = add_simple_node(prod_node, "known_errors")
-            new_errors = add_simple_node(prod_node, "new_errors")
-            amend = add_simple_node(prod_node, "amend")
+            tests = ASNODE(prod_node, "tests")
+            known_errors = ASNODE(prod_node, "known_errors")
+            new_errors = ASNODE(prod_node, "new_errors")
+            amend = ASNODE(prod_node, "amend")
         else:
             tests = prod_node.find("tests")
             known_errors = prod_node.find("known_errors")
@@ -514,7 +508,7 @@ def create_test_report(config,
 
     for testbase in tt.keys():
         if first_time:
-            gn = add_simple_node(tests, "testbase")
+            gn = ASNODE(tests, "testbase")
         else:
             gn = tests.find("testbase")
             # initialize all grids and session to "not executed"
@@ -542,7 +536,7 @@ def create_test_report(config,
         for test in tt[testbase]:
             if not grids.has_key(test.grid):
                 if first_time:
-                    mn = add_simple_node(gn, "grid")
+                    mn = ASNODE(gn, "grid")
                     mn.attrib['name'] = test.grid
                 else:
                     l_mn = gn.findall("grid")
@@ -552,7 +546,7 @@ def create_test_report(config,
                             mn = grid_node
                             break
                     if mn == None:
-                        mn = add_simple_node(gn, "grid")
+                        mn = ASNODE(gn, "grid")
                         mn.attrib['name'] = test.grid
                 grids[test.grid] = mn
 
@@ -561,7 +555,7 @@ def create_test_report(config,
 
             if not sessions.has_key("%s/%s" % (test.grid, test.session)):
                 if first_time:
-                    tyn = add_simple_node(mn, "session")
+                    tyn = ASNODE(mn, "session")
                     tyn.attrib['name'] = test.session
                 else:
                     l_tyn = mn.findall("session")
@@ -571,7 +565,7 @@ def create_test_report(config,
                             tyn = session_node
                             break
                     if tyn == None:
-                        tyn = add_simple_node(mn, "session")
+                        tyn = ASNODE(mn, "session")
                         tyn.attrib['name'] = test.session
sessions["%s/%s" % (test.grid, test.session)] = tyn @@ -580,12 +574,12 @@ def create_test_report(config, for script in test.script: if first_time: - tn = add_simple_node(sessions[ + tn = ASNODE(sessions[ "%s/%s" % (test.grid, test.session)], "test") tn.attrib['session'] = test.session tn.attrib['script'] = script.name - hn = add_simple_node(tn, "history") + hn = ASNODE(tn, "history") else: l_tn = sessions["%s/%s" % (test.grid, test.session)].findall( "test") @@ -596,23 +590,23 @@ def create_test_report(config, break if tn == None: - tn = add_simple_node(sessions[ + tn = ASNODE(sessions[ "%s/%s" % (test.grid, test.session)], "test") tn.attrib['session'] = test.session tn.attrib['script'] = script.name - hn = add_simple_node(tn, "history") + hn = ASNODE(tn, "history") else: # Get or create the history node for the current test if len(tn.findall("history")) == 0: - hn = add_simple_node(tn, "history") + hn = ASNODE(tn, "history") else: hn = tn.find("history") # Put the last test data into the history if 'res' in tn.attrib: attributes = {"date_hour" : date_hour, "res" : tn.attrib['res'] } - add_simple_node(hn, + ASNODE(hn, "previous_test", attrib=attributes) for node in tn: @@ -621,7 +615,7 @@ def create_test_report(config, if 'callback' in script: try: - cnode = add_simple_node(tn, "callback") + cnode = ASNODE(tn, "callback") if src.architecture.is_windows(): import string cnode.text = filter( @@ -634,19 +628,19 @@ def create_test_report(config, zz = (script.callback[:exc.start] + '?' + script.callback[exc.end-2:]) - cnode = add_simple_node(tn, "callback") + cnode = ASNODE(tn, "callback") cnode.text = zz.decode("UTF-8") # Add the script content - cnode = add_simple_node(tn, "content") + cnode = ASNODE(tn, "content") cnode.text = script.content # Add the script execution log - cnode = add_simple_node(tn, "out") + cnode = ASNODE(tn, "out") cnode.text = script.out if 'amend' in script: - cnode = add_simple_node(tn, "amend") + cnode = ASNODE(tn, "amend") cnode.text = script.amend.decode("UTF-8") if script.time < 0: @@ -656,7 +650,7 @@ def create_test_report(config, tn.attrib['res'] = script.res if "amend" in script: - amend_test = add_simple_node(amend, "atest") + amend_test = ASNODE(amend, "atest") amend_test.attrib['name'] = os.path.join(test.grid, test.session, script.name) @@ -671,7 +665,7 @@ def create_test_report(config, else: nb_not_run += 1 if "known_error" in script: - kf_script = add_simple_node(known_errors, "error") + kf_script = ASNODE(known_errors, "error") kf_script.attrib['name'] = os.path.join(test.grid, test.session, script.name) @@ -688,7 +682,7 @@ def create_test_report(config, kf_script.attrib['overdue'] = str(overdue) elif script.res == src.KO_STATUS: - new_err = add_simple_node(new_errors, "new_error") + new_err = ASNODE(new_errors, "new_error") script_path = os.path.join(test.grid, test.session, script.name) new_err.attrib['name'] = script_path @@ -718,12 +712,8 @@ def create_test_report(config, if not xmlname.endswith(".xml"): xmlname += ".xml" - src.xmlManager.write_report(os.path.join(dest_path, xmlname), - root, - "test.xsl") - src.xmlManager.write_report(xml_history_path, - root, - "test_history.xsl") + XMLMGR.write_report(os.path.join(dest_path, xmlname), root, "test.xsl") + XMLMGR.write_report(xml_history_path, root, "test_history.xsl") return src.OK_STATUS def generate_history_xml_path(config, test_base): diff --git a/data/templates/Application/config/compile.py b/data/templates/Application/config/compile.py index 793663c..9a88028 100755 --- 
diff --git a/data/templates/Application/config/compile.py b/data/templates/Application/config/compile.py
index 793663c..9a88028 100755
--- a/data/templates/Application/config/compile.py
+++ b/data/templates/Application/config/compile.py
@@ -18,7 +18,7 @@ def compil(config, builder, logger):
     command = "which lrelease"
     res = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE,env=builder.build_environ.environ.environ).communicate()
     if res[1] != "": #an error occurred
-        logger.write("ERROR: %s" % res[1])
+        logger.error(res[1])
         builder.log(res[1]+"\n")
         return 1
 
diff --git a/src/coloringSat.py b/src/coloringSat.py
index 840d232..dcc98e2 100755
--- a/src/coloringSat.py
+++ b/src/coloringSat.py
@@ -82,6 +82,8 @@ _tags = (
   ("", ST.RESET_ALL),
   ("", ST.RESET_ALL),
   ("", FG.BLUE),
+  ("