From: Christian Van Wambeke
Date: Fri, 1 Jun 2018 21:21:07 +0000 (+0200)
Subject: fix return code
X-Git-Url: http://git.salome-platform.org/gitweb/?a=commitdiff_plain;h=2724d47656e72a30213b09bb8a7999d6c187bc57;p=tools%2Fsat.git

fix return code
---

diff --git a/commands/compile.py b/commands/compile.py
index 3d37acd..a266371 100644
--- a/commands/compile.py
+++ b/commands/compile.py
@@ -144,7 +144,7 @@ class Command(_BaseCommand):
         nb_products = len(products_infos)
         nb_ok = res.getValue()
 
-        logger.info(_("\nCompilation: <%(0)s> (%(1)d/%(2)d)\n") % \
+        logger.info(_("\nCompile: <%(0)s> (%(1)d/%(2)d)\n") % \
             { '0': res.getStatus(), '1': nb_ok, '2': nb_products } )
 
@@ -161,7 +161,7 @@ class Command(_BaseCommand):
           List of (str, Config) => (product_name, product_info)
         :param logger: (Logger)
           The logger instance to use for the display and logging
-        :return: (RCO.ReturnCode) with value as the number of failing commands.
+        :return: (RCO.ReturnCode) with value as the number of products ok.
         """
         # shortcuts
         config = self.getConfig()
@@ -277,8 +277,8 @@ class Command(_BaseCommand):
         UTS.end_log_step(logger, res[-1])
 
         resAll = RCO.ReturnCodeFromList(res)
-        nbOk = len([r for r in res if r.isOk()])
-        nbKo = len([r for r in res if not r.isOk()])
+        nbOk = sum(1 for r in res if r.isOk())
+        nbKo = sum(1 for r in res if not r.isOk())
         if resAll.isOk(): # no failing commands
             return RCO.ReturnCode("OK", "No failing compile commands", nbOk)
         else:
diff --git a/commands/configure.py b/commands/configure.py
index 4d776aa..cf83b8c 100644
--- a/commands/configure.py
+++ b/commands/configure.py
@@ -105,16 +105,14 @@ class Command(_BaseCommand):
         if good_result == nbExpected:
             status = "OK"
             msg = _("command configure")
-            logger.info("\n%s %s: <%s>.\n" % (msg, msgCount, status))
+            logger.info("\n%s %s: <%s>" % (msg, msgCount, status))
         else:
             status = "KO"
             msg = _("command configure, some products have failed")
-            logger.info("\n%s %s: <%s>.\n" % (msg, msgCount, status))
+            logger.info("\n%s %s: <%s>" % (msg, msgCount, status))
 
         return RCO.ReturnCode(status, "%s %s" % (msg, msgCount))
 
-
-
     def configure_all_products(self, products_infos):
         """
         Execute the proper configuration commands
diff --git a/commands/generate.py b/commands/generate.py
index 47fce6b..dd5b19d 100644
--- a/commands/generate.py
+++ b/commands/generate.py
@@ -80,8 +80,6 @@ class Command(_BaseCommand):
               UTS.label(config.VARS.application)
         logger.info(msg)
 
-        status = RCO._KO_STATUS
-
         # verify that YACSGEN is available
         rc = check_yacsgen(config, options.yacsgen, logger)
         if not rc.isOk():
@@ -99,57 +97,69 @@ class Command(_BaseCommand):
         products = config.APPLICATION.products
         if options.products:
             products = options.products
-
-        details = []
-        nbgen = 0
-
-        context = build_context(config, logger)
-        lprod = UTS.label(product)
+
+        rc = build_context(config, logger)
+        if not rc.isOk():
+            return rc
+
+        # ok
+        context = rc.getValue()
+        res = []
         for product in products:
-            header = _("Generating %s") % lprod
-            header += " %s " % ("."
* (20 - len(product))) - logger.info(header) - - if product not in config.PRODUCTS: - logger.error(_("Unknown product %s") % lprod) - continue - - pi = PROD.get_product_config(config, product) - if not PROD.product_is_generated(pi): - logger.info(_("not a generated product %s") % lprod) - continue - - nbgen += 1 - try: - result = generate_component_list(config, pi, context, logger) - except Exception as exc: - result = str(exc) - - if result != RCO._OK_STATUS: - details.append([product, result]) - - if len(details) != 0: - msg = _("The following modules were not generated correctly:\n") - for d in details: - msg += " %s: %s\n" % (d[0], d[1]) - logger.error(msg) - return RCO.ReturnCode("KO", msg) + lprod = UTS.label(product) + header = _("Generating %s") % lprod + logger.info(header) + + if product not in config.PRODUCTS: + rc = RCO.ReturnCode("KO", "Unknown product %s" % lprod) + res.append(rc) + continue + + pi = PROD.get_product_config(config, product) + if not PROD.product_is_generated(pi): + rc = RCO.ReturnCode("KO", "Not a generated product %s" % lprod) + res.append(rc) + continue + + rc = self.generate_component_list(pi, context) + res.append(rc) + + good_result = sum(1 for r in res if r.isOk()) + fails = ["\n" + r.getWhy() for r in res if not r.isOk()] + nbExpected = len(products) + msgCount = "(%d/%d)" % (good_result, nbExpected) + if good_result == nbExpected: + status = "OK" + msg = _("command generate") + logger.info("\n%s %s: <%s>" % (msg, msgCount, status)) else: - return RCO.ReturnCode("OK", "command generate done") - + status = "KO" + msg = _("command generate, some products have failed") + logger.error(msg + "".join(fails)) + logger.info("\n%s %s: <%s>" % (msg, msgCount, status)) + + return RCO.ReturnCode(status, "%s %s" % (msg, msgCount)) -def generate_component_list(config, product_info, context, logger): + def generate_component_list(self, product_info, context): """returns list of ReturnCode of elementary generate_component""" + # shortcuts + logger = self.getLogger() + res = [] for compo in PROD.get_product_components(product_info): - header = " %s ... " % UTS.label(compo) - rc = generate_component(config, compo, product_info, context, header, logger) - res.append(rc) - logger.info("%s %s" % (header, rc)) + header = " %s ... 
" % UTS.label(compo) + rc = self.generate_component(compo, product_info, context) + res.append(rc) + logger.info("%s %s" % (header, rc)) + res = RCO.ReturnCodeFromList(res) return res -def generate_component(config, compo, product_info, context, header, logger): + def generate_component(self, compo, product_info, context): """get from config include file name and librairy name, or take default value""" + # shortcuts + logger = self.getLogger() + config = self.getConfig() + if "hxxfile" in product_info: hxxfile = product_info.hxxfile else: @@ -200,17 +210,21 @@ def generate_component(config, compo, product_info, context, header, logger): curdir = os.curdir os.chdir(generate_dir) + ################################################ # inline class to override bootstrap method import module_generator as MG class sat_generator(MG.Generator): - # old bootstrap for automake (used if salome version <= 7.4) - def bootstrap(self, source_dir, logger): - # replace call to default bootstrap() by using subprocess Popen - cmd = "sh autogen.sh" - rc = UTS.Popen(cmd, cwd=source_dir, logger=logger) - rc.raiseIfKo() - return rc + def bootstrap(self, source_dir, logger): + """ + old bootstrap for automake (used if salome version <= 7.4) + replace call to default bootstrap() by using subprocess Popen + """ + cmd = "sh autogen.sh" + rc = UTS.Popen(cmd, cwd=source_dir, logger=logger) + rc.raiseIfKo() + return rc + ################################################ # determine salome version VersionSalome = UTS.get_salome_version(config) @@ -249,7 +263,6 @@ def generate_component(config, compo, product_info, context, header, logger): # go back to previous directory os.chdir(curdir) - # do the compilation using the builder object rc = builder.prepare() if not rc.isOk(): return rc @@ -314,25 +327,28 @@ def check_module_generator(directory=None): """Check if module_generator is available. :param directory: (str) The directory of YACSGEN. - :return: (str) - The YACSGEN path if the module_generator is available, else None + :return: (RCO.ReturnCode) + with value The YACSGEN path if the module_generator is ok """ undo = False if directory is not None and directory not in sys.path: - sys.path.insert(0, directory) - undo = True + sys.path.insert(0, directory) + undo = True res = None try: - #import module_generator - info = imp.find_module("module_generator") - res = info[1] + #import module_generator + info = imp.find_module("module_generator") + res = info[1] except ImportError: - if undo: - sys.path.remove(directory) - res = None + if undo: + sys.path.remove(directory) + res = None - return res + if res is None: + return RCO.ReturnCode("KO", "module_generator.py not found", res) + else: + return RCO.ReturnCode("KO", "module_generator.py found", res) def check_yacsgen(config, directory, logger): """Check if YACSGEN is available. @@ -344,42 +360,39 @@ def check_yacsgen(config, directory, logger): with value The path to yacsgen directory if ok """ # first check for YACSGEN (command option, then product, then environment) - yacsgen_dir = None - yacs_src = "?" 
    if directory is not None:
-        yacsgen_dir = directory
-        yacs_src = _("Using YACSGEN from command line")
+      yacsgen_dir = directory
+      yacs_src = _("Using YACSGEN from command line")
    elif 'YACSGEN' in config.APPLICATION.products:
-        yacsgen_info = PROD.get_product_config(config, 'YACSGEN')
-        yacsgen_dir = yacsgen_info.install_dir
-        yacs_src = _("Using YACSGEN from application")
+      yacsgen_info = PROD.get_product_config(config, 'YACSGEN')
+      yacsgen_dir = yacsgen_info.install_dir
+      yacs_src = _("Using YACSGEN from application")
    elif os.environ.has_key("YACSGEN_ROOT_DIR"):
-        yacsgen_dir = os.getenv("YACSGEN_ROOT_DIR")
-        yacs_src = _("Using YACSGEN from environment")
+      yacsgen_dir = os.getenv("YACSGEN_ROOT_DIR")
+      yacs_src = _("Using YACSGEN from environment")
+    else:
+      return RCO.ReturnCode("KO", "The generate command requires YACSGEN.")
 
-    if yacsgen_dir is None:
-        RCO.ReturnCode("KO", _("The generate command requires YACSGEN."))
-
    logger.info(" %s in %s" % (yacs_src, yacsgen_dir))
    if not os.path.exists(yacsgen_dir):
-        msg = _("YACSGEN directory not found: '%s'") % yacsgen_dir
+        msg = _("YACSGEN directory not found: %s") % yacsgen_dir
        RCO.ReturnCode("KO", msg)
 
    # load module_generator
-    c = check_module_generator(yacsgen_dir)
-    if c is not None:
-        return RCO.ReturnCode("OK", "check_module_generator on %s" % yacsgen_dir, c)
+    rc = check_module_generator(yacsgen_dir)
+    if rc.isOk():
+      return rc
 
    pv = os.getenv("PYTHON_VERSION")
    if pv is None:
-        python_info = PROD.get_product_config(config, "Python")
-        pv = '.'.join(python_info.version.split('.')[:2])
+      python_info = PROD.get_product_config(config, "Python")
+      pv = '.'.join(python_info.version.split('.')[:2])
    assert pv is not None, "$PYTHON_VERSION not defined"
    yacsgen_dir = os.path.join(yacsgen_dir, "lib", "python%s" % pv, "site-packages")
-    c = check_module_generator(yacsgen_dir)
-    if c is not None:
-        return RCO.ReturnCode("OK", "check_module_generator on %s" % yacsgen_dir, c)
+    rc = check_module_generator(yacsgen_dir)
+    if rc.isOk():
+      return rc
 
-    return RCO.ReturnCode("KO", _("The python module module_generator was not found in YACSGEN"))
+    return RCO.ReturnCode("KO", "The python module module_generator was not found in YACSGEN")
diff --git a/commands/init.py b/commands/init.py
index 9653e25..bcaf866 100644
--- a/commands/init.py
+++ b/commands/init.py
@@ -17,6 +17,7 @@
 # License along with this library; if not, write to the Free Software
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 
+import os
 
 import src.debug as DBG
 import src.returnCode as RCO
@@ -78,29 +79,30 @@ class Command(_BaseCommand):
         # Print some informations
         logger.info(_('Local Settings of SAT %s') % UTS.label(config.VARS.salometoolsway))
 
-        res = 0
-
+        res = []
         # Set the options corresponding to a directory
         for opt in [("base" , options.base),
                     ("workdir", options.workdir),
                     ("log_dir", options.log_dir),
                     ("archive_dir", options.archive_dir)]:
-            key, value = opt
-            if value:
-                res_check = check_path(value, logger)
-                res += res_check
-                if res_check == 0:
-                    res_set = set_local_value(config, key, value, logger)
-                    res += res_set
+          key, value = opt
+          if value:
+            rc = check_path(value, logger)
+            res.append(rc)
+            if rc.isOk():
+              rc = set_local_value(config, key, value, logger)
+              res.append(rc)
 
         # Set the options corresponding to an informative value
         for opt in [("VCS", options.VCS), ("tag", options.tag)]:
-            key, value = opt
-            res_set = set_local_value(config, key, value, logger)
-            res += res_set
+          key, value = opt
+          rc = set_local_value(config, key, value, logger)
+
res.append(rc) - display_local_values(config, logger) + msg = get_str_local_values(config) + logger.info(msg) + res = RCO.ReturnCodeFromList(res) return res @@ -111,42 +113,40 @@ def set_local_value(config, key, value, logger): :param key: (str) The key from which to change the value. :param value: (str) The path to change. :param logger: (Logger) The logger instance. - :return: (int) 0 if all is OK, else 1 + :return: (RCO.ReturnCode) """ local_file_path = os.path.join(config.VARS.datadir, "local.pyconf") # Update the local.pyconf file try: - local_cfg = PYCONF.Config(local_file_path) - local_cfg.LOCAL[key] = value - ff = open(local_file_path, 'w') - local_cfg.__save__(ff, 1) - ff.close() - if key != "log_dir": - config.LOCAL[key] = value + local_cfg = PYCONF.Config(local_file_path) + local_cfg.LOCAL[key] = value + with open(local_file_path, 'w') as ff: + local_cfg.__save__(ff, 1) + if key != "log_dir": + config.LOCAL[key] = value except Exception as e: - err = str(e) - msg = _("Unable to update the local.pyconf file: %s\n") % err - logger.error(msg) - return RCO.ReturnCode("KO", msg) + err = str(e) + msg = "Unable to update the local.pyconf file: %s" % str(e) + logger.error(msg) + return RCO.ReturnCode("KO", msg) return RCO.ReturnCode("OK") -def display_local_values(config, logger): - """Display the base path +def get_str_local_values(config): + """get string to display the base path :param config: (Config) The global configuration. - :param key: (str) The key from which to change the value. - :param logger: (Logger) The logger instance. + :return: (str) with infos from config """ - info = [("base", config.LOCAL.base), - ("workdir", config.LOCAL.workdir), - ("log_dir", config.LOCAL.log_dir), - ("archive_dir", config.LOCAL.archive_dir), - ("VCS", config.LOCAL.VCS), - ("tag", config.LOCAL.tag)] - UTS.logger_info_tuples(logger, info) - - return 0 + loc = config.LOCAL + info = [("base", loc.base), + ("workdir", loc.workdir), + ("log_dir", loc.log_dir), + ("archive_dir", loc.archive_dir), + ("VCS", loc.VCS), + ("tag", loc.tag)] + res = UTS.formatTuples(info) + return res def check_path(path_to_check, logger): """Verify that the given path is not a file and can be created. @@ -155,28 +155,23 @@ def check_path(path_to_check, logger): :param logger: (Logger) The logger instance. 
""" if path_to_check == "default": - return 0 + return RCO.ReturnCode("OK", "check_path default") - # Get the path - path = UTS.Path(path_to_check) # If it is a file, do nothing and return error - if path.isfile(): - msg = _("""\ + if os.path.isfile(path_to_check): + msg = _("""\ The given path is a file: %s -Please provide a path to a directory\n""") % UTS.blue(path_to_check) - logger.error(msg) - return 1 +Please provide a path to a directory""") % UTS.blue(path_to_check) + logger.error(msg) + return RCO.ReturnCode("KO", "%s have to be directory, is file" % path_to_check) # Try to create the given path try: - UTS.ensure_path_exists(str(path)) + UTS.ensure_path_exists(path_to_check) except Exception as e: - msg = _("""\ -Unable to create the directory %s: - -%s\n""") % (UTS.blue(str(path)), UTS.yellow(e)) - logger.error(msg) - return 1 + msg = "Unable to create the directory %s:\n%s" % (UTS.blue(path_to_check), UTS.yellow(str(e))) + logger.error(msg) + return RCO.ReturnCode("KO", "Unable to create the directory %s" % path_to_check) - return 0 + return RCO.ReturnCode("OK", "check_path %s" % path_to_check) diff --git a/commands/jobs.py b/commands/jobs.py index fa8a03a..775ae9e 100644 --- a/commands/jobs.py +++ b/commands/jobs.py @@ -185,8 +185,8 @@ Use the --list option to get the possible files.\n""") % config_file for path in l_conf_files_path]) + ".pyconf" path_pyconf = UTS.get_tmp_filename(config, name_pyconf) #Save config - f = file( path_pyconf , 'w') - config_jobs.__save__(f) + with open(path_pyconf , 'w') as f: + config_jobs.__save__(f) # log the paramiko problems log_dir = UTS.get_log_path(config) @@ -230,7 +230,7 @@ Use the --list option to get the possible files.\n""") % config_file logger, file_boards = options.input_boards) - logger.debug("\n\n") + logger.debug("") # Display the list of the xml files logger.info(("List of published files:\n%s\n") % gui.xml_global_file.logFile) @@ -315,8 +315,8 @@ class Machine(object): def connect(self, logger): """Initiate the ssh connection to the remote machine - :param logger: The logger instance - :return: None + :param logger: (Logger) The logger instance + :return: (RCO.ReturnCode) OK/KO and why as message """ self._connection_successful = False @@ -328,37 +328,40 @@ class Machine(object): username=self.user, password = self.password) except self.paramiko.AuthenticationException: - message = RCO._KO_STATUS + _("Authentication failed") + rc = RCO.ReturnCode("KO", _("Authentication failed")) except self.paramiko.BadHostKeyException: - message = (RCO._KO_STATUS + _("The server's host key could not be verified")) + rc = RCO.ReturnCode("KO", _("The server's host key could not be verified")) except self.paramiko.SSHException: - message = ( _("SSHException error connecting or establishing an SSH session")) + rc = RCO.ReturnCode("KO", _("SSHException error connecting or establishing an SSH session")) except: - message = ( _("Error connecting or establishing an SSH session")) + rc = RCO.ReturnCode("KO", _("Error connecting or establishing an SSH session")) else: self._connection_successful = True - message = "" - return message + rc = RCO.ReturnCode("OK", "connecting SSH session done on %s" % self.host) + return rc def successfully_connected(self, logger): """ Verify if the connection to the remote machine has succeed :param logger: The logger instance - :return: (bool) True if the connection has succeed, False if not + :return: (RCO.ReturnCode) OK/KO and why as message """ if self._connection_successful == None: - message = _("""\ + msg = _("""\ 
Ask if the connection (name: %(1)s host: %(2)s, port: %(3)s, user: %(4)s) is OK whereas there were no connection request""" % \ {"1": self.name, "2": self.host, "3": self.port, "4": self.user} ) - logger.critical(UTS.red(message)) + logger.critical(UTS.red(msg)) return self._connection_successful def copy_sat(self, sat_local_path, job_file): - """Copy salomeTools to the remote machine in self.sat_path""" - res = 0 + """ + Copy salomeTools to the remote machine in self.sat_path + + :return: (RCO.ReturnCode) OK/KO and why as message + """ try: # open a sftp connection self.sftp = self.ssh.open_sftp() @@ -370,8 +373,9 @@ whereas there were no connection request""" % \ # on the remote machine remote_job_file_name = ".%s" % os.path.basename(job_file) self.sftp.put(job_file, os.path.join(self.sat_path, remote_job_file_name)) + res = RCO.ReturnCode("OK", "copy sat done on %s" % self.host) except Exception as e: - res = str(e) + res = RCO.ReturnCode("KO", "copy sat problem on %s\n%s" % (self.host, str(e))) self._connection_successful = False return res @@ -1007,7 +1011,7 @@ The job will not be launched. step = "SSH connection" self.logger.info( begin_line + endline + step) # the call to the method that initiate the ssh connection - msg = machine.connect(self.logger) + msg = machine.connect() # Copy salomeTools to the remote machine if machine.successfully_connected(self.logger): @@ -1054,10 +1058,7 @@ The job will not be launched. (begin_line, endline, "", _("Copy of SAT failed: %s") % res_copy)) else: - self.logger.info('\r%s' % - ((len(begin_line)+len(endline)+20) * " ")) - self.logger.info('\r%s%s%s %s' % (begin_line, endline, "", msg)) - self.logger.info("\n") + self.logger.info("%s" % msg) self.logger.info("\n") @@ -1085,7 +1086,7 @@ The job will not be launched. Updates the lists that store the currently running jobs and the jobs that have already finished. - :return: None + :return: (bool) nb_job_finished_now > nb_job_finished_before """ jobs_finished_list = [] jobs_running_list = [] @@ -1109,7 +1110,6 @@ The job will not be launched. :return: None """ - for job in self.ljobs: if job.after is None: continue @@ -1121,7 +1121,7 @@ The job will not be launched. """Returns the job by its name. :param name: (str) a job name - :return: (Job) the job that has the name. + :return: (Job) the job that has the name, else None """ for jb in self.ljobs: if jb.name == name: diff --git a/commands/log.py b/commands/log.py index 633ab00..fd24039 100644 --- a/commands/log.py +++ b/commands/log.py @@ -104,7 +104,7 @@ class Command(_BaseCommand): nb_files_log_dir = len(glob.glob(os.path.join(logDir, "*"))) info = [("log directory", logDir), ("number of log files", nb_files_log_dir)] - UTS.logger_info_tuples(logger, info) + logger.info(UTS.formatTuples(info)) # If the clean options is invoked, # do nothing but deleting the concerned files. 
@@ -226,7 +226,7 @@ class Command(_BaseCommand): UTS.update_hat_xml(logDir, application = config.VARS.application, notShownCommands = notShownCommands) - logger.info("\n") + logger.info("") # open the hat xml in the user editor if not options.no_browser: @@ -243,7 +243,7 @@ def get_last_log_file(logDir, notShownCommands): :param logDir: (str) The directory where to search the log files :param notShownCommands: (list) the list of commands to ignore - :return: (str) the path to the last log file + :return: (str) the path to the last log file, None if no log file """ last = (_, 0) for fileName in os.listdir(logDir): @@ -292,12 +292,12 @@ def print_log_command_in_terminal(filePath, logger): for attrib in dAttrText: lAttrText.append((attrib, dAttrText[attrib])) - UTS.logger_info_tuples(logger, lAttrText) + logger.info(UTS.formatTuples(lAttrText)) # Get the traces command_traces = xmlRead.get_node_text('Log') # Print it if there is any if command_traces: - msg = _("Here are the command traces :\n%s\n") % command_traces + msg = _("Here are the command traces :\n%s") % command_traces logger.info(msg) def getMaxFormat(aListOfStr, offset=1): @@ -320,6 +320,7 @@ def show_last_logs(logger, config, log_dirs): if maxLen > 33: nb_cols = 2 if maxLen > 50: nb_cols = 1 col_size = (nb / nb_cols) + 1 + lmsg = [] for index in range(0, col_size): msg = "" for i in range(0, nb_cols): @@ -329,8 +330,9 @@ def show_last_logs(logger, config, log_dirs): str_indice = UTS.label("%2d" % (k+1)) log_name = l msg += fmt2 % (str_indice, log_name) - logger.info(msg + "\n") - + lmsg.append(msg) + logger.info("\n".join(lmsg)) + # loop till exit x = -1 while (x < 0): @@ -338,6 +340,8 @@ def show_last_logs(logger, config, log_dirs): if x > 0: product_log_dir = os.path.join(log_dir, log_dirs[x-1]) show_product_last_logs(logger, config, product_log_dir) + + return RCO.ReturnCode("OK", "show_last_logs done") def show_product_last_logs(logger, config, product_log_dir): """Show last compilation logs of a product""" @@ -386,5 +390,5 @@ def ask_value(nb): x = -1 except: x = -1 - + return x diff --git a/commands/package.py b/commands/package.py index 2e2260a..67e548e 100644 --- a/commands/package.py +++ b/commands/package.py @@ -44,6 +44,14 @@ SOURCE = "Source" PROJECT = "Project" SAT = "Sat" +_CHMOD_STAT = stat.S_IRUSR | \ + stat.S_IRGRP | \ + stat.S_IROTH | \ + stat.S_IWUSR | \ + stat.S_IXUSR | \ + stat.S_IXGRP | \ + stat.S_IXOTH + ARCHIVE_DIR = "ARCHIVES" PROJECT_DIR = "PROJECT" @@ -175,7 +183,7 @@ Needs a type for the package Use one of the following options: '--binaries' '--sources' '--project' or '--salometools'\n""") logger.error(msg) - return 1 + return RCO.ReturnCode("KO", "Needs a type for the package") # The repository where to put the package if not Binary or Source package_default_path = config.LOCAL.workdir @@ -203,7 +211,7 @@ Use one of the following options: The project %s is not visible by salomeTools. Please add it in the %s file.\n""") % (options.project, local_path) logger.error(msg) - return 1 + return RCO.ReturnCode("KO", "The project %s is not visible by salomeTools" % options.project) # Remove the products that are filtered by the --without_property option if options.without_property: @@ -253,17 +261,17 @@ Cannot name the archive. 
check if at least one of the following options was selected: '--binaries' '--sources' '--project' or '--salometools'\n""") logger.error(msg) - return 1 + return RCO.ReturnCode("KO", "Cannot name the archive") path_targz = os.path.join(dir_name, archive_name + ".tgz") - logger.info(" Package path = %s\n" % UTS.blue(path_targz)) + logger.info(" Package path = %s" % UTS.blue(path_targz)) # Create a working directory for all files that are produced during the # package creation and that will be removed at the end of the command tmp_working_dir = os.path.join(config.VARS.tmp_root, config.VARS.datehour) UTS.ensure_path_exists(tmp_working_dir) - logger.debug(_("The temporary working directory: %s\n") % tmp_working_dir) + logger.debug(_("The temporary working directory: %s") % tmp_working_dir) msg = _("Preparation of files to add to the archive") logger.info(UTS.label(msg)) @@ -304,7 +312,7 @@ check if at least one of the following options was selected: d_paths_to_substitute, "install_bin.sh") d_files_to_add.update({"install_bin" : (file_install_bin, "install_bin.sh")}) - logger.debug("substitutions to be done later:\n%s\n" % str(d_paths_to_substitute)) + logger.debug("substitutions to be done later:\n%s" % str(d_paths_to_substitute)) else: # --salomeTool option is not considered when --sources is selected, as this option @@ -317,9 +325,9 @@ check if at least one of the following options was selected: d_files_to_add.update(project_package(options.project, tmp_working_dir)) if not(d_files_to_add): - msg = _("Empty dictionary to build the archive.\n") + msg = _("Empty dictionary to build the archive.") logger.error(msg) - return 1 + return RCO.ReturnCode("KO", msg) # Add the README file in the package local_readme_tmp_path = add_readme(config, options, tmp_working_dir) @@ -329,39 +337,37 @@ check if at least one of the following options was selected: if options.add_files: for file_path in options.add_files: if not os.path.exists(file_path): - msg = _("The file %s is not accessible.\n") % file_path + msg = _("The file %s is not accessible.") % file_path + logger.warning(msg) continue file_name = os.path.basename(file_path) d_files_to_add[file_name] = (file_path, file_name) msg = UTS.label(_("Actually do the package")) - logger.info("\n%s\n" % msg) + logger.info("\n%s" % msg) try: # Creating the object tarfile - tar = tarfile.open(path_targz, mode='w:gz') - - # get the filtering function if needed - filter_function = exclude_VCS_and_extensions - - # Add the files to the tarfile object - res = add_files(tar, archive_name, d_files_to_add, logger, f_exclude=filter_function) - tar.close() + with tarfile.open(path_targz, mode='w:gz') as tar: + # get the filtering function if needed + filter_function = exclude_VCS_and_extensions + # Add the files to the tarfile object + res = add_files(tar, archive_name, d_files_to_add, logger, f_exclude=filter_function) except KeyboardInterrupt: - logger.critical(UTS.red(_("KeyboardInterrupt forced interruption\n"))) - logger.info(_("Removing the temporary working directory ... 
")) + msg = UTS.red(_("KeyboardInterrupt forced interruption")) + msg += "\n" + _("Removing the temporary working directory ...") + logger.critical(msg) # remove the working directory shutil.rmtree(tmp_working_dir) logger.info("") - return 1 + return RCO.ReturnCode("KO", "KeyboardInterrupt forced interruption") # remove the working directory shutil.rmtree(tmp_working_dir) # Print again the path of the package - logger.info(" Package path = %s\n" % UTS.blue(path_targz)) - + logger.info(" Package path = %s" % UTS.blue(path_targz)) return res @@ -382,7 +388,7 @@ def add_files(tar, name_archive, d_content, logger, f_exclude=None): # get the max length of the messages in order to make the display max_len = len(max(d_content.keys(), key=len)) - success = 0 + res = RCO.returnCode("OK", "all tar add files done") # loop over each directory or file stored in the d_content dictionary for name in d_content.keys(): # display information @@ -395,11 +401,11 @@ def add_files(tar, name_archive, d_content, logger, f_exclude=None): # Add it in the archive try: tar.add(local_path, arcname=in_archive, exclude=f_exclude) - logger.info("\n") + logger.info("") except Exception as e: - logger.info(" %s\n" % str(e)) - success = 1 - return success + logger.info(" %s" % str(e)) + res = RCO.returnCode("KO", "problem tar add files") + return res def exclude_VCS_and_extensions(filename): """ @@ -416,6 +422,8 @@ def exclude_VCS_and_extensions(filename): if filename.endswith(extension): return True return False + + def produce_relative_launcher(config, logger, @@ -463,14 +471,13 @@ def produce_relative_launcher(config, filepath = os.path.join(file_dir, file_name) # open the file and write into it - launch_file = open(filepath, "w") - launch_file.write(before) - # Write - writer.write_cfgForPy_file(launch_file, + with open(filepath, "w") as launch_file: + launch_file.write(before) + # Write + writer.write_cfgForPy_file(launch_file, for_package = binaries_dir_name, with_commercial=with_commercial) - launch_file.write(after) - launch_file.close() + launch_file.write(after) # Little hack to put out_dir_Path outside the strings UTS.replace_in_file(filepath, 'r"out_dir_Path', 'out_dir_Path + r"' ) @@ -480,15 +487,7 @@ def produce_relative_launcher(config, hack_for_distene_licence(filepath) # change the rights in order to make the file executable for everybody - os.chmod(filepath, - stat.S_IRUSR | - stat.S_IRGRP | - stat.S_IROTH | - stat.S_IWUSR | - stat.S_IXUSR | - stat.S_IXGRP | - stat.S_IXOTH) - + os.chmod(filepath, _CHMOD_STAT) return filepath def hack_for_distene_licence(filepath): @@ -499,25 +498,23 @@ def hack_for_distene_licence(filepath): shutil.move(filepath, filepath + "_old") fileout= filepath filein = filepath + "_old" - fin = open(filein, "r") - fout = open(fileout, "w") - text = fin.readlines() - # Find the Distene section - num_line = -1 - for i,line in enumerate(text): - if "# Set DISTENE License" in line: - num_line = i - break - if num_line == -1: - # No distene product, there is nothing to do - fin.close() - for line in text: - fout.write(line) - fout.close() - return - del text[num_line +1] - del text[num_line +1] - text_to_insert ="""\ + with open(filein, "r") as fin: + with open(fileout, "w") as fout: + text = fin.readlines() + # Find the Distene section + num_line = -1 + for i,line in enumerate(text): + if "# Set DISTENE License" in line: + num_line = i + break + if num_line == -1: + # No distene product, there is nothing to do + for line in text: + fout.write(line) + return + del text[num_line +1] + del 
text[num_line +1] + text_to_insert ="""\ import imp try: distene = imp.load_source('distene_licence', '/data/tmpsalome/salome/prerequis/install/LICENSE/dlim8.var.py') @@ -525,11 +522,9 @@ try: except: pass """ - text.insert(num_line + 1, text_to_insert) - for line in text: - fout.write(line) - fin.close() - fout.close() + text.insert(num_line + 1, text_to_insert) + for line in text: + fout.write(line) return def produce_relative_env_files(config, @@ -560,15 +555,7 @@ def produce_relative_env_files(config, UTS.replace_in_file(filepath, '"out_dir_Path', '"${out_dir_Path}' ) # change the rights in order to make the file executable for everybody - os.chmod(filepath, - stat.S_IRUSR | - stat.S_IRGRP | - stat.S_IROTH | - stat.S_IWUSR | - stat.S_IXUSR | - stat.S_IXGRP | - stat.S_IXOTH) - + os.chmod(filepath, _CHMOD_STAT) return filepath def produce_install_bin_file(config, @@ -615,15 +602,7 @@ def produce_install_bin_file(config, content = TPLATE.substitute(installbin_template_path, d) installbin_file.write(content) # change the rights in order to make the file executable for everybody - os.chmod(filepath, - stat.S_IRUSR | - stat.S_IRGRP | - stat.S_IROTH | - stat.S_IWUSR | - stat.S_IXUSR | - stat.S_IXGRP | - stat.S_IXOTH) - + os.chmod(filepath, _CHMOD_STAT) return filepath def product_appli_creation_script(config, @@ -675,20 +654,11 @@ def product_appli_creation_script(config, filled_text = text_to_fill.replace("TO BE FILLED 2", text_to_add) tmp_file_path = os.path.join(file_dir, "create_appli.py") - ff = open(tmp_file_path, "w") - ff.write(filled_text) - ff.close() + with open(tmp_file_path, "w") as ff: + ff.write(filled_text) # change the rights in order to make the file executable for everybody - os.chmod(tmp_file_path, - stat.S_IRUSR | - stat.S_IRGRP | - stat.S_IROTH | - stat.S_IWUSR | - stat.S_IXUSR | - stat.S_IXGRP | - stat.S_IXOTH) - + os.chmod(tmp_file_path,_CHMOD_STAT) return tmp_file_path def binary_package(config, logger, options, tmp_working_dir): @@ -854,12 +824,12 @@ def source_package(sat, config, logger, options, tmp_working_dir): config, logger, tmp_working_dir) - logger.info("Done\n") + logger.info("Done") # Create a project logger.info("Create the project ... 
") d_project = create_project_for_src_package(config, tmp_working_dir, options.with_vcs) - logger.info("Done\n") + logger.info("Done") # Add salomeTools tmp_sat = add_salomeTools(config, tmp_working_dir) @@ -949,9 +919,8 @@ def add_salomeTools(config, tmp_working_dir): file_or_dir) os.remove(file_path) - ff = open(local_pyconf_file, "w") - ff.write(LOCAL_TEMPLATE) - ff.close() + with open(local_pyconf_file, "w") as ff: + ff.write(LOCAL_TEMPLATE) return sat_tmp_path.path @@ -1010,12 +979,11 @@ def make_archive(prod_name, prod_info, where): :return: (str) The path of the resulting archive """ path_targz_prod = os.path.join(where, prod_name + ".tgz") - tar_prod = tarfile.open(path_targz_prod, mode='w:gz') - local_path = prod_info.source_dir - tar_prod.add(local_path, - arcname=prod_name, - exclude=exclude_VCS_and_extensions) - tar_prod.close() + with tarfile.open(path_targz_prod, mode='w:gz') as tar_prod: + local_path = prod_info.source_dir + tar_prod.add(local_path, + arcname=prod_name, + exclude=exclude_VCS_and_extensions) return path_targz_prod def create_project_for_src_package(config, tmp_working_dir, with_vcs): @@ -1035,19 +1003,12 @@ def create_project_for_src_package(config, tmp_working_dir, with_vcs): # Create in the working temporary directory the full project tree project_tmp_dir = os.path.join(tmp_working_dir, PROJECT_DIR) - products_pyconf_tmp_dir = os.path.join(project_tmp_dir, - "products") - compil_scripts_tmp_dir = os.path.join(project_tmp_dir, - "products", - "compil_scripts") - env_scripts_tmp_dir = os.path.join(project_tmp_dir, - "products", - "env_scripts") - patches_tmp_dir = os.path.join(project_tmp_dir, - "products", - "patches") - application_tmp_dir = os.path.join(project_tmp_dir, - "applications") + products_pyconf_tmp_dir = os.path.join(project_tmp_dir, "products") + compil_scripts_tmp_dir = os.path.join(project_tmp_dir, "products", "compil_scripts") + env_scripts_tmp_dir = os.path.join(project_tmp_dir, "products", "env_scripts") + patches_tmp_dir = os.path.join(project_tmp_dir, "products", "patches") + application_tmp_dir = os.path.join(project_tmp_dir, "applications") + for directory in [project_tmp_dir, compil_scripts_tmp_dir, env_scripts_tmp_dir, @@ -1058,9 +1019,8 @@ def create_project_for_src_package(config, tmp_working_dir, with_vcs): # Create the pyconf that contains the information of the project project_pyconf_name = "project.pyconf" project_pyconf_file = os.path.join(project_tmp_dir, project_pyconf_name) - ff = open(project_pyconf_file, "w") - ff.write(PROJECT_TEMPLATE) - ff.close() + with open(project_pyconf_file, "w") as ff: + ff.write(PROJECT_TEMPLATE) # Loop over the products to get there pyconf and all the scripts # (compilation, environment, patches) @@ -1235,10 +1195,10 @@ def project_package(project_file_path, tmp_working_dir): # Write the project pyconf file project_file_name = os.path.basename(project_file_path) project_pyconf_tmp_path = os.path.join(tmp_working_dir, project_file_name) - ff = open(project_pyconf_tmp_path, 'w') - ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n") - project_pyconf_cfg.__save__(ff, 1) - ff.close() + with open(project_pyconf_tmp_path, 'w') as ff: + ff.write("#!/usr/bin/env python\n#-*- coding:utf-8 -*-\n\n") + project_pyconf_cfg.__save__(ff, 1) + d_project["Project hat file"] = (project_pyconf_tmp_path, project_file_name) return d_project @@ -1249,8 +1209,7 @@ def add_readme(config, options, where): readme_path = JOIN(where, "README") with codecs.open(readme_path, "w", 'utf-8') as f: - - # templates for 
building the header + # templates for building the header readme_header=""" # This package was generated with sat $version # Date: $date diff --git a/commands/profile.py b/commands/profile.py index b6af8c0..810ab1d 100644 --- a/commands/profile.py +++ b/commands/profile.py @@ -104,9 +104,12 @@ class Command(_BaseCommand): return returnCode -# Class that overrides common.Reference -# in order to manipulate fields starting with '@' +######################################################################## class profileReference( PYCONF.Reference ): + """ + Class that overrides common.Reference + in order to manipulate fields starting with '@' + """ def __str__(self): s = self.elements[0] for tt, tv in self.elements[1:]: @@ -121,9 +124,11 @@ class profileReference( PYCONF.Reference ): else: return PYCONF.DOLLAR + s -## -# Class that overrides how fields starting with '@' are read. -class profileConfigReader( PYCONF.ConfigReader ) : +######################################################################## +class profileConfigReader( PYCONF.ConfigReader ): + """ + Class that overrides how fields starting with '@' are read. + """ def parseMapping(self, parent, suffix): if self.token[0] == PYCONF.LCURLY: self.match(PYCONF.LCURLY) @@ -140,10 +145,8 @@ class profileConfigReader( PYCONF.ConfigReader ) : return rv - -## -# Gets the profile name def get_profile_name ( options, config ): + """Gets the profile name""" if options.name : res = options.name else : @@ -195,13 +198,12 @@ def generate_profile_sources( config, options, logger ): #Run command os.environ["KERNEL_ROOT_DIR"] = kernel_root_dir os.environ["GUI_ROOT_DIR"] = gui_root_dir - res = SP.call(command, shell=True, env=os.environ, - stdout=logger.logTxtFile, stderr=SP.STDOUT) + res = UTS.Popen(command, env=os.environ, logger=logger) #Check result of command - if res != 0: - raise Exception(_("Cannot create application, code = %d\n") % res) + if not res.isOk(): + logger.error(_("Cannot create application")) else: - logger.info( _("Profile sources were generated in directory %s.\n" % prefix) ) + logger.info(_("Profile sources were generated in directory %s") % prefix) return res @@ -266,5 +268,7 @@ def update_pyconf( config, options, logger ): prf.addMapping( 'opt_depend', PYCONF.Sequence(), None ) #Save config - f = file( os.path.join( path, pyconf ) , 'w') - cfg.__save__(f) + with open( os.path.join( path, pyconf ) , 'w') as f: + cfg.__save__(f) + + return diff --git a/commands/script.py b/commands/script.py index b2d846f..73b54ad 100644 --- a/commands/script.py +++ b/commands/script.py @@ -92,10 +92,11 @@ class Command(_BaseCommand): # the right command(s) if options.nb_proc is None: options.nb_proc = 0 - good_result, results = run_script_all_products(config, products_infos, options.nb_proc, logger) + res = self.run_script_all_products(products_infos, options.nb_proc) # Print the final state nbExpected = len(products_infos) + good_result = sum(1 for r in res if r.isOk()) msgCount = "(%d/%d)" % (good_result, nbExpected) if good_result == nbExpected: status = "OK" @@ -108,7 +109,7 @@ class Command(_BaseCommand): return RCO.ReturnCode(status, "%s %s" % (msg, msgCount)) -def run_script_all_products(config, products_infos, nb_proc, logger): + def run_script_all_products(self, products_infos, nb_proc): """Execute the script in each product build directory. 
    :param config: (Config) The global configuration
@@ -117,23 +118,15 @@ def run_script_all_products(config, products_infos, nb_proc, logger):
    :param products_infos: (list)
      List of (str, Config) => (product_name, product_info)
    :param nb_proc: (int) The number of processors to use
    :param logger: (Logger)
      The logger instance to use for the display
      and logging
-    :return: (int) The number of failing commands.
+    :return: (list of ReturnCode)
    """
-    # Initialize the variables that will count the fails and success
-    results = dict()
-    good_result = 0
+    res = []
    DBG.write("run_script_all_products", [p for p, tmp in products_infos])
    for p_name_info in products_infos:
-        retcode = run_script_of_product(p_name_info, nb_proc, config, logger)
-        # show results
-        p_name, p_info = p_name_info
-        results[p_name] = retcode
-        if retcode.isOk():
-            good_result += 1
-
-    return good_result, results
-
-def run_script_of_product(p_name_info, nb_proc, config, logger):
+        res.append(self.run_script_of_product(p_name_info, nb_proc))
+    return res
+
+    def run_script_of_product(self, p_name_info, nb_proc):
    """
    Execute the proper configuration command(s)
    in the product build directory.
@@ -146,6 +139,10 @@ def run_script_of_product(p_name_info, nb_proc, config, logger):
        The logger instance to use for the display and logging
    :return: (int) 1 if it fails, else 0.
    """
+    # shortcuts
+    config = self.getConfig()
+    logger = self.getLogger()
+
    p_name, p_info = p_name_info
 
    # Logging
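
Note on the convention applied throughout this commit: every elementary step now returns an RCO.ReturnCode, callers collect those objects in a list, count successes with sum(1 for r in res if r.isOk()), and fold the list into a single status with RCO.ReturnCodeFromList. The sketch below only illustrates that convention with a stand-in class; it is not the actual src/returnCode.py implementation, and its constructor and helper names are assumptions.

# Illustrative sketch only -- a stand-in for src.returnCode (RCO).
class ReturnCode(object):
    def __init__(self, status, why="", value=None):
        self.status = status   # "OK" or "KO"
        self.why = why         # human-readable reason
        self.value = value     # optional payload (a path, a count, ...)

    def isOk(self):
        return self.status == "OK"

    def getStatus(self):
        return self.status

    def getWhy(self):
        return self.why

    def getValue(self):
        return self.value

def ReturnCodeFromList(rc_list):
    """Fold a list of ReturnCode into one: OK only if every item is OK."""
    fails = [rc.getWhy() for rc in rc_list if not rc.isOk()]
    if not fails:
        return ReturnCode("OK", "all steps OK", rc_list)
    return ReturnCode("KO", "; ".join(fails), rc_list)

# Typical command-level usage, mirroring the loops in the diffs above:
res = [ReturnCode("OK", "compile KERNEL done"),
       ReturnCode("KO", "compile GUI failed")]
good_result = sum(1 for r in res if r.isOk())  # 1
final = ReturnCodeFromList(res)                # final.isOk() is False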