# check that the command has been called with an application
UTS.check_config_has_application(config).raiseIfKo()
# Get the list of products to treat
- products_infos = get_products_list(options, config, logger)
+ products_infos = self.get_products_list(options, config)
# Print some informations
msg = _('Executing the check command in the build directories of the application')
'1': nb_products - res,
'2': nb_products })
- return res
-
-
-def get_products_list(options, cfg, logger):
- """
- method that gives the product list with their informations from
- configuration regarding the passed options.
-
- :param options: (Options) The Options instance that stores
- the commands arguments
- :param cfg: (Config) The global configuration
- :param logger: (Logger) The logger instance to use
- for the display and logging
- :return: (list) The list of (product name, product_informations).
- """
- # Get the products to be prepared, regarding the options
- if options.products is None:
- # No options, get all products sources
- products = cfg.APPLICATION.products
- else:
- # if option --products, check that all products of the command line
- # are present in the application.
- products = options.products
- for p in products:
- if p not in cfg.APPLICATION.products:
- msg = _("Product %(1)s not defined in application %(2)s") % \
- { '1': p, '2': cfg.VARS.application}
- raise Exception(msg)
-
- # Construct the list of tuple containing
- # the products name and their definition
- products_infos = PROD.get_products_infos(products, cfg)
-
- products_infos = [pi for pi in products_infos \
- if not(PROD.product_is_native(pi[1]) or PROD.product_is_fixed(pi[1])) ]
-
- return products_infos
+ return res
def check_all_products(config, products_infos, logger):
"""
# Get the list of products to treat
- products_infos = self.get_products_list(options, config, logger)
+ products_infos = self.get_products_list(options, config)
# Construct the list of directories to suppress
l_dir_to_suppress = []
UTS.logger_info_tuples(logger, info)
# Get the list of products to treat
- products_infos = get_products_list(options, config, logger)
+ products_infos = self.get_products_list(options, config)
if options.fathers:
# Extend the list with all recursive dependencies of the given products
if code != 0:
code = 1
return code
-
-
-def get_products_list(options, cfg, logger):
- """
- method that gives the product list with their informations from
- configuration regarding the passed options.
-
- :param options: (Options)
- The Options instance that stores the commands arguments
- :param cfg: (Config) The global configuration
- :param logger: (Logger)
- The logger instance to use for the display and logging
- :return: (list) The list of (product name, product_informations).
- """
- # Get the products to be prepared, regarding the options
- if options.products is None:
- # No options, get all products sources
- products = cfg.APPLICATION.products
- else:
- # if option --products, check that all products of the command line
- # are present in the application.
- products = options.products
- for p in products:
- if p not in cfg.APPLICATION.products:
- raise Exception(
- _("Product %(product)s not defined in application %(application)s") %
- { 'product': p, 'application': cfg.VARS.application} )
-
- # Construct the list of tuple containing
- # the products name and their definition
- products_infos = PROD.get_products_infos(products, cfg)
-
- products_infos = [pi for pi in products_infos if not(PROD.product_is_fixed(pi[1]))]
-
- return products_infos
def get_children(config, p_name_p_info):
l_res = []
UTS.check_config_has_application(config).raiseIfKo()
# Get the list of products to treat
- products_infos = get_products_list(options, config, logger)
+ products_infos = self.get_products_list(options, config)
# Print some informations
logger.info(_('Configuring the sources of the application %s\n') %
return res
-
-def get_products_list(options, cfg, logger):
- """
- method that gives the product list with their informations from
- configuration regarding the passed options.
-
- :param options: (Options)
- The Options instance that stores the commands arguments
- :param cfg: (Config) The global configuration
- :param logger: (Logger)
- The logger instance to use for the display and logging
- :return: (list) The list of (product name, product_informations).
- """
- # Get the products to be prepared, regarding the options
- if options.products is None:
- # No options, get all products sources
- products = cfg.APPLICATION.products
- else:
- # if option --products, check that all products of the command line
- # are present in the application.
- products = options.products
- for p in products:
- if p not in cfg.APPLICATION.products:
- raise Exception(
- _("Product %(product)s not defined in application %(application)s") %
- {'product': p, 'application': cfg.VARS.application} )
-
- # Construct the list of tuple containing
- # the products name and their definition
- products_infos = PROD.get_products_infos(products, cfg)
-
- products_infos = [pi for pi in products_infos \
- if not(PROD.product_is_native(pi[1]) or PROD.product_is_fixed(pi[1]))]
-
- return products_infos
-
def configure_all_products(config, products_infos, conf_option, logger):
"""
Execute the proper configuration commands
UTS.check_config_has_application(config).raiseIfKo()
# Get the list of products to treat
- products_infos = get_products_list(options, config, logger)
+ products_infos = self.get_products_list(options, config)
# Print some informations
logger.info(
msg = _("\nMake: <%s> (%d/%d)\n") % (final_status, nb_products - res, nb_products)
logger.info(msg)
- return RCO.ReturnCode(final_status, msg)
-
-
-def get_products_list(options, cfg, logger):
- """
- method that gives the product list with their informations from
- configuration regarding the passed options.
-
- :param options: (Options)
- The Options instance that stores the commands arguments
- :param cfg: (Config) The global configuration
- :param logger: (Logger)
- The logger instance to use for the display and logging
- :return: (list) The list of tuples (product name, product_informations).
- """
- # Get the products to be prepared, regarding the options
- if options.products is None:
- # No options, get all products sources
- products = cfg.APPLICATION.products
- else:
- # if option --products, check that all products of the command line
- # are present in the application.
- products = options.products
- for p in products:
- if p not in cfg.APPLICATION.products:
- raise Exception(_("Product %(product)s "
- "not defined in application %(application)s") %
- { 'product': p, 'application': cfg.VARS.application} )
-
- # Construct the list of tuple containing
- # the products name and their definition
- products_infos = PROD.get_products_infos(products, cfg)
-
- products_infos = [pi for pi in products_infos \
- if not(PROD.product_is_native(pi[1]) or PROD.product_is_fixed(pi[1]))]
-
- return products_infos
+ return RCO.ReturnCode(final_status, msg)
def make_all_products(config, products_infos, make_option, logger):
"""
UTS.check_config_has_application(config).raiseIfKo()
# Get the list of products to treat
- products_infos = get_products_list(options, config, logger)
+ products_infos = self.get_products_list(options, config)
# Print some informations
logger.info(_('Executing the make install command in the build directories of the application %s\n') % \
logger.info(msg)
return RCO.ReturnCode(final_status, msg)
-
-
-def get_products_list(options, cfg, logger):
- """
- method that gives the product list with their informations from
- configuration regarding the passed options.
-
- :param options: (Options)
- The Options instance that stores the commands arguments
- :param cfg: (Config) The global configuration
- :param logger: (Logger)
- The logger instance to use for the display and logging
- :return: (list) The list of (product name, product_informations).
- """
- # Get the products to be prepared, regarding the options
- if options.products is None:
- # No options, get all products sources
- products = cfg.APPLICATION.products
- else:
- # if option --products, check that all products of the command line
- # are present in the application.
- products = options.products
- for p in products:
- if p not in cfg.APPLICATION.products:
- raise Exception(_("Product %(product)s "
- "not defined in application %(application)s") %
- { 'product': p, 'application': cfg.VARS.application} )
-
- # Construct the list of tuple containing
- # the products name and their definition
- products_infos = PROD.get_products_infos(products, cfg)
-
- products_infos = [pi for pi in products_infos \
- if not(PROD.product_is_native(pi[1]) or PROD.product_is_fixed(pi[1]))]
-
- return products_infos
def makeinstall_all_products(config, products_infos, logger):
"""
logger.info(' workdir = %s\n\n"', UTS.blue(config.APPLICATION.workdir))
# Get the products list with products informations regarding the options
- products_infos = commands.prepare.get_products_list(options, config, logger)
+ products_infos = self.get_products_list(options, config)
# Get the maximum name length in order to format the terminal display
max_product_name_len = 1
# check that the command has been called with an application
UTS.check_config_has_application(config).raiseIfKo()
- products_infos = self.get_products_list(options, config, logger)
+ products_infos = self.get_products_list(options, config)
# Construct the arguments to pass to the clean, source and patch commands
args_appli = config.VARS.application + ' '
UTS.check_config_has_application(config).raiseIfKo()
# Get the list of products to treat
- products_infos = get_products_list(options, config, logger)
+ products_infos = self.get_products_list(options, config)
# Print some informations
msg = ('Executing the script in the build directories of the application %s\n') % \
(final_status, nb_products - res, nb_products) )
return res
-
-
-def get_products_list(options, cfg, logger):
- """
- Gives the product list with their informations from
- configuration regarding the passed options.
-
- :param options: (Options)
- The Options instance that stores the commands arguments
- :param cfg: (Config) The global configuration
- :param logger: (Logger)
- The logger instance to use for the display and logging
- :return: (list) The list of (product name, product_informations).
- """
- # Get the products to be prepared, regarding the options
- if options.products is None:
- # No options, get all products sources
- products = cfg.APPLICATION.products
- else:
- # if option --products, check that all products of the command line
- # are present in the application.
- products = options.products
- for p in products:
- if p not in cfg.APPLICATION.products:
- raise Exception(_("Product %(product)s "
- "not defined in application %(application)s") % \
- { 'product': p, 'application': cfg.VARS.application} )
-
- # Construct the list of tuple containing
- # the products name and their definition
- products_infos = PROD.get_products_infos(products, cfg)
-
- products_infos = [pi for pi in products_infos \
- if not(PROD.product_is_native(pi[1]) or PROD.product_is_fixed(pi[1]))]
-
- return products_infos
def run_script_all_products(config, products_infos, nb_proc, logger):
"""Execute the script in each product build directory.
logger.info(" workdir = %s\n" % config.APPLICATION.workdir)
# Get the products list with products informations regarding the options
- products_infos = self.get_products_list(options, config, logger)
+ products_infos = self.get_products_list(options, config)
# Call to the function that gets all the sources
good_result, results = get_all_product_sources(config,
if windll is not None:
winterm = WinTerm()
+
def is_stream_closed(stream):
return not hasattr(stream, 'closed') or stream.closed
def _getConfig(self, appliToLoad):
- """
- Load the configuration (all pyconf)
- and returns the config from some files .pyconf
- """
- if self.runner.config is not None:
- raise Exception("config existing yet in '%s' instance" % self.runner.getClassName())
-
-
- # read the configuration from all the pyconf files
- cfgManager = getConfigManager() # commands.config.ConfigManager()
- DBG.write("appli to load", appliToLoad, True)
- config = cfgManager.get_config(datadir=self.runner.datadir,
- application=appliToLoad,
- options=self.runner.options,
- command=self.name)
- self.runner.nameAppliLoaded = appliToLoad
- # DBG.write("appli loaded", config, True)
-
- # Set the verbose mode if called
- DBG.tofix("verbose/batch/logger_add_link -1/False/None", True)
- verbose = -1
- batch = False
- logger_add_link = None
- if verbose > -1:
- verbose_save = self.options.output_verbose_level
- self.options.__setattr__("output_verbose_level", verbose)
-
- # Set batch mode if called
- if batch:
- batch_save = self.options.batch
- self.options.__setattr__("batch", True)
-
- # set output level
- if self.runner.options.output_verbose_level is not None:
- config.USER.output_verbose_level = self.runner.options.output_verbose_level
- if config.USER.output_verbose_level < 1:
- config.USER.output_verbose_level = 0
- silent = (config.USER.output_verbose_level == 0)
-
- # create log file
- micro_command = False
- if logger_add_link:
- micro_command = True
- logger_command = UTS.getNewLogger(config,
- silent_sysstd=silent,
- all_in_terminal=self.runner.options.all_in_terminal,
- micro_command=micro_command)
-
- # Check that the path given by the logs_paths_in_file option
- # is a file path that can be written
- if self.runner.options.logs_paths_in_file and not micro_command:
- try:
- self.options.logs_paths_in_file = os.path.abspath(
- self.options.logs_paths_in_file)
- dir_file = os.path.dirname(self.options.logs_paths_in_file)
- if not os.path.exists(dir_file):
- os.makedirs(dir_file)
- if os.path.exists(self.options.logs_paths_in_file):
- os.remove(self.options.logs_paths_in_file)
- file_test = open(self.options.logs_paths_in_file, "w")
- file_test.close()
- except Exception as e:
- msg = _("""\
+ """
+ Load the configuration (all .pyconf files)
+ and return the resulting config.
+ """
+ if self.runner.config is not None:
+ raise Exception("config already exists in '%s' instance" % self.runner.getClassName())
+
+
+ # read the configuration from all the pyconf files
+ cfgManager = getConfigManager() # commands.config.ConfigManager()
+ DBG.write("appli to load", appliToLoad, True)
+ config = cfgManager.get_config(datadir=self.runner.datadir,
+ application=appliToLoad,
+ options=self.runner.options,
+ command=self.name)
+ self.runner.nameAppliLoaded = appliToLoad
+ # DBG.write("appli loaded", config, True)
+
+ # Set the verbose mode if called
+ DBG.tofix("verbose/batch/logger_add_link -1/False/None", True)
+ verbose = -1
+ batch = False
+ logger_add_link = None
+ if verbose > -1:
+ verbose_save = self.options.output_verbose_level
+ self.options.__setattr__("output_verbose_level", verbose)
+
+ # Set batch mode if called
+ if batch:
+ batch_save = self.options.batch
+ self.options.__setattr__("batch", True)
+
+ # set output level
+ if self.runner.options.output_verbose_level is not None:
+ config.USER.output_verbose_level = self.runner.options.output_verbose_level
+ if config.USER.output_verbose_level < 1:
+ config.USER.output_verbose_level = 0
+ silent = (config.USER.output_verbose_level == 0)
+
+ # create log file
+ micro_command = False
+ if logger_add_link:
+ micro_command = True
+ logger_command = UTS.getNewLogger(config,
+ silent_sysstd=silent,
+ all_in_terminal=self.runner.options.all_in_terminal,
+ micro_command=micro_command)
+
+ # Check that the path given by the logs_paths_in_file option
+ # is a file path that can be written
+ if self.runner.options.logs_paths_in_file and not micro_command:
+ try:
+ self.options.logs_paths_in_file = os.path.abspath(
+ self.options.logs_paths_in_file)
+ dir_file = os.path.dirname(self.options.logs_paths_in_file)
+ if not os.path.exists(dir_file):
+ os.makedirs(dir_file)
+ if os.path.exists(self.options.logs_paths_in_file):
+ os.remove(self.options.logs_paths_in_file)
+ file_test = open(self.options.logs_paths_in_file, "w")
+ file_test.close()
+ except Exception as e:
+ msg = _("""\
The logs_paths_in_file option will not be taken into account.
Here is the error:""")
- logger_command.warning("%s\n%s" % (msg, str(e)))
- self.options.logs_paths_in_file = None
-
- return config
-
-def get_products_list(self, options, cfg, logger):
- """
- Gives the product list with their informations from
- configuration regarding the passed options.
-
- :param options: (Options)
- The Options instance that stores the commands arguments
- :param config: (Config) The global configuration
- :param logger: (Logger)
- The logger instance to use for the display and logging
- :return: (list) The list of (product name, product_informations).
- """
- # Get the products to be prepared, regarding the options
- if options.products is None:
- # No options, get all products sources
- products = cfg.APPLICATION.products
- else:
- # if option --products, check that all products of the command line
- # are present in the application.
- products = options.products
- for p in products:
- if p not in cfg.APPLICATION.products:
- raise Exception(_("Product %(product)s "
- "not defined in application %(application)s") %
- { 'product': p, 'application': cfg.VARS.application} )
-
- # Construct the list of tuple containing
- # the products name and their definition
- products_infos = PROD.get_products_infos(products, cfg)
-
- return products_infos
\ No newline at end of file
+ logger_command.warning("%s\n%s" % (msg, str(e)))
+ self.options.logs_paths_in_file = None
+
+ return config
+
+def get_products_list(options, cfg):
+ """
+ Give the product list with their information from the
+ configuration, according to the passed options.
+
+ :param options: (Options)
+   The Options instance that stores the command arguments
+ :param cfg: (Config) The global configuration
+ :return: (list) The list of (product name, product_informations).
+ """
+ # Get the products to be prepared, regarding the options
+ if options.products is None:
+ # No options, get all products sources
+ products = cfg.APPLICATION.products
+ else:
+ # if option --products, check that all products of the command line
+ # are present in the application.
+ products = options.products
+ for p in products:
+ if p not in cfg.APPLICATION.products:
+ raise Exception(_("Product %(product)s "
+ "not defined in application %(application)s") % \
+ { 'product': p, 'application': cfg.VARS.application} )
+
+ # Construct the list of tuple containing
+ # the products name and their definition
+ products_infos = PROD.get_products_infos(products, cfg)
+
+ products_infos = [pi for pi in products_infos \
+ if not(PROD.product_is_native(pi[1]) or PROD.product_is_fixed(pi[1]))]
+
+ return products_infos
\ No newline at end of file
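
For context, here is a minimal usage sketch of the factored-out helper as the commands above now call it. The command class name and the options accessor are assumptions for illustration; only get_products_list itself and the CFGMGR delegation come from other hunks of this diff:

    import src.configManager as CFGMGR  # assumed alias, as used elsewhere in this diff

    class ExampleCommand(_BaseCmd):  # hypothetical command, _BaseCmd is the command base class of this code base
        def get_products_list(self, options, config):
            # single shared implementation; native and fixed products are filtered out there
            return CFGMGR.get_products_list(options, config)

        def run(self, cmd_arguments):
            config = self.getConfig()    # accessor shown elsewhere in this diff
            options = self.getOptions()  # assumed accessor
            # list of (product_name, product_info) tuples
            products_infos = self.get_products_list(options, config)
            for p_name, p_info in products_infos:
                pass  # treat each product build directory
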
def saveConfigDbg(config, aStream, indent=0, path=""):
"""pyconf returns multilines (path expression evaluation) for debug"""
- _saveConfigRecursiveDbg(config, aStream, indent, path)
+ _saveConfigRecursiveDbg(config, aStream, indent, path, 0)
aStream.close() # as config.__save__()
-def _saveConfigRecursiveDbg(config, aStream, indent, path):
+def _saveConfigRecursiveDbg(config, aStream, indent, path, nb):
"""pyconf inspired from Mapping.__save__"""
debug = False
+ nbp = nb + 1 # recursion depth
if indent <= 0:
indentp = 0
else:
- indentp = indentp + 2
+ indentp = indent + 2
+
+ if nbp > 10: # protection against infinite recursion
+ # raise Exception("!!! ERROR: Circular reference after %s" % aStream.getvalue())
+ # raise Exception("!!! ERROR: Circular reference %s" % path)
+ aStream.write("<red>!!! ERROR: Circular reference after %s<reset>\n" % path)
+ return
+
indstr = indent * ' ' # '':no indent, ' ':indent
strType = str(type(config))
if debug: print "saveDbg Type", path, strType
if "Sequence" in strType:
for i in range(len(config)):
- _saveConfigRecursiveDbg(config[i], aStream, indentp, path+"[%i]" % i)
+ _saveConfigRecursiveDbg(config[i], aStream, indentp, path+"[%i]" % i, nbp)
return
'''
if "Reference" in strType:
strType = str(type(value))
if debug: print 'strType', path, key, strType
if "Config" in strType:
- _saveConfigRecursiveDbg(value, aStream, indentp, path+"."+key)
+ _saveConfigRecursiveDbg(value, aStream, indentp, path+"."+key, nbp)
continue
if "Mapping" in strType:
- _saveConfigRecursiveDbg(value, aStream, indentp, path+"."+key)
+ _saveConfigRecursiveDbg(value, aStream, indentp, path+"."+key, nbp)
continue
if "Sequence" in strType:
for i in range(len(value)):
- _saveConfigRecursiveDbg(value.data[i], aStream, indentp, path+"."+key+"[%i]" % i)
+ _saveConfigRecursiveDbg(value.data[i], aStream, indentp, path+"."+key+"[%i]" % i, nbp)
continue
if "Expression" in strType:
try:
return COLS.toColor(res)
def setColorLevelname(self, levelname):
- return self._ColorLevelname[levelname] + levelname + "<reset>"
+ return levelname
+ # setting colors breaks tabulation/alignment because of the color escape characters
+ # return self._ColorLevelname[levelname] + levelname + "<reset>"
class UnittestFormatter(logging.Formatter):
return res
def write(self, astr):
- # log("UnittestStream.write('%s')" % astr)
+ """final method called when message is logged"""
+ # log("UnittestStream.write('%s')" % astr) # for debug ...
self._logs += astr
def flush(self):
else:
logger.setLevel(logger.DEBUG)
+def setFileHandler(logger, config):
+ """
+ Add a file handler to the logger to set the log files
+ of a salomeTools command,
+ once the command is known from the pyconf/config instance.
+
+ | Example:
+ | log file names for the command 'prepare'
+ | with its micro commands clean/source/patch:
+ | ~/LOGS/20180510_140606_prepare_lenovo.xml
+ | ~/LOGS/OUT/20180510_140606_prepare_lenovo.txt
+ | ~/LOGS/micro_20180510_140607_clean_lenovo.xml
+ | ~/LOGS/OUT/micro_20180510_140607_clean_lenovo.txt
+ | etc.
+ """
+ import src.debug as DBG # avoid cross import
+ DBG.write("setFileHandler", logger.handlers, True)
+ DBG.write("setFileHandler", config.VARS, True)
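
The hunk above only traces its inputs so far; a minimal sketch of what attaching the file handler could look like, using only the standard logging API (the log directory and base file name passed in are assumptions, not the real pyconf fields):

    import os
    import logging

    def set_file_handler_sketch(logger, log_dir, base_name):
        # hypothetical helper: log_dir and base_name would be derived from config
        out_dir = os.path.join(log_dir, "OUT")
        if not os.path.isdir(out_dir):
            os.makedirs(out_dir)
        handler = logging.FileHandler(os.path.join(out_dir, base_name + ".txt"))
        handler.setLevel(logging.DEBUG)
        handler.setFormatter(
            logging.Formatter("%(asctime)s :: %(levelname)-8s :: %(message)s"))
        logger.addHandler(handler)
        return handler
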
def testLogger_1(logger):
"""small test"""
-#!/usr/bin/env python
-#-*- coding:utf-8 -*-
-
-# Copyright 2004-2007 by Vinay Sajip. All Rights Reserved.
+# Copyright 2004-2010 by Vinay Sajip. All Rights Reserved.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose and without fee is hereby granted,
# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-# Copyright (C) 2010-2013 CEA/DEN
+# Copyright (C) 2010-2018 CEA/DEN
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
See U{this tutorial<http://www.red-dove.com/python_config.html|_blank>} for more
information.
-#modified for salomeTools
-@version: 0.3.7.1
+@version: 0.3.9.1
@author: Vinay Sajip
-@copyright: Copyright (C) 2004-2007 Vinay Sajip. All Rights Reserved.
+@copyright: Copyright (C) 2004-2010 Vinay Sajip. All Rights Reserved.
@var streamOpener: The default stream opener. This is a factory function which
__author__ = "Vinay Sajip <vinay_sajip@red-dove.com>"
__status__ = "alpha"
-__version__ = "0.3.7.1" #modified for salomeTools
-__date__ = "05 October 2007"
+__version__ = "0.3.9.1" # cvw modified for salomeTools
+__date__ = "11 May 2018"
+
+from types import StringType, UnicodeType
import codecs
+import logging
import os
import sys
NEWLINE = '\n'
try:
+ import encodings.utf_32
has_utf32 = True
except:
has_utf32 = False
+try:
+ from logging.handlers import NullHandler
+except ImportError:
+ class NullHandler(logging.Handler):
+ def emit(self, record):
+ pass
+
+logger = logging.getLogger(__name__)
+if not logger.handlers:
+ logger.addHandler(NullHandler())
+
class ConfigInputStream(object):
"""
An input stream which can read either ANSI files with default encoding
line = u''
while True:
c = self.stream.read(1)
- if isinstance(c, bytes):
- c = c.decode()
if c:
line += c
if c == '\n':
@return: A stream with the specified name.
@rtype: A read-only stream (file-like object)
"""
- return ConfigInputStream(open(name, 'rb'))
+ return ConfigInputStream(file(name, 'rb'))
streamOpener = None
Make a path from a prefix and suffix.
Examples::
- makePath('', 'suffix') -> 'suffix'
- makePath('prefix', 'suffix') -> 'prefix.suffix'
- makePath('prefix', '[1]') -> 'prefix[1]'
- @param prefix: The prefix to use. If it evaluates as false, the suffix is returned.
+ makePath('', 'suffix') -> 'suffix'
+ makePath('prefix', 'suffix') -> 'prefix.suffix'
+ makePath('prefix', '[1]') -> 'prefix[1]'
+
+ @param prefix: The prefix to use. If it evaluates as false, the suffix
+ is returned.
@type prefix: str
- @param suffix: The suffix to use. It is either an identifier or an index in brackets.
+ @param suffix: The suffix to use. It is either an identifier or an
+ index in brackets.
@type suffix: str
- @return: The path concatenation of prefix and suffix, with a dot if the suffix is not a bracketed index.
+ @return: The path concatenation of prefix and suffix, with a
+ dot if the suffix is not a bracketed index.
@rtype: str
+
"""
if not prefix:
rv = suffix
if isinstance(value, Reference) or isinstance(value, Expression):
stream.write('%s%r%s' % (indstr, value, NEWLINE))
else:
- if isinstance(value, str): # and not isWord(value):
+ if (type(value) is StringType): # and not isWord(value):
value = repr(value)
stream.write('%s%s%s' % (indstr, value, NEWLINE))
self.sys = sys
self.os = os
+ def __repr__(self):
+ return "<Namespace('%s')>" % ','.join(self.__dict__.keys())
+
def __init__(self, streamOrFile=None, parent=None, PWD = None):
"""
Initializes an instance.
Mapping.__init__(self, parent)
object.__setattr__(self, 'reader', ConfigReader(self))
object.__setattr__(self, 'namespaces', [Config.Namespace()])
+ object.__setattr__(self, 'resolving', set())
if streamOrFile is not None:
- if isinstance(streamOrFile, str) or isinstance(streamOrFile, bytes):
+ if isinstance(streamOrFile, StringType) or isinstance(streamOrFile, UnicodeType):
global streamOpener
if streamOpener is None:
streamOpener = defaultStreamOpener
@raise ConfigFormatError: if there is a syntax error in the stream.
"""
reader = object.__getattribute__(self, 'reader')
+ #object.__setattr__(self, 'root', reader.load(stream))
reader.load(stream)
stream.close()
@raise ConfigError: If the path is invalid
"""
s = 'self.' + path
- return eval(s)
- '''try:
+ try:
return eval(s)
- except Exception as e:
- # raise ConfigError("Config path not found: '%s'" % path)
- raise ConfigError(e)'''
+ except Exception, e:
+ raise ConfigError(str(e))
class Sequence(Container):
"""
def resolve(self, container):
"""
- Resolve this instance in the context of a container.
+ Resolve .7 this instance in the context of a container.
@param container: The container to resolve from.
@type container: L{Container}
@rtype: any
@raise ConfigResolutionError: If resolution fails.
"""
+ # TODO
+ # resolve 0391 with current = self.findConfig(container) creates Circular reference message
rv = None
path = object.__getattribute__(container, 'path')
current = container
if self.type == BACKTICK:
namespaces = object.__getattribute__(current, 'namespaces')
found = False
+ s = str(self)[1:-1]
for ns in namespaces:
try:
- rv = eval(str(self)[1:-1], vars(ns))
+ try:
+ rv = eval(s, vars(ns))
+ except TypeError: #Python 2.7 - vars is a dictproxy
+ rv = eval(s, {}, vars(ns))
found = True
break
except:
+ logger.debug("unable to resolve %r in %r", s, ns)
pass
if found:
break
raise ConfigResolutionError("unable to evaluate %r in the configuration %s" % (self, path))
return rv
+ def resolve_9(self, container):
+ """
+ Resolve .9 this instance in the context of a container.
+
+ @param container: The container to resolve from.
+ @type container: L{Container}
+ @return: The resolved value.
+ @rtype: any
+ @raise ConfigResolutionError: If resolution fails.
+ """
+ print "yyyyooo"
+ rv = None
+ path = object.__getattribute__(container, 'path')
+ current = self.findConfig(container)
+ while current is not None:
+ if self.type == BACKTICK:
+ namespaces = object.__getattribute__(current, 'namespaces')
+ found = False
+ s = str(self)[1:-1]
+ for ns in namespaces:
+ try:
+ try:
+ rv = eval(s, vars(ns))
+ except TypeError: #Python 2.7 - vars is a dictproxy
+ rv = eval(s, {}, vars(ns))
+ found = True
+ break
+ except:
+ logger.debug("unable to resolve %r in %r", s, ns)
+ pass
+ if found:
+ break
+ else:
+ firstkey = self.elements[0]
+ if firstkey in current.resolving:
+ current.resolving.remove(firstkey)
+ raise ConfigResolutionError("Circular reference: %r" % firstkey)
+ current.resolving.add(firstkey)
+ key = firstkey
+ try:
+ rv = current[key]
+ for item in self.elements[1:]:
+ key = item[1]
+ rv = rv[key]
+ current.resolving.remove(firstkey)
+ break
+ except ConfigResolutionError:
+ raise
+ except:
+ logger.debug("Unable to resolve %r: %s", key, sys.exc_info()[1])
+ rv = None
+ pass
+ current.resolving.discard(firstkey)
+ current = self.findConfig(object.__getattribute__(current, 'parent'))
+ if current is None:
+ raise ConfigResolutionError("unable to evaluate %r in the configuration %s" % (self, path))
+ return rv
+
def __str__(self):
s = self.elements[0]
for tt, tv in self.elements[1:]:
elif c in self.digits:
token = c
tt = NUMBER
+ in_exponent=False
while True:
c = self.getChar()
if not c:
break
if c in self.digits:
token += c
- elif (c == '.') and token.find('.') < 0:
+ elif (c == '.') and token.find('.') < 0 and not in_exponent:
+ token += c
+ elif (c == '-') and token.find('-') < 0 and in_exponent:
+ token += c
+ elif (c in 'eE') and token.find('e') < 0 and\
+ token.find('E') < 0:
token += c
+ in_exponent = True
else:
if c and (c not in self.whitespace):
self.pbchars.append(c)
value = True
try:
parent.addMapping(key, value, comment)
- except Exception as e:
+ except Exception, e:
raise ConfigFormatError("%s: %s, %r" % (self.location(), e,
self.token[1]))
tt = self.token[0]
comment = self.comment
tt = self.token[0]
while tt in [STRING, WORD, NUMBER, LCURLY, LBRACK, LPAREN, DOLLAR,
- TRUE, FALSE, NONE, BACKTICK]:
+ TRUE, FALSE, NONE, BACKTICK, MINUS]:
suffix = '[%d]' % len(rv)
value = self.parseValue(parent, suffix)
rv.append(value, comment)
self.match(RCURLY)
else:
self.match(AT)
- _, fn = self.match(STRING)
+ tt, fn = self.match(STRING)
rv = Config(eval(fn), parent)
return rv
@type map2: L{Mapping}.
@param key: The key in map2 (which also exists in map1).
@type key: str
- @return: One of "merge", "append", "mismatch" or "overwrite"
- indicating what action should be taken. This should
- be appropriate to the objects being merged - e.g.
- there is no point returning "merge" if the two objects
- are instances of L{Sequence}.
+ @return: One of "merge", "append", "mismatch" or "overwrite"
+ indicating what action should be taken. This should
+ be appropriate to the objects being merged - e.g.
+ there is no point returning "merge" if the two objects
+ are instances of L{Sequence}.
@rtype: str
"""
obj1 = map1[key]
except ConfigError:
pass
if not found:
- raise ConfigError("ConfigList path not found '%r'" % path)
+ raise ConfigError("unable to resolve %r" % path)
return rv
DBG.write("setNotLocale", os.environ["LANG"])
def setLocale():
- """\
+ """
reset initial locale at any moment
'fr' or else (TODO) from initial environment var '$LANG'
'i18n' as 'internationalization'
if self._runner.config is None:
self._logger.error("%s instance have runner.config None, fix it." % self.getClassName())
return self._runner.config
+
+ def get_products_list(self, options, config):
+ return CFGMGR.get_products_list(options, config)
def assumeAsList(self, strOrList):
return assumeAsList(strOrList)
else:
return self._getModule(name)
- def getCommandInstance(self, name):
+ def getCommand(self, name):
"""
returns inherited instance of Command(_BaseCmd) for command 'name'
if module not loaded yet, load it.
datadir=None)
# create/get dynamically the command instance to call its 'run' method
- cmdInstance = self.getCommandInstance(self.nameCommandToLoad)
+ cmdInstance = self.getCommand(self.nameCommandToLoad)
+ import src.loggingSat as LOG # avoid cross import
+ LOG.setFileHandler(self.getLogger(), self.getConfig())
# Run the command using the arguments
returnCode = cmdInstance.run(self.commandArguments)
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
-Utilities to read xml logging files
+Utilities to manage writing/reading xml logging files
| Usage:
| >> import src.xmlManager as XMLMGR
import src.ElementTree as ETREE
import src.utilsSat as UTS
+
class XmlLogFile(object):
- """\
+ """
Class to manage writing in salomeTools xml log file
"""
def __init__(self, filePath, rootname, attrib = {}):
if file_path:
log_file_path = file_path
try:
- f = open(log_file_path, 'w')
- f.write("<?xml version='1.0' encoding='utf-8'?>\n")
- if stylesheet:
- f.write("<?xml-stylesheet type='text/xsl' href='%s'?>\n" %
- stylesheet)
- f.write(ETREE.tostring(self.xmlroot, encoding='utf-8'))
- f.close()
- except IOError:
- pass
+ with open(log_file_path, 'w') as f:
+ f.write("<?xml version='1.0' encoding='utf-8'?>\n")
+ if stylesheet:
+ f.write("<?xml-stylesheet type='text/xsl' href='%s'?>\n" %
+ stylesheet)
+ f.write(ETREE.tostring(self.xmlroot, encoding='utf-8'))
+ except Exception:
+ raise Exception("problem writing Xml log file: %s" % log_file_path)
def add_simple_node(self, node_name, text=None, attrib={}):
"""Add a node with some attibutes and text to the root node.
if not os.path.exists(os.path.dirname(filename)):
os.makedirs(os.path.dirname(filename))
- f = open(filename, "w")
- f.write("<?xml version='1.0' encoding='utf-8'?>\n")
- if len(stylesheet) > 0:
- f.write("<?xml-stylesheet type='text/xsl' href='%s'?>\n" % stylesheet)
- f.write(ETREE.tostring(xmlroot, encoding='utf-8'))
- f.close()
-
\ No newline at end of file
+ with open(filename, "w") as f:
+ f.write("<?xml version='1.0' encoding='utf-8'?>\n")
+ if len(stylesheet) > 0:
+ f.write("<?xml-stylesheet type='text/xsl' href='%s'?>\n" % stylesheet)
+ f.write(ETREE.tostring(xmlroot, encoding='utf-8'))
+
+
--- /dev/null
+#!/usr/bin/env python
+#-*- coding:utf-8 -*-
+
+# Copyright (C) 2010-2018 CEA/DEN
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+"""
+see: http://sametmax.com/ecrire-des-logs-en-python
+
+| # create a formatter that will add the time and the level
+| # of each message when a message is written to the log
+| formatter = logging.Formatter('%(asctime)s :: %(levelname)s :: %(message)s')
+|
+| # create a handler that will redirect log writes to
+| # a file in 'append' mode, with 1 backup and a maximum size of 1 MB
+| file_handler = RotatingFileHandler('activity.log', 'a', 1000000, 1)
+|
+| # set its level to DEBUG, tell it to use the formatter
+| # created above, and add this handler to the logger
+| file_handler.setLevel(logging.DEBUG)
+| file_handler.setFormatter(formatter)
+| logger.addHandler(file_handler)
+|
+| # create a second handler that will redirect each log write
+| # to the console
+| stream_handler = logging.StreamHandler()
+| stream_handler.setLevel(logging.DEBUG)
+| logger.addHandler(stream_handler)
+|
+| # After 3 hours, we can finally log
+| # Time to spam your code with logs everywhere:
+| logger.info('Hello')
+| logger.warning('Testing %s', 'foo')
+"""
+
+import os
+import sys
+import unittest
+import pprint as PP
+import logging as LOGI
+from logging.handlers import BufferingHandler
+
+import src.debug as DBG
+
+verbose = False #True
+
+class LoggerSat(LOGI.Logger):
+ """
+ Elementary prototype for the sat logger:
+ adds a TRACE level, used as log.trace(msg),
+ below log.info(msg)
+ and above log.debug(msg),
+ to allow storing long ASCII logs in txt files under/outside the xml files.
+
+ see: /usr/lib/python2.7/logging/*.py
+ """
+
+ _TRACE = LOGI.INFO - 2 # just below
+
+ def __init__(self, name, level=LOGI.INFO):
+ """
+ Initialize the logger with a name and an optional level.
+ """
+ super(LoggerSat, self).__init__(name, level)
+ LOGI.addLevelName(self._TRACE, "TRACE")
+ # LOGI.TRACE = self._TRACE # only for coherency,
+
+ def trace(self, msg, *args, **kwargs):
+ """
+ Log 'msg % args' with severity '_TRACE'.
+
+ To pass exception information, use the keyword argument exc_info with
+ a true value, e.g.
+
+ logger.trace("Houston, we have a %s", "long trace to follow")
+ """
+ if self.isEnabledFor(self._TRACE):
+ self._log(self._TRACE, msg, args, **kwargs)
+
+class TestCase(unittest.TestCase):
+ """Tests for the LoggerSat prototype"""
+
+ initialLoggerClass = [] # to keep clean module logging
+
+ def test_000(self):
+ # one shot setUp() for this TestCase
+ self.initialLoggerClass.append(LOGI._loggerClass)
+ LOGI.setLoggerClass(LoggerSat)
+ if verbose:
+ DBG.push_debug(True)
+ # DBG.write("assert unittest", [a for a in dir(self) if "assert" in a])
+ pass
+
+ def test_999(self):
+ # one shot tearDown() for this TestCase
+ if verbose:
+ DBG.pop_debug()
+ LOGI.setLoggerClass(self.initialLoggerClass[0])
+ return
+
+ def test_010(self):
+ # LOGI.setLoggerClass(LoggerSat) # done once in test_000
+ name = "testLogging"
+ lgr = LOGI.getLogger(name) # create it
+ lgr.setLevel("DEBUG")
+ self.assertEqual(lgr.__class__, LoggerSat)
+ self.assertEqual(lgr.name, name)
+ self.assertIn("trace", dir(lgr))
+ self.assertIn("TRACE", LOGI._levelNames.keys())
+ self.assertIn(lgr._TRACE, LOGI._levelNames.keys())
+ self.assertEqual(LOGI.getLevelName(LOGI.INFO), "INFO")
+ self.assertEqual(LOGI.getLevelName(lgr._TRACE), "TRACE")
+
+ # create a handler for each log output on the console
+ formatter = LOGI.Formatter('%(levelname)-8s :: %(message)s')
+ # stream_handler = LOGI.handlers.StreamHandler() # log outputs in console
+ stream_handler = LOGI.handlers.BufferingHandler(1000) # logoutputs in memory
+ stream_handler.setLevel(LOGI.DEBUG)
+ stream_handler.setFormatter(formatter)
+ lgr.addHandler(stream_handler)
+ # print # skip one line if outputs in console
+ lgr.warning("!!! test warning")
+ lgr.info("!!! test info")
+ lgr.trace("!!! test trace")
+ lgr.debug("!!! test debug")
+ self.assertEqual(len(stream_handler.buffer), 4)
+ rec = stream_handler.buffer[-1]
+ self.assertEqual(rec.levelname, "DEBUG")
+ self.assertEqual(rec.msg, "!!! test debug")
+ self.assertEqual(stream_handler.get_name(), None) # what to serve ?
+
+ def test_020(self):
+ # LOGI.setLoggerClass(LoggerSat)
+ name = "testLogging"
+ lgr = LOGI.getLogger(name) # find it as created yet in test_010
+ stream_handler = lgr.handlers[0]
+ rec = stream_handler.buffer[-1]
+ self.assertEqual(rec.levelname, "DEBUG")
+ self.assertEqual(rec.msg, "!!! test debug")
+
+
+ """
+ def test_015(self):
+ t = DATT.DateTime("now")
+ self.assertTrue(t.isOk())
+ rrt = str(t)
+ DBG.write("test_015 str", rrt)
+ self.assertIn("20", rrt) # 2018 to 2099 ok
+ self.assertIn("-", rrt)
+ self.assertIn(":", rrt)
+ rrt = repr(t)
+ DBG.write("test_015 repr", rrt)
+ self.assertIn("DateTime", rrt)
+ self.assertIn("20", rrt) # 2018 to 2099 ok
+ self.assertIn("-", rrt)
+ self.assertIn(":", rrt)
+
+
+ def test_020(self):
+ t1 = DATT.DateTime("now")
+ t2 = DATT.DateTime(t1)
+ self.assertTrue(t2.isOk())
+ self.assertEqual(t1, t2)
+ t2 = DATT.DateTime("now")
+ self.assertNotEqual(t1, t2) # microseconds differs
+
+ DATT.sleep(3) # 3 second more
+ t2 = DATT.DateTime("now")
+ self.assertGreater(2, 1) # to be sure
+ self.assertGreater(str(t2), str(t1)) # seconds differs
+ self.assertGreater(repr(t2), repr(t1)) # seconds differs
+ self.assertGreater(t2, t1)
+ self.assertTrue(t2 > t1)
+ self.assertFalse(t2 == t1)
+ self.assertFalse(t2 < t1)
+ self.assertFalse(t2 <= t1)
+
+ def test_040(self):
+ t1 = DATT.DateTime("now")
+ delta = DATT.DeltaTime(t1)
+ self.assertFalse(delta.isOk())
+ self.assertIn("Undefined", delta.toSeconds())
+ DBG.write("test_040 str", str(delta))
+ DBG.write("test_040 repr", repr(delta))
+ with self.assertRaises(Exception):
+ delta.raiseIfKo()
+ DATT.DateTime().raiseIfKo()
+
+ def test_042(self):
+ t1 = DATT.DateTime("now")
+ DATT.sleep(3.1) # 3.1 second more
+ t2 = DATT.DateTime("now")
+ self.assertTrue(t2 > t1)
+ delta = DATT.DeltaTime(t1, t2)
+ self.assertGreater(delta.toSeconds(), 3)
+ self.assertEqual(int(delta.toSeconds()), 3)
+ DBG.write("test_042 str", str(delta))
+ DBG.write("test_042 repr", repr(delta))
+ delta2 = delta.raiseIfKo()
+ self.assertEqual(delta2.toSeconds(), delta.toSeconds())
+
+ def test_044(self):
+ for more in [0, 0.56789, 5.6789, 56.789, 61, 3661, 36061]:
+ t1 = DATT.DateTime("now")
+ t2 = DATT.DateTime(t1)
+ t2.addSeconds(more)
+ delta = DATT.DeltaTime(t1, t2)
+ r = delta.toStrHuman()
+ DBG.write("test_044 str", r)
+ if more < 60:
+ self.assertIn("s", r)
+ self.assertNotIn("m", r)
+ self.assertNotIn("h", r)
+ continue
+ if more < 3600:
+ self.assertIn("s", r)
+ self.assertIn("m", r)
+ self.assertNotIn("h", r)
+ else:
+ self.assertIn("s", r)
+ self.assertIn("m", r)
+ self.assertIn("h", r)"""
+
+
+
+
+if __name__ == '__main__':
+ unittest.main(exit=False)
+ pass
+
}
""",
+# circular reference error
+6 : """\
+ aa: Yves
+ bb: $cc
+ cc: $bb
+""",
+
+7 : """\
+ aa: Yves
+ bb: $cc
+ cc: [
+ cc1
+ $bb
+ ]
+""",
+
+8 : """\
+ aa: Yves
+ bb: $cc
+ cc: {
+ cc1: cc11
+ cc2: $bb
+ }
+""",
}
res = outStream.value
DBG.write("test_100 cfg save", res)
DBG.write("test_100 cfg debug", cfg)
- DBG.write("test_100 cfg debug", cfg.cc)
+ DBG.write("test_100 cfg.cc debug", cfg.cc)
+
+ cc = cfg.cc
+ # DBG.write("test_100 type cc[3]", dir(cc), True)
+ DBG.write("test_100 cc", [cc.data[i] for i in range(len(cc))])
+
+ def test_100(self):
+ inStream = DBG.InStream(_EXAMPLES[5])
+ outStream = DBG.OutStream()
+ cfg = PYF.Config(inStream) # KO
+ cfg.__save__(outStream) # sat renamed save() in __save__()
+ res = outStream.value
+ DBG.write("test_100 cfg save", res)
+ DBG.write("test_100 cfg debug", cfg)
+ DBG.write("test_100 cfg.cc debug", cfg.cc)
cc = cfg.cc
# DBG.write("test_100 type cc[3]", dir(cc), True)
DBG.write("test_100 cc", [cc.data[i] for i in range(len(cc))])
+ def test_110(self):
+ inStream = DBG.InStream(_EXAMPLES[6])
+ outStream = DBG.OutStream()
+ cfg = PYF.Config(inStream)
+ cfg.__save__(outStream)
+
+ res = outStream.value
+ DBG.write("test_110 cfg save", res)
+ self.assertNotIn("ERROR", res)
+
+ res = DBG.getStrConfigDbg(cfg)
+ DBG.write("test_110 cfg debug", res)
+ self.assertIn("ERROR", res)
+ self.assertIn("unable to evaluate $cc", res)
+ self.assertIn("unable to evaluate $bb", res)
+
+ def test_120(self):
+ for ii in [7, 8]:
+ inStream = DBG.InStream(_EXAMPLES[ii])
+ outStream = DBG.OutStream()
+ cfg = PYF.Config(inStream)
+ cfg.__save__(outStream)
+
+ res = outStream.value
+ DBG.write("test_120 cfg save", res, True)
+ self.assertNotIn("ERROR", res)
+
+ res = DBG.getStrConfigDbg(cfg)
+ DBG.write("test_120 cfg debug", res, True)
+ # no error circular !!!
+ # self.assertIn("ERROR", res) # no error circular !!!
+ # self.assertIn("unable to evaluate $cc", res)
+ # self.assertIn("unable to evaluate $bb", res)
+ res = cfg.bb
+ DBG.write("test_120 cfg.bb debug", res, True)
+
+ res = cfg.cc
+ DBG.write("test_120 cfg.cc debug", res, True)
+
def test_999(self):
# one shot tearDown() for this TestCase
# SAT.setLocale() # end test english